| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package kafka.api
import java.util.Properties
import kafka.admin.AdminUtils
import kafka.consumer.SimpleConsumer
import kafka.integration.KafkaServerTestHarness
import kafka.server.{ClientQuotaManager, ClientConfigOverride, KafkaConfig, KafkaServer}
import kafka.utils.TestUtils
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer._
import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback
import org.apache.kafka.common.MetricName
import org.apache.kafka.common.metrics.{Quota, KafkaMetric}
import org.junit.Assert.assertEquals
import org.junit.Assert.assertTrue
import org.junit.{After, Before, Test}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.Map
import scala.collection.mutable
class QuotasTest extends KafkaServerTestHarness {
private val producerBufferSize = 300000
private val producerId1 = "QuotasTestProducer-1"
private val producerId2 = "QuotasTestProducer-2"
private val consumerId1 = "QuotasTestConsumer-1"
private val consumerId2 = "QuotasTestConsumer-2"
val numServers = 2
val overridingProps = new Properties()
// Low enough quota that a producer sending a small payload in a tight loop should get throttled
overridingProps.put(KafkaConfig.ProducerQuotaBytesPerSecondDefaultProp, "8000")
overridingProps.put(KafkaConfig.ConsumerQuotaBytesPerSecondDefaultProp, "2500")
override def generateConfigs() = {
FixedPortTestUtils.createBrokerConfigs(numServers,
zkConnect,
enableControlledShutdown = false)
.map(KafkaConfig.fromProps(_, overridingProps))
}
var producers = mutable.Buffer[KafkaProducer[Array[Byte], Array[Byte]]]()
var consumers = mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]]()
var replicaConsumers = mutable.Buffer[SimpleConsumer]()
var leaderNode: KafkaServer = null
var followerNode: KafkaServer = null
private val topic1 = "topic-1"
@Before
override def setUp() {
super.setUp()
val producerProps = new Properties()
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
producerProps.put(ProducerConfig.ACKS_CONFIG, "0")
producerProps.put(ProducerConfig.BLOCK_ON_BUFFER_FULL_CONFIG, "false")
producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, producerBufferSize.toString)
producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, producerId1)
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArraySerializer])
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArraySerializer])
producers += new KafkaProducer[Array[Byte], Array[Byte]](producerProps)
producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, producerId2)
producers += new KafkaProducer[Array[Byte], Array[Byte]](producerProps)
val numPartitions = 1
val leaders = TestUtils.createTopic(zkUtils, topic1, numPartitions, numServers, servers)
leaderNode = if (leaders(0).get == servers.head.config.brokerId) servers.head else servers(1)
followerNode = if (leaders(0).get != servers.head.config.brokerId) servers.head else servers(1)
assertTrue("Leader of all partitions of the topic should exist", leaders.values.forall(leader => leader.isDefined))
// Create consumers
val consumerProps = new Properties
consumerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "QuotasTest")
consumerProps.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapUrl)
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArrayDeserializer])
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArrayDeserializer])
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, consumerId1)
consumers += new KafkaConsumer(consumerProps)
// Create replica consumers with the same clientId as the high level consumer. These requests should never be throttled
replicaConsumers += new SimpleConsumer("localhost", leaderNode.boundPort(), 1000000, 64*1024, consumerId1)
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, consumerId2)
consumers += new KafkaConsumer(consumerProps)
replicaConsumers += new SimpleConsumer("localhost", leaderNode.boundPort(), 1000000, 64*1024, consumerId2)
}
@After
override def tearDown() {
producers.foreach( _.close )
consumers.foreach( _.close )
replicaConsumers.foreach( _.close )
super.tearDown()
}
@Test
def testThrottledProducerConsumer() {
val allMetrics: mutable.Map[MetricName, KafkaMetric] = leaderNode.metrics.metrics().asScala
val numRecords = 1000
produce(producers.head, numRecords)
val producerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.ProduceKey),
"Tracking throttle-time per client",
"client-id", producerId1)
assertTrue("Should have been throttled", allMetrics(producerMetricName).value() > 0)
// Consumer should read in a bursty manner and get throttled immediately
consume(consumers.head, numRecords)
// The replica consumer should also not be throttled. Create a fetch request that will exceed the quota immediately
val request = new FetchRequestBuilder().addFetch(topic1, 0, 0, 1024*1024).replicaId(followerNode.config.brokerId).build()
replicaConsumers.head.fetch(request)
val consumerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.FetchKey),
"Tracking throttle-time per client",
"client-id", consumerId1)
assertTrue("Should have been throttled", allMetrics(consumerMetricName).value() > 0)
}
@Test
def testProducerConsumerOverrideUnthrottled() {
// Give effectively unlimited quota for producerId2 and consumerId2
val props = new Properties()
props.put(ClientConfigOverride.ProducerOverride, Long.MaxValue.toString)
props.put(ClientConfigOverride.ConsumerOverride, Long.MaxValue.toString)
AdminUtils.changeClientIdConfig(zkUtils, producerId2, props)
AdminUtils.changeClientIdConfig(zkUtils, consumerId2, props)
TestUtils.retry(10000) {
val quotaManagers: Map[Short, ClientQuotaManager] = leaderNode.apis.quotaManagers
val overrideProducerQuota = quotaManagers.get(RequestKeys.ProduceKey).get.quota(producerId2)
val overrideConsumerQuota = quotaManagers.get(RequestKeys.FetchKey).get.quota(consumerId2)
assertEquals(s"ClientId $producerId2 must have unlimited producer quota", Quota.upperBound(Long.MaxValue), overrideProducerQuota)
assertEquals(s"ClientId $consumerId2 must have unlimited consumer quota", Quota.upperBound(Long.MaxValue), overrideConsumerQuota)
}
val allMetrics: mutable.Map[MetricName, KafkaMetric] = leaderNode.metrics.metrics().asScala
val numRecords = 1000
produce(producers(1), numRecords)
val producerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.ProduceKey),
"Tracking throttle-time per client",
"client-id", producerId2)
assertEquals("Should not have been throttled", 0.0, allMetrics(producerMetricName).value(), 0.0)
// The "client" consumer does not get throttled.
consume(consumers(1), numRecords)
// The replica consumer should also not be throttled. Create a fetch request that will exceed the quota immediately
val request = new FetchRequestBuilder().addFetch(topic1, 0, 0, 1024*1024).replicaId(followerNode.config.brokerId).build()
replicaConsumers(1).fetch(request)
val consumerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.FetchKey),
"Tracking throttle-time per client",
"client-id", consumerId2)
assertEquals("Should not have been throttled", 0.0, allMetrics(consumerMetricName).value(), 0.0)
}
def produce(p: KafkaProducer[Array[Byte], Array[Byte]], count: Int): Int = {
var numBytesProduced = 0
for (i <- 0 to count) {
val payload = i.toString.getBytes
numBytesProduced += payload.length
p.send(new ProducerRecord[Array[Byte], Array[Byte]](topic1, null, null, payload),
new ErrorLoggingCallback(topic1, null, null, true)).get()
Thread.sleep(1)
}
numBytesProduced
}
def consume(consumer: KafkaConsumer[Array[Byte], Array[Byte]], numRecords: Int) {
consumer.subscribe(List(topic1))
var numConsumed = 0
while (numConsumed < numRecords) {
for (cr <- consumer.poll(100)) {
numConsumed += 1
}
}
}
}
| bluebreezecf/kafka | core/src/test/scala/integration/kafka/api/QuotasTest.scala | Scala | apache-2.0 | 10,078 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.jdk8
import java.time.{OffsetDateTime, ZonedDateTime}
import com.datastax.driver.core.utils.UUIDs
import com.outworkers.phantom.PhantomSuite
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.tables.TimeUUIDRecord
import com.outworkers.util.samplers._
import org.scalacheck.Gen
import scala.concurrent.Future
class Jdk8TimeUUIDTests extends PhantomSuite {
override def beforeAll(): Unit = {
super.beforeAll()
database.timeuuidTable.createSchema()
}
it should "be able to store and retrieve a time slice of records based on an OffsetDateTime" in {
val interval = 60
val now = OffsetDateTime.now()
val start = now.plusMinutes(-interval)
val end = now.plusMinutes(interval)
val user = UUIDs.random()
val record = TimeUUIDRecord(
user,
UUIDs.timeBased(),
gen[String]
)
/**
* Cassandra sometimes skews the timestamp of this date by exactly 1 millisecond
* for reasons beyond our understanding, which means the test is flaky unless this
* list is added to make sure at least one of t, t minus 1 millisecond, or t plus 1 millisecond
* is found in the expected list of records.
*/
val recordList = record :: Nil
val minuteOffset = start.plusMinutes(-1).timeuuid
val secondOffset = start.plusSeconds(-15).timeuuid
val record1 = TimeUUIDRecord(
user,
minuteOffset,
gen[String]
)
val record2 = TimeUUIDRecord(
user,
secondOffset,
gen[String]
)
val chain = for {
_ <- database.timeuuidTable.store(record).future()
_ <- database.timeuuidTable.store(record1).future()
_ <- database.timeuuidTable.store(record2).future()
get <- database.timeuuidTable.select
.where(_.user eqs record.user)
.and(_.id <= maxTimeuuid(end))
.and(_.id >= minTimeuuid(start))
.fetch()
get2 <- database.timeuuidTable.select
.where(_.user eqs record.user)
.and(_.id >= minTimeuuid(start.plusMinutes(-2)))
.and(_.id <= maxTimeuuid(end))
.fetch()
} yield (get, get2)
whenReady(chain) { case (res, res2) =>
info("At least one timestamp value, including potential time skewes, should be included here")
recordList exists(res contains) shouldEqual true
info("Should not contain record with a timestamp 1 minute before the selection window")
res should not contain record1
info("Should not contain record with a timestamp 15 seconds before the selection window")
res should not contain record2
info("Should contain all elements if we expand the selection window by 1 minute")
res2.find(_.id == record.id) shouldBe defined
res2.find(_.id == record1.id) shouldBe defined
res2.find(_.id == record2.id) shouldBe defined
}
}
it should "be able to store and retrieve a time slice of records based on ZonedDateTime" in {
val interval = 60
val now = ZonedDateTime.now()
val start = now.plusMinutes(-interval)
val end = now.plusMinutes(interval)
val user = UUIDs.random()
val record = TimeUUIDRecord(
user,
UUIDs.timeBased(),
gen[String]
)
/**
* Cassandra sometimes skews the timestamp of this date by exactly 1 millisecond
* for reasons beyond our understanding, which means the test is flaky unless this
* list is added to make sure at least one of t, t minus 1 millisecond, or t plus 1 millisecond
* is found in the expected list of records.
*/
val recordList = record :: Nil
val minuteOffset = start.plusMinutes(-1).timeuuid
val secondOffset = start.plusSeconds(-15).timeuuid
val record1 = TimeUUIDRecord(
user,
minuteOffset,
gen[String]
)
val record2 = TimeUUIDRecord(
user,
secondOffset,
gen[String]
)
val chain = for {
_ <- database.timeuuidTable.store(record).future()
_ <- database.timeuuidTable.store(record1).future()
_ <- database.timeuuidTable.store(record2).future()
one <- database.timeuuidTable.select
.where(_.user eqs record.user)
.and(_.id >= minTimeuuid(start))
.and(_.id <= maxTimeuuid(end))
.fetch()
one2 <- database.timeuuidTable.select
.where(_.user eqs record.user)
.and(_.id >= minTimeuuid(start.plusMinutes(-2)))
.and(_.id <= maxTimeuuid(end))
.fetch()
} yield (one, one2)
whenReady(chain) { case (res, res2) =>
info("At least one timestamp value, including potential time skewes, should be included here")
recordList exists(res contains) shouldEqual true
info("Should not contain record with a timestamp 1 minute before the selection window")
res should not contain record1
info("Should not contain record with a timestamp 15 seconds before the selection window")
res should not contain record2
info("Should contain all elements if we expand the selection window by 1 minute")
res2.find(_.id == record.id) shouldBe defined
res2.find(_.id == record1.id) shouldBe defined
res2.find(_.id == record2.id) shouldBe defined
}
}
it should "be able to store and retrieve a fixed time slice of records with prepared statements" in {
val interval = 60
val now = ZonedDateTime.now()
val start = now.plusMinutes(-interval)
val end = now.plusMinutes(interval)
val user = UUIDs.random()
val record = TimeUUIDRecord(
user,
UUIDs.timeBased(),
gen[String]
)
val minuteOffset = start.plusMinutes(-1).timeuuid
val secondOffset = start.plusSeconds(-15).timeuuid
val record1 = TimeUUIDRecord(
user,
minuteOffset,
gen[String]
)
val record2 = TimeUUIDRecord(
user,
secondOffset,
gen[String]
)
val query = database.timeuuidTable.select
.where(_.user eqs ?)
.and(_.id > minTimeuuid(?))
.and(_.id < maxTimeuuid(?))
.prepareAsync()
val chain = for {
_ <- database.timeuuidTable.store(record).future()
_ <- database.timeuuidTable.store(record1).future()
_ <- database.timeuuidTable.store(record2).future()
one <- query.flatMap(_.bind(record.user, start.toInstant.toEpochMilli, end.toInstant.toEpochMilli).fetch())
one2 <- query.flatMap(_.bind(
record.user,
start.plusMinutes(-2).toInstant.toEpochMilli,
end.toInstant.toEpochMilli
).fetch())
} yield (one, one2)
whenReady(chain) { case (res, res2) =>
info("At least one timestamp value, including potential time skews, should be included here")
res should contain (record)
info("Should not contain record with a timestamp 1 minute before the selection window")
res should not contain record1
info("Should not contain record with a timestamp 15 seconds before the selection window")
res should not contain record2
info("Should contain all elements if we expand the selection window by 1 minute")
res2.find(_.id == record1.id) shouldBe defined
}
}
it should "be able to store and retrieve a time slice of records with prepared statements" in {
val interval = 60
val now = ZonedDateTime.now()
val start = now.plusMinutes(-interval)
val end = now.plusMinutes(interval)
val user = UUIDs.random()
val record = TimeUUIDRecord(
user,
UUIDs.timeBased(),
gen[String]
)
/**
* Cassandra sometimes skews the timestamp of this date by exactly 1 millisecond
* for reasons beyond our understanding, which means the test is flaky unless this
* list is added to make sure at least one of t, t minus 1 millisecond, or t plus 1 millisecond
* is found in the expected list of records.
*/
val recordList = record :: Nil
val minuteOffset = start.plusMinutes(-1).timeuuid
val secondOffset = start.plusSeconds(-15).timeuuid
val record1 = TimeUUIDRecord(
user,
minuteOffset,
gen[String]
)
val record2 = TimeUUIDRecord(
user,
secondOffset,
gen[String]
)
val query = database.timeuuidTable.select
.where(_.user eqs ?)
.and(_.id >= minTimeuuid(?))
.and(_.id <= maxTimeuuid(?))
.prepareAsync()
val chain = for {
_ <- database.timeuuidTable.store(record).future()
_ <- database.timeuuidTable.store(record1).future()
_ <- database.timeuuidTable.store(record2).future()
one <- query.flatMap(_.bind(record.user, start.toInstant.toEpochMilli, end.toInstant.toEpochMilli).fetch())
one2 <- query.flatMap(_.bind(
record.user,
start.plusMinutes(-2).toInstant.toEpochMilli,
end.toInstant.toEpochMilli
).fetch())
} yield (one, one2)
whenReady(chain) { case (res, res2) =>
info("At least one timestamp value, including potential time skewes, should be included here")
recordList exists(res contains) shouldEqual true
info("Should not contain record with a timestamp 1 minute before the selection window")
res should not contain record1
info("Should not contain record with a timestamp 15 seconds before the selection window")
res should not contain record2
info("Should contain all elements if we expand the selection window by 1 minute")
res2.find(_.id == record.id) shouldBe defined
res2.find(_.id == record1.id) shouldBe defined
res2.find(_.id == record2.id) shouldBe defined
}
}
it should "not retrieve anything for a mismatched selection time window using ZonedDateTime" in {
val intervalOffset = 60
val now = ZonedDateTime.now()
val start = now.plusSeconds(-intervalOffset)
val user = UUIDs.random()
// I will repent for my sins in the future, I'm sorry Ben.
val records = genList[TimeUUIDRecord]()
.map(_.copy(
user = user,
id = now.plusSeconds(
Gen.choose(
-intervalOffset,
intervalOffset
).sample.value
).timeuuid)
)
val chain = for {
_ <- Future.sequence(records.map(r => database.timeuuidTable.store(r).future()))
one <- database.timeuuidTable.select
.where(_.user eqs user)
.and(_.id >= minTimeuuid(start.plusSeconds(-3 * intervalOffset)))
.and(_.id <= maxTimeuuid(start.plusSeconds(-2 * intervalOffset)))
.fetch()
} yield one
whenReady(chain) { res =>
res.size shouldEqual 0
}
}
it should "not retrieve anything for a mismatched selection time window using OffsetDateTime" in {
val intervalOffset = 60
val now = OffsetDateTime.now()
val start = now.plusSeconds(-intervalOffset)
val user = UUIDs.random()
// I will repent for my sins in the future, I'm sorry Ben.
val records = genList[TimeUUIDRecord]()
.map(_.copy(
user = user,
id = now.plusSeconds(
Gen.choose(
-intervalOffset,
intervalOffset
).sample.value
).timeuuid)
)
val chain = for {
_ <- database.timeuuidTable.storeRecords(records)
records <- database.timeuuidTable.select
.where(_.user eqs user)
.and(_.id >= minTimeuuid(start.plusSeconds(-3 * intervalOffset)))
.and(_.id <= maxTimeuuid(start.plusSeconds(-2 * intervalOffset)))
.fetch()
} yield records
whenReady(chain) { res =>
res.size shouldEqual 0
}
}
}
| outworkers/phantom | phantom-dsl/src/test/scala/com/outworkers/phantom/jdk8/Jdk8TimeUUIDTests.scala | Scala | apache-2.0 | 12,127 |
/**
* Copyright (c) 2014, MoonGene. All rights reserved.
*
* This source code is licensed under the GPL license found in the
* LICENSE_GPL file in the root directory of this source tree. An alternative
* commercial license is also available upon request.
*/
package controllers
import play.api.mvc.{Action, Controller}
import services.{EmailService, EmailMessage}
import play.api.data._
import play.api.data.Forms._
import org.apache.commons.codec.digest.DigestUtils
import play.api.libs.json.Json
import play.api.Play
/*
Email controller:
- send emails
*/
object Email extends Controller {
val secretKey = Play.current.configuration.getString("application.secretKey").getOrElse("")
val form: Form[EmailMessage] = Form(
mapping(
"to" -> nonEmptyText,
"from" -> nonEmptyText,
"replyTo" -> optional(text),
"cc" -> optional(text),
"topic" -> nonEmptyText,
"body" -> nonEmptyText,
"template" -> optional(text)
)(EmailMessage.apply)(EmailMessage.unapply)
)
def send(hash: String) = Action { implicit request =>
form.bindFromRequest.fold(
formWithErrors => BadRequest,
message => {
if (!hash.equals(DigestUtils.md5Hex(message.to + secretKey))) {
Unauthorized(Json.obj("code" -> 1, "message" -> "Unauthorized"))
} else {
EmailService.sendEmail(message)
Ok
}
}
)
}
}
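// A hedged usage sketch of the hash check above: a caller computes the same MD5 over
// the recipient address plus the shared secret. The URL pattern below is an assumption,
// since the actual route mapping is defined outside this file.
object EmailHashSketch {
  def sendUrl(to: String, secretKey: String): String =
    s"/email/send/${DigestUtils.md5Hex(to + secretKey)}"
}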
| MoonGene/Analytics | src/moon/app/controllers/Email.scala | Scala | gpl-3.0 | 1,417 |
package com.intenthq.pucket.writer
import com.intenthq.pucket.Pucket
import scalaz.\/
import scalaz.syntax.either._
/** Incremental functional writer class for parquet
*
* ==Overview==
* Allows large files to be written to Parquet in smaller increments.
* Closes the underlying writer and creates a new file once a defined
* write count is reached.
*
* Also keeps a checkpoint of when the last file was successfully
* flushed to the filesystem, so that in the event of an error,
* processing can be restarted from that point.
*
* @tparam T type of data to be written
*/
case class IncrementalWriter[T] private (override val checkPoint: Long,
writeCounter: Long,
writer: Writer[T, Throwable],
pucket: Pucket[T],
maxWrites: Long) extends Writer[T, (Long, Throwable)] {
type Error = (Long, Throwable)
/** Write data incrementally
*
* @param data the data to be written
* @param newCheckPoint the current progress of processing in the subject file
* @return the writer instance or an error message with the checkpoint if the write fails
*/
def write(data: T, newCheckPoint: Long): Error \/ IncrementalWriter[T] = {
if (writeCounter < maxWrites) append(data)
else for {
_ <- close
newWriter <- pucket.writer.leftMap((checkPoint, _))
_ <- newWriter.write(data).leftMap((checkPoint, _))
} yield IncrementalWriter[T](newCheckPoint, 1, newWriter, pucket, maxWrites)
}
/** @inheritdoc */
def close: Error \/ Unit =
writer.close.leftMap((checkPoint, _))
private def append(data: T): Error \/ IncrementalWriter[T] =
writer.write(data).fold(
th => (checkPoint, th).left,
_ => IncrementalWriter[T](checkPoint, writeCounter + 1, writer, pucket, maxWrites).right
)
}
/** Factory object for [[com.intenthq.pucket.writer.IncrementalWriter]] */
object IncrementalWriter {
/** Create a new instance of incremental writer
*
* @param checkPoint the checkpoint progress through the file
* @param pucket a pucket instance from which to obtain underlying writers
* @param maxWrites the maximum number of writes before
* the current parquet file gets rolled
* @tparam T type of data to be written
* @return a new writer instance, or an error
* with the checkpoint if creating the writer fails
*/
def apply[T](checkPoint: Long,
pucket: Pucket[T],
maxWrites: Long): (Long, Throwable) \/ IncrementalWriter[T] =
pucket.writer.
fold(th => (checkPoint, th).left, IncrementalWriter(checkPoint, 0, _, pucket, maxWrites).right)
}
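// A minimal, hedged usage sketch of the incremental writer above; `examplePucket`
// and `exampleRecords` are hypothetical stand-ins for a real Pucket and data set,
// and the maxWrites value of 10000 is an arbitrary assumption.
object IncrementalWriterUsageSketch {
  type Error = (Long, Throwable)

  def writeAll[T](examplePucket: Pucket[T], exampleRecords: Seq[T]): Error \/ Unit =
    IncrementalWriter(0L, examplePucket, maxWrites = 10000L).flatMap { initial =>
      exampleRecords.zipWithIndex
        .foldLeft(initial.right[Error]) { case (acc, (record, index)) =>
          // the checkpoint records how far processing got, so a failed run can resume
          acc.flatMap(_.write(record, newCheckPoint = index.toLong))
        }
        .flatMap(_.close)
    }
}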
| intenthq/pucket | core/src/main/scala/com/intenthq/pucket/writer/IncrementalWriter.scala | Scala | mit | 2,748 |
package org.cddcore.rendering.view
import org.cddcore.enginecomponents.Scenario
import org.cddcore.rendering.{ReferenceMapMakers, RenderContext}
class ScenarioView(linkView: LinkView, exceptionView: ExceptionView) extends View[Scenario[_, _]] with ReferenceMapMakers {
import View._
override def apply(s: Scenario[_, _])(implicit renderContext: RenderContext): Map[String, Object] = {
val raw = Map(
Id -> renderContext.idPath(s),
Type -> findTypeName(s),
linkKey -> linkView(s),
Title -> s.title,
summaryKey -> renderContext.displayProcessor.summary(s),
Comment -> s.comment.getOrElse(""),
situationKey -> renderContext.displayProcessor.html(s.situation),
expectedKey -> s.expectedOption.map(expected => renderContext.displayProcessor.html(expected)).getOrElse("<Not Known>"),
References -> s.references.map(referenceToMap(renderContext)))
renderContext.exceptions.get(s).fold(raw)(e => raw + (Error -> exceptionView(e)))
}
}
| phil-rice/CddCore2 | module/rendering/src/main/scala/org/cddcore/rendering/view/ScenarioView.scala | Scala | bsd-2-clause | 1,026 |
/*
* This file is part of the diffson project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package diffson
import lcs.Lcs
import cats._
import scala.language.higherKinds
package object jsonmergepatch {
implicit def JsonMergePatchPatch[F[_], Json](implicit F: MonadError[F, Throwable], Json: Jsony[Json]): Patch[F, Json, JsonMergePatch[Json]] =
new Patch[F, Json, JsonMergePatch[Json]] {
def apply(json: Json, patch: JsonMergePatch[Json]): F[Json] =
patch[F](json)
}
implicit def JsonMergeDiffDiff[Json: Jsony]: Diff[Json, JsonMergePatch[Json]] =
new JsonMergeDiff[Json]
}
| gnieh/diffson | core/src/main/scala/diffson/jsonmergepatch/package.scala | Scala | apache-2.0 | 1,110 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.storage.hbase
import java.lang.Integer.valueOf
import java.nio.charset.StandardCharsets
import java.util.concurrent.Callable
import net.bytebuddy.ByteBuddy
import net.bytebuddy.description.modifier.Visibility.PUBLIC
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy
import net.bytebuddy.implementation.FieldAccessor
import net.bytebuddy.implementation.MethodDelegation.to
import net.bytebuddy.implementation.bind.annotation.{SuperCall, This}
import net.bytebuddy.matcher.ElementMatchers._
import org.apache.commons.io.IOUtils
import org.hbase.async._
import org.objectweb.asm.Opcodes.{ACC_FINAL, ACC_PRIVATE, ACC_PROTECTED, ACC_PUBLIC}
import org.objectweb.asm._
import scala.collection.JavaConversions._
/**
* Upon initialization, it loads a patched version of Asynchbase's Scanner class,
* modified using ASM so that the class is no longer final and all of its methods are public,
* which allows ByteBuddy to create subclasses of it.
*
* This object has to be initialized before any access to (i.e. any classloading of) Asynchbase,
* since the ClassLoader does not allow redefining already loaded classes unless we use instrumentation.
*/
object AsynchbasePatcher {
/** invoking this method will force the classloading of this object, thus triggering the patch mechanism below */
def init(): Unit = {
val methods = scannerClass.getMethods.map(_.getName)
assert(methods.contains("getRpcTimeout"))
assert(methods.contains("setRpcTimeout"))
}
/** instantiate a new Scanner, patched to support RPC timeout */
def newScanner(client: HBaseClient, table: Array[Byte]): ScannerExtra = {
val constructor = scannerClass.getConstructor(classOf[HBaseClient], BA)
constructor.setAccessible(true)
constructor.newInstance(client, table).asInstanceOf[ScannerExtra]
}
/** instantiate a new Scanner, patched to support RPC timeout */
def newScanner(client: HBaseClient, table: String): ScannerExtra = {
newScanner(client, table.getBytes(StandardCharsets.UTF_8))
}
trait RpcTimeout {
def getRpcTimeout: Int
def setRpcTimeout(timeout: Int): Unit
}
type ScannerExtra = Scanner with RpcTimeout
val interceptor = new Object() {
def getNextRowsRequest(@This scanner: ScannerExtra, @SuperCall getNextRowsRequest: Callable[HBaseRpc]): HBaseRpc = {
val request = getNextRowsRequest.call()
val rpcTimeout = scanner.getRpcTimeout
if (rpcTimeout > 0) {
request.setTimeout(rpcTimeout)
}
request
}
}
private val BA = classOf[Array[Byte]]
private val classLoader = getClass.getClassLoader
private val defineClass = classOf[ClassLoader].getDeclaredMethod("defineClass", classOf[String], BA, classOf[Int], classOf[Int])
/** a java.lang.Class instance for the patched Scanner class */
private val scannerClass = {
new ByteBuddy()
.subclass(loadClass("Scanner"))
.name("org.hbase.async.ScannerEx")
.implement(classOf[RpcTimeout]).intercept(FieldAccessor.ofBeanProperty())
.defineField("rpcTimeout", classOf[Int], PUBLIC)
.method(named("getNextRowsRequest")).intercept(to(interceptor))
.make.load(classLoader, ClassLoadingStrategy.Default.INJECTION).getLoaded
}
/** Loads Asynchbase classes from s2core's classpath.
* *MUST* be called before any other access to those classes,
* otherwise classloading will fail with an "attempted duplicate class definition" error.
**/
private def loadClass(name: String): Class[_] = {
classLoader.getResources(s"org/hbase/async/$name.class").toSeq.headOption match {
case Some(url) =>
val stream = url.openStream()
val bytes = try { IOUtils.toByteArray(stream) } finally { stream.close() }
// patch the bytecode so that the class is no longer final and the methods are all accessible
val cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES)
new ClassReader(bytes).accept(new ClassAdapter(cw) {
override def visit(version: Int, access: Int, name: String, signature: String, superName: String, interfaces: Array[String]): Unit = {
super.visit(version, access & ~ACC_FINAL, name, signature, superName, interfaces)
}
override def visitMethod(access: Int, name: String, desc: String, signature: String, exceptions: Array[String]): MethodVisitor = {
super.visitMethod(access & ~ACC_PRIVATE & ~ACC_PROTECTED & ~ACC_FINAL | ACC_PUBLIC, name, desc, signature, exceptions)
}
}, 0)
val patched = cw.toByteArray
defineClass.setAccessible(true)
defineClass.invoke(classLoader, s"org.hbase.async.$name", patched, valueOf(0), valueOf(patched.length)).asInstanceOf[Class[_]]
case None =>
throw new ClassNotFoundException(s"Could not find Asynchbase class: $name")
}
}
}
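// A hedged usage sketch of the patcher above; the ZooKeeper quorum address and the
// table name are assumptions, and a reachable HBase cluster is required to run it.
object AsynchbasePatcherUsageSketch {
  def main(args: Array[String]): Unit = {
    AsynchbasePatcher.init() // force the patch before any other Asynchbase classloading
    val client = new HBaseClient("localhost")
    val scanner = AsynchbasePatcher.newScanner(client, "test-table")
    scanner.setRpcTimeout(5000) // setter added to the patched Scanner subclass
    println(s"scanner RPC timeout: ${scanner.getRpcTimeout}")
    client.shutdown()
  }
}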
| daewon/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/storage/hbase/AsynchbasePatcher.scala | Scala | apache-2.0 | 5,659 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.python
import org.apache.spark.api.python.PythonFunction
import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.expressions.{Expression, PythonUDF}
import org.apache.spark.sql.types.DataType
/**
* A user-defined Python function. This is used by the Python API.
*/
case class UserDefinedPythonFunction(
name: String,
func: PythonFunction,
dataType: DataType,
pythonEvalType: Int,
udfDeterministic: Boolean) {
def builder(e: Seq[Expression]): Expression = {
PythonUDF(name, func, dataType, e, pythonEvalType, udfDeterministic)
}
/** Returns a [[Column]] that will evaluate to calling this UDF with the given input. */
def apply(exprs: Column*): Column = {
val udf = builder(exprs.map(_.expr))
Column(udf)
}
}
| ueshin/apache-spark | sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonFunction.scala | Scala | apache-2.0 | 1,613 |
/**
* (c) 2012 Mark Lister
*
* This software is licenced under the Apache Licence 2.0
*
* Quick and dirty FreeDns.afraid.org client -- written because inadyn-mt wasted several hours of my life
* This aims to support ip6 / teredo / miredo
*/
package org.catch22.freedns
import java.net.NetworkInterface
import java.net.URL
import java.lang.IllegalArgumentException
import scala.io.Source
import scala.collection.JavaConversions._
object FreeDns extends optional.Application{
val defaultUrl="http://freedns.afraid.org/dynamic/update.php?%1$s&address=%2$s"
def main(hashCode:String,url:Option[String],address:Option[String], verbose:Boolean=false): Unit = {
//optionally sniff the teredo address
val raw=NetworkInterface.getNetworkInterfaces.filter(_.getName.startsWith("teredo"))
.flatMap(_.getInterfaceAddresses).map(_.getAddress)
.filter(_.toString.startsWith("/2001")).mkString
val e = raw.lastIndexOf("%")
val teredoAddr=raw.substring(1, e)
if (verbose) println ("Sniffed teredo addr:"+teredoAddr)
val u= new URL(url getOrElse(defaultUrl) format (hashCode, address.getOrElse(teredoAddr)))
if(verbose) println ("url:"+u)
val con=u.openConnection
con.connect
val result = Source.fromInputStream(con.getInputStream).getLines.mkString
println(result)
}
}
| marklister/teredo-ddns-client | src/main/scala/FreeDns.scala | Scala | apache-2.0 | 1,392 |
package filodb.coordinator
import org.velvia.filo.RowReader
import filodb.core._
import filodb.core.metadata.{Dataset, DataColumn, Projection}
// Public, external Actor/Akka API for NodeCoordinatorActor, so every incoming command should be a NodeCommand
sealed trait NodeCommand
sealed trait NodeResponse
object DatasetCommands {
/**
* Creates a new dataset with columns and a default projection.
* @param dataset the Dataset object
* @param columns DataColumns to create for that dataset. Must include partition and row key columns, at a
* minimum. Computed columns can be left out.
* @param database optionally, the database/keyspace to create the dataset in
*/
case class CreateDataset(dataset: Dataset,
columns: Seq[DataColumn],
database: Option[String] = None) extends NodeCommand
case object DatasetCreated extends Response with NodeResponse
case object DatasetAlreadyExists extends Response with NodeResponse
case class DatasetError(msg: String) extends ErrorResponse with NodeResponse
/**
* Truncates all data from a projection of a dataset. Waits for any pending flushes from said
* dataset to finish first, and also clears the columnStore cache for that dataset.
*/
case class TruncateProjection(projection: Projection, version: Int) extends NodeCommand
case object ProjectionTruncated extends NodeResponse
/**
* Drops all versions/projections of a dataset from both the column store and metastore.
*/
case class DropDataset(dataset: DatasetRef) extends NodeCommand
case object DatasetDropped extends NodeResponse
}
object IngestionCommands {
/**
* Sets up ingestion for a given dataset, version, and schema of columns.
* The dataset and columns must have been previously defined.
*
* @return BadSchema if the partition column is unsupported, sort column invalid, etc.
*/
case class SetupIngestion(dataset: DatasetRef,
schema: Seq[String],
version: Int) extends NodeCommand
case object IngestionReady extends NodeResponse
case object UnknownDataset extends ErrorResponse with NodeResponse
case class UndefinedColumns(undefined: Set[String]) extends ErrorResponse with NodeResponse
case class BadSchema(message: String) extends ErrorResponse with NodeResponse
/**
* Ingests a new set of rows for a given dataset and version.
* The partitioning column and sort column are set up in the dataset.
*
* @param seqNo the sequence number to be returned for acknowledging the entire set of rows
* @return Ack(seqNo) returned when the set of rows has been committed to the MemTable.
*/
case class IngestRows(dataset: DatasetRef,
version: Int,
rows: Seq[RowReader],
seqNo: Long) extends NodeCommand
case class Ack(seqNo: Long) extends NodeResponse
/**
* Initiates a flush of the remaining MemTable rows of the given dataset and version.
* Usually used when at the end of ingesting some large blob of data.
* @return Flushed when the flush cycle has finished successfully, committing data to the column store.
*/
case class Flush(dataset: DatasetRef, version: Int) extends NodeCommand
case object Flushed extends NodeResponse
case object FlushIgnored extends NodeResponse
/**
* Checks to see if the DatasetCoordActor is ready to take in more rows. Usually sent when an actor
* is in a wait state.
*/
case class CheckCanIngest(dataset: DatasetRef, version: Int) extends NodeCommand
case class CanIngest(can: Boolean) extends NodeResponse
/**
* Gets the latest ingestion stats from the DatasetCoordinatorActor
*/
case class GetIngestionStats(dataset: DatasetRef, version: Int) extends NodeCommand
}
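// A hedged sketch of the ingestion hand-shake described by the messages above. The
// coordinator ActorRef, the column names and the sequence number are all assumptions.
object IngestionFlowSketch {
  import akka.actor.ActorRef
  import IngestionCommands._

  def ingestBatch(coordinator: ActorRef, dataset: DatasetRef, rows: Seq[RowReader]): Unit = {
    // declare the dataset, schema and version before any rows are sent (expect IngestionReady)
    coordinator ! SetupIngestion(dataset, Seq("timestamp", "value"), version = 0)
    // push one batch of rows; Ack(seqNo) is returned once they are committed to the MemTable
    coordinator ! IngestRows(dataset, 0, rows, seqNo = 1L)
    // flush whatever remains in the MemTable at the end of the data (expect Flushed)
    coordinator ! Flush(dataset, version = 0)
  }
}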
| markhamstra/FiloDB | coordinator/src/main/scala/filodb.coordinator/NodeCommands.scala | Scala | apache-2.0 | 3,849 |
package miniconf.server
import akka.event.LoggingAdapter
import akka.http.scaladsl.model._
import StatusCodes._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.unmarshalling._
import akka.actor._
import miniconf.server.persistence.MiniConfPersistActor.{DeleteCmd, Cmd}
import miniconf.server.persistence.{MiniConfPersistActor}
import miniconf.server.replicator.ReplicatedService
import spray.json.DefaultJsonProtocol
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json._
/**
* Created by netcomm on 2015/11/10.
*/
object MainHttpService extends SprayJsonSupport {
import akka.pattern.ask
import DefaultJsonProtocol._
import ServerService._
implicit val OneConfItemWireFormat = jsonFormat4(OneConfItem.apply)
implicit val RetrieveOneConfItemWireFormat = jsonFormat2(RetrieveOneConfItem.apply)
implicit val CheckDataModifyWireFormat = jsonFormat3(CheckDataModify.apply)
implicit val GetPaginationWireFormat = jsonFormat3(GetPagination.apply)
implicit val DeleteCmdWireFormat = jsonFormat3(DeleteCmd.apply)
implicit val onePaginationInfoFormat = jsonFormat2(PaginationInfo)
object ServerService
{
case class OneConfItem(group: String, key: String, value: String, timestamp: String)
{
def getKey: String = {group + "_" + key}
}
case class RetrieveOneConfItem(group: String, key: String)
case class CheckDataModify(group: String, key: String, value: String)
case class GetPagination(curPageNum: String, searchGroup: String, searchKey: String)
case class PaginationInfo(totalPageNum: Int, oneBatchItems: List[Array[String]])
def generateConfItemKey(group: String, key: String): String = {group + "_" + key}
/*implicit val retrieveOneConfItemUM: FromRequestUnmarshaller[RetrieveOneConfItem] = ???
implicit val checkDataModifyUM: FromRequestUnmarshaller[CheckDataModify] = ???
implicit val deleteCmdUM: FromRequestUnmarshaller[DeleteCmd] = ???
implicit val paginationInfoUM: FromRequestUnmarshaller[PaginationInfo] = ???
implicit val getPaginationUM: FromRequestUnmarshaller[GetPagination] = ???
implicit val oneConfItemUM: FromRequestUnmarshaller[OneConfItem] = ???*/
}
def props(interface: String, port: Int): Props =
Props(new MainHttpService(interface, port))
private def route(miniConfPersistActorParm: ActorRef, replicatedServiceActor : ActorRef,
log : LoggingAdapter) = {
import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller
import akka.http.scaladsl.server.Directives._
import akka.util.Timeout
import scala.concurrent.duration._
implicit val timeout = Timeout(3 seconds)
def assets = getFromResourceDirectory("web") ~ pathSingleSlash(getFromResource("web/index.html"))
def saveOneConfItemFlow =
path("miniConf" / "saveOneConfItem") {
post {
entity(as[OneConfItem]) {oneConfItemParm =>
onSuccess(miniConfPersistActorParm ? Cmd(oneConfItemParm)) {
case "" => complete(HttpResponse(entity = ""))
case errorStr : String if errorStr != "" =>
{
complete(HttpResponse(InternalServerError, entity = "saveOneConfItemFromGet occur error! "+errorStr))
}
}
}
}
}
def getOneConfItemFlow =
path("miniConf" / "getOneConfItem") {
get {
parameters('group.as[String], 'key.as[String]).as(RetrieveOneConfItem) {retrieveOneConfItemParm =>
log.info("1 retrieveOneConfItem " + retrieveOneConfItemParm)
onSuccess(replicatedServiceActor ? retrieveOneConfItemParm) {
case retOneConfItemParm: OneConfItem =>
log.info("retValueParm " + retOneConfItemParm)
complete(HttpResponse(entity = retOneConfItemParm.asInstanceOf[OneConfItem].value))
case None =>
complete(HttpResponse(NotFound, entity = "not find one!"))
}
}
}
}
def checkDataModifyFlow =
path("miniConf" / "checkDataModify") {
get {
parameters('group.as[String], 'key.as[String],
'value.as[String]).as(CheckDataModify) { checkDataModifyParm =>
log.info("1 checkDataModify " + checkDataModifyParm)
onSuccess(replicatedServiceActor ? checkDataModifyParm) {
case retValueParm : String =>
complete(HttpResponse(entity = retValueParm.asInstanceOf[String]))
}
}
}
}
def getCurPaginationFlow =
path("miniConf" / "getCurPagination") {
post {
entity(as[GetPagination]) { getPaginationParm =>
log.info("1 getPagination " + getPaginationParm)
onSuccess(replicatedServiceActor ? getPaginationParm) {
case pageInfo : PaginationInfo =>
complete(HttpResponse(entity = pageInfo.toJson.compactPrint))
}
}
}
}
def deleteOneConfItemFlow =
path("miniConf" / "deleteOneConfItem") {
post {
entity(as[DeleteCmd]) { deleteCmdParm =>
log.info("1 deleteCmd " + deleteCmdParm)
onSuccess(miniConfPersistActorParm ? deleteCmdParm) {
case "" => complete("")
case errorStr : String if errorStr != "" =>
{
log.info("deleteOneConfItem occur error! "+errorStr)
complete(HttpResponse(InternalServerError, entity = "deleteOneConfItem occur error! "+errorStr))
}
}
}
}
}
assets ~ saveOneConfItemFlow ~ getOneConfItemFlow ~ checkDataModifyFlow ~ getCurPaginationFlow ~ deleteOneConfItemFlow
}
}
class MainHttpService(interface: String, port: Int) extends Actor with ActorLogging {
import MainHttpService._
import context.dispatcher
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
implicit val system = context.system
implicit val materializer = ActorMaterializer()
val replicatedServiceActor = context.actorOf(Props[ReplicatedService], name = "replicatedServiceActor")
val miniConfPersistActor = context.actorOf(
Props(classOf[MiniConfPersistActor], replicatedServiceActor), "miniConfPersistActor")
Http().bindAndHandle(Route.handlerFlow(route(miniConfPersistActor, replicatedServiceActor, log)), interface, port)
override def receive = {
case Http.ServerBinding(address) => log.info("Listening on {}", address)
case Status.Failure(_) => context.stop(self)
}
}
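// A hedged client-side sketch for the getOneConfItem route defined above; the host,
// port and the group/key values are assumptions about a locally running instance.
object MiniConfClientSketch extends App {
  import akka.actor.ActorSystem
  import akka.http.scaladsl.Http
  import akka.stream.ActorMaterializer

  implicit val system = ActorSystem("miniconf-client-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  Http().singleRequest(
    HttpRequest(uri = "http://localhost:8080/miniConf/getOneConfItem?group=app&key=timeout")
  ).foreach(response => println(s"status: ${response.status}"))
}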
| netcomm/miniconf | src/main/scala/miniconf/server/MainHttpService.scala | Scala | apache-2.0 | 6,582 |
package com.typeassimilation.scala
import org.eclipse.gef4.zest.fx.ui.parts.ZestFxUiView
import com.google.inject.Guice
import com.google.inject.util.Modules
import org.eclipse.gef4.zest.fx.ZestFxModule
import org.eclipse.gef4.zest.fx.ui.ZestFxUiModule
import com.typeassimilation.model.Model
import scala.collection.mutable
import collection.JavaConversions._
import org.eclipse.gef4.graph.{ Edge, Node, Graph }
import org.eclipse.gef4.zest.fx.ZestProperties
import scalafx.Includes._
import scalafx.collections.ObservableBuffer
import scalafx.collections.ObservableBuffer.{ Add, Remove, Reorder, Update }
import com.typeassimilation.model.ModelPesistence
import java.io.File
import org.eclipse.gef4.layout.algorithms.SpringLayoutAlgorithm
import scalafx.collections.ObservableSet
import com.typeassimilation.model.DataType
import com.typeassimilation.model.Assimilation
class MultiEdge(initInboundEdges: Seq[Edge], initNode: Node, initOutboundEdges: Seq[Edge]) {
val inboundEdges = mutable.ListBuffer(initInboundEdges: _*)
var node = initNode
val outboundEdges = mutable.ListBuffer(initOutboundEdges: _*)
def edges = inboundEdges.toSeq ++ outboundEdges
}
class ScalalalaActualView extends ZestFxUiView(Guice.createInjector(Modules.`override`(new ZestFxModule()).`with`(new ZestFxUiModule()))) {
val model = ModelPesistence.readDirectory(new File("C:/eclipse-workspaces/luna-experiment/type-assimilation-testing"))
val typeNameToNodeMap = mutable.Map.empty[String, Node]
def toNode(dt: DataType) = {
val node = new Node
ZestProperties.setLabel(node, dt.name)
node
}
def toMultiEdge(a: Assimilation): MultiEdge = {
val node = new Node
???
}
def name(n: Node) = n.getAttrs.apply(ZestProperties.ELEMENT_LABEL).toString
val assimilationNameToEdgeMap = mutable.Map.empty[String, MultiEdge]
val graph = {
val nodes = model.dataTypes.map(toNode(_))
typeNameToNodeMap ++= nodes.map(n => name(n) -> n).toMap
val edges = model.dataTypes.flatMap(_.assimilations).map(toMultiEdge(_))
assimilationNameToEdgeMap ++= edges.map(me => name(me.node) -> me)
val graphAttributes = Map(
ZestProperties.GRAPH_TYPE -> ZestProperties.GRAPH_TYPE_DIRECTED,
ZestProperties.GRAPH_LAYOUT -> new SpringLayoutAlgorithm)
new Graph(graphAttributes, nodes.toSeq ++ edges.map(_.node), edges.flatMap(_.edges).toSeq)
}
setGraph(graph)
}
| kalgoodman/type-assimilation | com.typeassimilation.zest.examples.graph.ui/src/com/typeassimilation/scala/ScalalalaActualView.scala | Scala | epl-1.0 | 2,391 |
package com.typesafe.sbt
package packager
package debian
import sbt._
import sbt.Keys.{name, packageBin, sourceDirectory, streams, target, version, TaskStreams}
import packager.Keys._
import packager.Hashing
import linux.LinuxPlugin.autoImport.{
daemonShell,
linuxPackageMappings,
linuxPackageSymlinks,
linuxScriptReplacements,
packageArchitecture
}
import linux.{LinuxFileMetaData, LinuxPackageMapping, LinuxSymlink}
import linux.LinuxPlugin.Users
import universal.Archives
import archetypes.TemplateWriter
import SbtNativePackager.{Linux, Universal}
/**
* == Debian Plugin ==
*
* This plugin provides the ability to build ''.deb'' packages.
*
* == Configuration ==
*
* In order to configure this plugin take a look at the available [[com.typesafe.sbt.packager.debian.DebianKeys]]
*
* @example Enable the plugin in the `build.sbt`. By default this will use
* the native debian packaging implementation [[com.typesafe.sbt.packager.debian.DebianNativePackaging]].
* {{{
* enablePlugins(DebianPlugin)
* }}}
*
*/
object DebianPlugin extends AutoPlugin with DebianNativePackaging {
override def requires = linux.LinuxPlugin
object autoImport extends DebianKeys {
val Debian = config("debian") extend Linux
val DebianConstants = Names
}
import autoImport._
/** Debian constants */
object Names {
val DebianSource = "debian"
val DebianMaintainerScripts = "DEBIAN"
//maintainer script names
val Postinst = "postinst"
val Postrm = "postrm"
val Prerm = "prerm"
val Preinst = "preinst"
val Control = "control"
val Conffiles = "conffiles"
val Changelog = "changelog"
val Files = "files"
}
val CHOWN_REPLACEMENT = "chown-paths"
override def projectConfigurations: Seq[Configuration] = Seq(Debian)
// TODO maybe we can put settings/debiansettings together
/**
* Enables native packaging by default
*/
override lazy val projectSettings = settings ++ debianSettings ++ debianNativeSettings
/**
* the default debian settings for the debian namespaced settings
*/
private def settings = Seq(
/* ==== Debian default settings ==== */
debianPriority := "optional",
debianSection := "java",
debianPackageConflicts := Seq.empty,
debianPackageDependencies := Seq.empty,
debianPackageProvides := Seq.empty,
debianPackageRecommends := Seq.empty,
debianSignRole := "builder",
target in Debian <<= (target, name in Debian, version in Debian) apply ((t, n, v) => t / (n + "-" + v)),
name in Debian <<= (name in Linux),
// TODO maybe we can remove this, with the projectConfigurations
maintainerScripts in Debian <<= (maintainerScripts in Linux),
packageName in Debian <<= (packageName in Linux),
executableScriptName in Debian <<= (executableScriptName in Linux),
version in Debian <<= (version in Linux),
linuxPackageMappings in Debian <<= linuxPackageMappings,
packageDescription in Debian <<= packageDescription in Linux,
packageSummary in Debian <<= packageSummary in Linux,
maintainer in Debian <<= maintainer in Linux,
// override the linux sourceDirectory setting
sourceDirectory in Debian <<= sourceDirectory,
/* ==== Debian configuration settings ==== */
debianControlScriptsDirectory <<= (sourceDirectory) apply (_ / "debian" / Names.DebianMaintainerScripts),
debianMaintainerScripts := Seq.empty,
debianMakePreinstScript := None,
debianMakePrermScript := None,
debianMakePostinstScript := None,
debianMakePostrmScript := None,
debianChangelog := None,
/* === new debian scripts implementation */
maintainerScripts in Debian := {
val replacements = (linuxScriptReplacements in Debian).value
val scripts = Map(
Names.Prerm -> defaultMaintainerScript(Names.Prerm).toSeq.flatten,
Names.Preinst -> defaultMaintainerScript(Names.Preinst).toSeq.flatten,
Names.Postinst -> defaultMaintainerScript(Names.Postinst).toSeq.flatten,
Names.Postrm -> defaultMaintainerScript(Names.Postrm).toSeq.flatten
)
// this is for legacy purposes to keep old behaviour
// --- legacy starts
def readContent(scriptFiles: Seq[(File, String)]): Map[String, Seq[String]] =
scriptFiles.map {
case (scriptFile, scriptName) =>
scriptName -> IO.readLines(scriptFile)
}.toMap
val userProvided = readContent(
Seq(
debianMakePreinstScript.value.map(script => script -> Names.Preinst),
debianMakePostinstScript.value.map(script => script -> Names.Postinst),
debianMakePrermScript.value.map(script => script -> Names.Prerm),
debianMakePostrmScript.value.map(script => script -> Names.Postrm)
).flatten
)
// these things get appended. Don't check for nonexistent keys as they are already in the default scripts map
val appendedScripts = scripts.map {
case (scriptName, content) =>
scriptName -> (content ++ userProvided.getOrElse(scriptName, Nil))
}
// override and merge with the user defined scripts. Will change in the future
val controlScriptsDir = debianControlScriptsDirectory.value
val overridenScripts = scripts ++ readContent(
Seq(
scriptMapping(Names.Prerm, debianMakePrermScript.value, controlScriptsDir),
scriptMapping(Names.Preinst, debianMakePreinstScript.value, controlScriptsDir),
scriptMapping(Names.Postinst, debianMakePostinstScript.value, controlScriptsDir),
scriptMapping(Names.Postrm, debianMakePostrmScript.value, controlScriptsDir)
).flatten
)
// --- legacy ends
// TODO remove the overridenScripts
val content = appendedScripts ++ overridenScripts
// apply all replacements
content.mapValues { lines =>
TemplateWriter.generateScriptFromLines(lines, replacements)
}
},
debianMaintainerScripts := generateDebianMaintainerScripts(
(maintainerScripts in Debian).value,
(linuxScriptReplacements in Debian).value,
(target in Universal).value
),
debianNativeBuildOptions := Nil
)
// format: off
/**
* == Debian scoped settings ==
* Everything used inside the debian scope
*
*/
private def debianSettings: Seq[Setting[_]] = inConfig(Debian)(
Seq(
packageArchitecture := "all",
debianPackageInfo <<= (packageName,
version,
maintainer,
packageSummary,
packageDescription) apply PackageInfo,
debianPackageMetadata <<= (debianPackageInfo,
debianPriority,
packageArchitecture,
debianSection,
debianPackageConflicts,
debianPackageDependencies,
debianPackageProvides,
debianPackageRecommends) apply PackageMetaData,
debianPackageInstallSize <<= linuxPackageMappings map { mappings =>
(for {
LinuxPackageMapping(files, _, zipped) <- mappings
(file, _) <- files
if !file.isDirectory && file.exists
// TODO - If zipped, heuristically figure out a reduction factor.
} yield file.length).sum / 1024
},
debianControlFile <<= (debianPackageMetadata,
debianPackageInstallSize,
target) map { (data, size, dir) =>
if (data.info.description == null || data.info.description.isEmpty) {
sys.error(
"""packageDescription in Debian cannot be empty. Use
packageDescription in Debian := "My package Description""""
)
}
val cfile = dir / Names.DebianMaintainerScripts / Names.Control
IO.write(cfile,
data.makeContent(size),
java.nio.charset.Charset.defaultCharset)
chmod(cfile, "0644")
cfile
},
debianConffilesFile <<= (linuxPackageMappings, target) map {
(mappings, dir) =>
val cfile = dir / Names.DebianMaintainerScripts / Names.Conffiles
val conffiles = for {
LinuxPackageMapping(files, meta, _) <- mappings
if meta.config != "false"
(file, name) <- files
if file.isFile
} yield name
IO.writeLines(cfile, conffiles)
chmod(cfile, "0644")
cfile
},
debianMD5sumsFile <<= (debianExplodedPackage, target) map {
(mappings, dir) =>
val md5file = dir / Names.DebianMaintainerScripts / "md5sums"
val md5sums = for {
(file, name) <- (dir.*** --- dir pair relativeTo(dir))
if file.isFile
if !(name startsWith Names.DebianMaintainerScripts)
if !(name contains "debian-binary")
// TODO - detect symlinks with Java7 (when we can) rather than hackery...
if file.getCanonicalPath == file.getAbsolutePath
fixedName = if (name startsWith "/") name drop 1 else name
} yield Hashing.md5Sum(file) + " " + fixedName
IO.writeLines(md5file, md5sums)
chmod(md5file, "0644")
md5file
},
debianMakeChownReplacements <<= (linuxPackageMappings, streams) map makeChownReplacements,
debianExplodedPackage <<= (linuxPackageMappings,
debianControlFile,
debianMaintainerScripts,
debianConffilesFile,
debianChangelog,
linuxScriptReplacements,
debianMakeChownReplacements,
linuxPackageSymlinks,
target,
streams)
map {
(mappings, _, maintScripts, _, changelog, replacements, chown,
symlinks, t, streams) =>
// Create files and directories
mappings foreach {
case LinuxPackageMapping(paths, perms, zipped) =>
val (dirs, files) = paths.partition(_._1.isDirectory)
dirs map {
case (_, name) => t / name
} foreach { targetDir =>
targetDir mkdirs ()
chmod(targetDir, perms.permissions)
}
files map {
case (file, name) => (file, t / name)
} foreach {
case (source, target) =>
copyAndFixPerms(source, target, perms, zipped)
}
}
// Now generate relative symlinks
LinuxSymlink.makeSymLinks(symlinks, t, false)
// Put the maintainer files in `dir / "DEBIAN"` named as specified.
// Valid values for the name are preinst,postinst,prerm,postrm
for ((file, name) <- maintScripts) {
val targetFile = t / Names.DebianMaintainerScripts / name
copyAndFixPerms(file, targetFile, LinuxFileMetaData())
filterAndFixPerms(targetFile,
chown +: replacements,
LinuxFileMetaData())
}
t
},
// Replacement for ${{header}} as debian control scripts are bash scripts
linuxScriptReplacements += ("header" -> "#!/bin/sh\nset -e")
// Adding package specific implementation settings
)
)
// format: on
}
/**
* == Debian Helper Methods ==
*
* This trait provides a set of helper methods for debian packaging
* implementations.
*
* Most of the methods are for Java 6 file permission handling and
* Debian script adjustments.
*
*/
trait DebianPluginLike {
/** validate group and usernames for debian systems */
val UserNamePattern = "^[a-z][-a-z0-9_]*$".r
private[debian] final def generateDebianMaintainerScripts(scripts: Map[String, Seq[String]],
replacements: Seq[(String, String)],
tmpDir: File): Seq[(File, String)] =
scripts.map {
case (scriptName, content) =>
val scriptBits =
TemplateWriter.generateScriptFromLines(content, replacements)
val script = tmpDir / "tmp" / "debian" / scriptName
IO.write(script, scriptBits mkString "\n")
script -> scriptName
}.toList
private[debian] final def defaultMaintainerScript(name: String): Option[List[String]] = {
val url = Option(getClass getResource s"$name-template")
url.map(source => IO.readLinesURL(source))
}
private[debian] final def copyAndFixPerms(from: File,
to: File,
perms: LinuxFileMetaData,
zipped: Boolean = false): Unit = {
if (zipped) {
IO.withTemporaryDirectory { dir =>
val tmp = dir / from.getName
IO.copyFile(from, tmp)
val zipped = Archives.gzip(tmp)
IO.copyFile(zipped, to, true)
}
} else IO.copyFile(from, to, true)
// If we have a directory, we need to alter the perms.
chmod(to, perms.permissions)
// TODO - Can we do anything about user/group ownership?
}
private[debian] final def filterAndFixPerms(script: File,
replacements: Seq[(String, String)],
perms: LinuxFileMetaData): File = {
val filtered =
TemplateWriter.generateScript(script.toURI.toURL, replacements)
IO.delete(script)
IO.write(script, filtered)
chmod(script, perms.permissions)
script
}
private[debian] final def prependAndFixPerms(script: File, lines: Seq[String], perms: LinuxFileMetaData): File = {
val old = IO.readLines(script)
IO.writeLines(script, lines ++ old, append = false)
chmod(script, perms.permissions)
script
}
private[debian] final def appendAndFixPerms(script: File, lines: Seq[String], perms: LinuxFileMetaData): File = {
IO.writeLines(script, lines, append = true)
chmod(script, perms.permissions)
script
}
private[debian] final def createFileIfRequired(script: File, perms: LinuxFileMetaData): File = {
if (!script.exists()) {
script.createNewFile()
chmod(script, perms.permissions)
}
script
}
private[debian] final def validateUserGroupNames(user: String, streams: TaskStreams) {
if ((UserNamePattern findFirstIn user).isEmpty) {
streams.log.warn(
"The user or group '" + user + "' may contain invalid characters for Debian based distributions"
)
}
if (user.length > 32) {
streams.log.warn(
"The length of '" + user + "' must be not be greater than 32 characters for Debian based distributions."
)
}
}
@deprecated("Will be removed", "1.0.3")
private[debian] def scriptMapping(scriptName: String,
script: Option[File],
controlDir: File): Option[(File, String)] =
(script, controlDir) match {
// check if user defined script exists
case (_, dir) if (dir / scriptName).exists =>
Some(file((dir / scriptName).getAbsolutePath) -> scriptName)
// create mappings for generated script
case (scr, _) => scr.map(_ -> scriptName)
}
/**
* Debian assumes the application chowns the necessary files and directories in the
* control scripts (Pre/Postinst).
*
   * This method generates a replacement which can be inserted into a bash script to chown
   * all files which are not owned by root. While adding the chown commands it checks whether the users
   * and groups have valid names.
*
* @param mappings - all mapped files
* @param streams - logging
* @return (CHOWN_REPLACEMENT -> ".. list of chown commands")
*/
private[debian] def makeChownReplacements(mappings: Seq[LinuxPackageMapping],
streams: TaskStreams): (String, String) = {
// how to create the chownCmd. TODO maybe configurable?
def chownCmd(user: String, group: String)(path: String): String =
s"chown $user:$group $path"
val header = "# Chown definitions created by SBT Native Packager\\n"
// Check for non root user/group and create chown commands
// filter all root mappings, map to (user,group) key, group by, append everything
val chowns = mappings filter {
case LinuxPackageMapping(_, LinuxFileMetaData(Users.Root, Users.Root, _, _, _), _) =>
false
case _ => true
} map {
case LinuxPackageMapping(paths, meta, _) =>
(meta.user, meta.group) -> paths
} groupBy (_._1) map {
case ((user, group), pathList) =>
validateUserGroupNames(user, streams)
validateUserGroupNames(group, streams)
val chown = chownCmd(user, group) _
        // remove the key, flatten, and then use the mapping path (_._2) to create the chown command
pathList.map(_._2).flatten map (m => chown(m._2))
}
    val replacement = header :: chowns.flatten.toList mkString "\n"
DebianPlugin.CHOWN_REPLACEMENT -> replacement
}
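  // Added illustration (not part of the plugin): for a hypothetical mapping of /usr/share/app owned
  // by appuser:appgroup, the replacement value would contain the header line followed by
  // "chown appuser:appgroup /usr/share/app", one line per non-root path.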
private[debian] def archiveFilename(appName: String, version: String, arch: String): String =
appName + "_" + version + "_" + arch + ".deb"
private[debian] def changesFilename(appName: String, version: String, arch: String): String =
appName + "_" + version + "_" + arch + ".changes"
}
object DebianDeployPlugin extends AutoPlugin {
import DebianPlugin.autoImport._
override def requires = DebianPlugin
override def projectSettings =
SettingsHelper.makeDeploymentSettings(Debian, packageBin in Debian, "deb") ++
SettingsHelper.makeDeploymentSettings(Debian, genChanges in Debian, "changes")
}
|
kodemaniak/sbt-native-packager
|
src/main/scala/com/typesafe/sbt/packager/debian/DebianPlugin.scala
|
Scala
|
bsd-2-clause
| 18,116
|
/*
* ImportanceTest.scala
* Importance sampling tests.
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.algorithm.sampling
import org.scalatest._
import org.scalatest.Matchers
import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.sampling.Importance.Reject
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.language._
import com.cra.figaro.library.atomic.continuous._
import com.cra.figaro.library.atomic._
import com.cra.figaro.library.atomic.discrete.Binomial
import com.cra.figaro.library.compound._
import com.cra.figaro.test._
import com.cra.figaro.util.logSum
import JSci.maths.statistics._
import com.cra.figaro.test.tags.Performance
import com.cra.figaro.test.tags.NonDeterministic
import scala.language.reflectiveCalls
import org.scalatest.Matchers
import org.scalatest.{ PrivateMethodTester, WordSpec }
import scala.collection.mutable.Set
class ImportanceTest extends WordSpec with Matchers with PrivateMethodTester {
"Sampling a value of a single element" should {
"reject sampling process if condition violated" in {
Universe.createNew()
val target = Flip(1.0)
target.observe(false)
val numTrials = 100000
val tolerance = 0.01
val imp = Importance(target)
an[RuntimeException] should be thrownBy { imp.lw.traverse(List((target, None, None)), List(), 0.0, Set()) }
}
"sample normally if observations match" in {
Universe.createNew()
val target = Flip(0.7)
target.observe(false)
val numTrials = 100000
val tolerance = 0.01
val imp = Importance(target)
imp.lw.traverse(List((target, Some(false), None)), List(), 0.0, Set())
target.value should equal(false)
}
"for a Constant return the constant with probability 1" in {
Universe.createNew()
val c = Constant(8)
sampleOneTest(c, (i: Int) => i == 8, 1.0)
}
"for a Uniform return a range with probability proportional to the size of the range" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
sampleOneTest(u, (d: Double) => 0.3 <= d && d < 0.5, 0.25)
}
"for a simple Flip return true with probability of the argument" in {
Universe.createNew()
val f = Flip(0.3)
sampleOneTest(f, (b: Boolean) => b, 0.3)
}
"for a complex Flip return true with probability equal to the expectation of the argument" in {
Universe.createNew()
val f = Flip(Uniform(0.2, 1.0))
sampleOneTest(f, (b: Boolean) => b, 0.6)
}
"for a Select with simple probabilities return an outcome with the correct probability" in {
Universe.createNew()
val s = Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3)
sampleOneTest(s, (i: Int) => i == 2, 0.3)
}
"for a Select with complex probabilities return an outcome with the correct probability" in {
Universe.createNew()
val s = Select(Select(0.25 -> 0.2, 0.75 -> 0.8) -> 1, Constant(0.4) -> 2)
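      // Added clarification: with probability 0.25 the weights are (0.2, 0.4), so P(2) = 0.4 / 0.6;
      // with probability 0.75 the weights are (0.8, 0.4), so P(2) = 0.4 / 1.2, matching the expected value below.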
sampleOneTest(s, (i: Int) => i == 2, 0.25 * 0.4 / 0.6 + 0.75 * 0.4 / 1.2)
}
"for a Dist with simple probabilities return an outcome with the correct probability" in {
Universe.createNew()
val d = Dist(0.2 -> Constant(true), 0.8 -> Flip(0.3))
sampleOneTest(d, (b: Boolean) => b, 0.2 + 0.8 * 0.3)
}
"for a Dist with complex probabilities return an outcome with the correct probability" in {
Universe.createNew()
val d = Dist(Select(0.25 -> 0.2, 0.75 -> 0.8) -> Constant(true), Constant(0.4) -> Flip(0.3))
val p = 0.25 * (0.2 / 0.6 + (0.4 / 0.6) * 0.3) + 0.75 * (0.8 / 1.2 + (0.4 / 1.2) * 0.3)
sampleOneTest(d, (b: Boolean) => b, p)
}
"for an Apply with one argument return an outcome with probability equal to the sum of its inverse images" in {
Universe.createNew()
val a = Apply(Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3), (i: Int) => i > 1)
sampleOneTest(a, (b: Boolean) => b, 0.8)
}
"for an Apply with two arguments return an outcome with probability equal to the sum of its inverse images" in {
Universe.createNew()
val a = Apply(Select(0.5 -> 1, 0.5 -> 2), Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3),
(i: Int, j: Int) => j > i)
sampleOneTest(a, (b: Boolean) => b, 0.5 * (0.3 + 0.5) + 0.5 * 0.5)
}
"for an NonCaching two parent Chain return an outcome with probability equal to the expectation over the parent" in {
Universe.createNew()
val i1 = Select(0.4 -> 1, 0.6 -> 2)
val i2 = Constant(2)
val c = NonCachingChain(Flip(0.3), Flip(0.8), (b1: Boolean, b2: Boolean) => if (b1 && b2) i1; else i2)
sampleOneTest(c, (i: Int) => i == 2, (0.3 * 0.8) * 0.6 + (0.7 * 0.2 + 0.3 * 0.2 + 0.7 * 0.8))
}
"for an NonCachingChain return an outcome with probability equal to the expectation over the parents" in {
Universe.createNew()
val i1 = Select(0.4 -> 1, 0.6 -> 2)
val i2 = Constant(2)
val c = NonCachingChain(Flip(0.3), (b: Boolean) => if (b) i1; else i2)
sampleOneTest(c, (i: Int) => i == 2, 0.3 * 0.6 + 0.7)
}
"for a Caching two parent Chain return an outcome with probability equal to the expectation over the parent" in {
Universe.createNew()
val i1 = Select(0.4 -> 1, 0.6 -> 2)
val i2 = Constant(2)
val c = CachingChain(Flip(0.3), Flip(0.8), (b1: Boolean, b2: Boolean) => if (b1 && b2) i1; else i2)
sampleOneTest(c, (i: Int) => i == 2, (0.3 * 0.8) * 0.6 + (0.7 * 0.2 + 0.3 * 0.2 + 0.7 * 0.8))
}
"for a CachingChain return an outcome with probability equal to the expectation over the parent" in {
Universe.createNew()
val i1 = Select(0.4 -> 1, 0.6 -> 2)
val i2 = Constant(2)
val c = CachingChain(Flip(0.3), (b: Boolean) => if (b) i1; else i2)
sampleOneTest(c, (i: Int) => i == 2, 0.3 * 0.6 + 0.7)
}
"for an Inject return an outcome with probability equal to the probability of its inverse image" in {
Universe.createNew()
val u1 = Uniform(0.0, 2.0)
val u2 = Constant(1.5)
val i = Inject(u1, u2)
sampleOneTest(i, (d: Seq[Double]) => d.length == 2 && 0.5 <= d(0) && d(0) < 1.0 && d(1) == 1.5, 0.25)
}
"for an If with simple consequents return a consequent with probability equal to the test" in {
Universe.createNew()
val i = If(Flip(0.3), 1, 2)
sampleOneTest(i, (i: Int) => i == 2, 0.7)
}
"with a condition on the element return the correct conditional probability" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
u.setCondition((d: Double) => 0.25 <= d && d < 0.65)
sampleOneTest(u, (d: Double) => 0.3 <= d && d < 0.5, 0.5)
}
"with a constraint on the element return the correct probability taking into account the constraint" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
def constraint(d: Double) = if (0.3 <= d && d < 0.5) 3.0; else 1.0
u.setConstraint(constraint)
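      // Added clarification: the constrained mass is 3.0 * 0.2 = 0.6 inside [0.3, 0.5) versus
      // 1.0 * 0.6 = 0.6 on the rest of (0.2, 1.0), hence the expected probability of 0.5 below.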
sampleOneTest(u, (d: Double) => 0.3 <= d && d < 0.5, 0.5)
}
"with a condition on a related element return the correct conditional probability" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
u.setCondition((d: Double) => 0.25 <= d && d < 0.65)
val f = Flip(u)
sampleOneTest(f, (b: Boolean) => b, 0.45)
}
"with a constraint on a related element return the correct probability taking into account the constraint" in {
Universe.createNew()
val u = Uniform(0.0, 1.0)
u.setConstraint((d: Double) => d)
val f = Flip(u)
      // Expected value of flip argument is (\int_0^1 x^2 dx) / (\int_0^1 x dx) = (1/3) / (1/2) = 2/3
sampleOneTest(f, (b: Boolean) => b, 2.0 / 3.0)
}
"correctly resample an element's arguments when the arguments change during samples" in {
Universe.createNew()
class Test {
val count = discrete.Uniform(1, 2)
val array = MakeList(count, () => Flip(.9))
}
val test = Constant(new Test)
val c = Chain(test, (t: Test) => {
val B = Inject(t.array)
Apply(B, (b: List[List[Boolean]]) => b.head)
})
val alg = Importance(1, c)
alg.lw.computeWeight(List(c))
c.value.asInstanceOf[List[Boolean]].head should be(true || false)
}
}
"Producing a weighted sample of an element in a universe" should {
"with no conditions or constraints produce the same result as sampling the element individually" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
weightedSampleTest(f, (b: Boolean) => b, 0.6)
}
"with a condition on a dependent element produce the result with the correct probability" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setCondition((i: Int) => i == 2)
      // U(true) = \int_{0.2}^{1.0} 0.7 p dp = 0.35 * 0.96
      // U(false) = \int_{0.2}^{1.0} (1 - p) dp = 0.32
val u1 = 0.35 * 0.96
val u2 = 0.32
weightedSampleTest(f, (b: Boolean) => b, u1 / (u1 + u2))
}
"with a constraint on a dependent element produce the result with the correct probability" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setConstraint((i: Int) => i.toDouble)
      // U(true) = \int_{0.2}^{1.0} (0.3 + 2 * 0.7) p dp = 0.85 * 0.96
      // U(false) = \int_{0.2}^{1.0} (2 * (1 - p)) dp = 0.64
val u1 = 0.85 * 0.96
val u2 = 0.64
weightedSampleTest(f, (b: Boolean) => b, u1 / (u1 + u2))
}
"with an element that uses another element multiple times, " +
"always produce the same value for the different uses" in {
Universe.createNew()
val f = Flip(0.5)
val e = f === f
weightedSampleTest(e, (b: Boolean) => b, 1.0)
}
"with a constraint on an element that is used multiple times, only factor in the constraint once" in {
Universe.createNew()
val f1 = Flip(0.5)
val f2 = Flip(0.3)
val e1 = f1 === f1
val e2 = f1 === f2
val d = Dist(0.5 -> e1, 0.5 -> e2)
f1.setConstraint((b: Boolean) => if (b) 3.0; else 2.0)
// Probability that f1 is true = 0.6
// Probability that e1 is true = 1.0
// Probability that e2 is true = 0.6 * 0.3 + 0.4 * 0.7 = 0.46
// Probability that d is true = 0.5 * 1 + 0.5 * 0.46 = 0.73
weightedSampleTest(d, (b: Boolean) => b, 0.73)
}
"with an observation on a compound flip, terminate quickly and produce the correct result" taggedAs (NonDeterministic) in {
// Tests the likelihood weighting implementation for compound flip
Universe.createNew()
val b = Uniform(0.0, 1.0)
val f1 = Flip(b)
val f2 = Flip(b)
val f3 = Flip(b)
val f4 = Flip(b)
val f5 = Flip(b)
val f6 = Flip(b)
val f7 = Flip(b)
val f8 = Flip(b)
val f9 = Flip(b)
val f10 = Flip(b)
val f11 = Flip(b)
val f12 = Flip(b)
val f13 = Flip(b)
val f14 = Flip(b)
val f15 = Flip(b)
val f16 = Flip(b)
val f17 = Flip(b)
val f18 = Flip(b)
val f19 = Flip(b)
val f20 = Flip(b)
f1.observe(true)
f2.observe(true)
f3.observe(true)
f4.observe(true)
f5.observe(true)
f6.observe(true)
f7.observe(true)
f8.observe(true)
f9.observe(true)
f10.observe(true)
f11.observe(true)
f12.observe(true)
f13.observe(true)
f14.observe(true)
f15.observe(true)
f16.observe(true)
f17.observe(false)
f18.observe(false)
f19.observe(false)
f20.observe(false)
val alg = Importance(b)
alg.start()
Thread.sleep(100)
val time0 = System.currentTimeMillis()
alg.stop()
// Uniform(0,1) is beta(1,1)
// Result is beta(1 + 16,1 + 4)
// Expectation is (alpha) / (alpha + beta) = 17/22
alg.expectation(b, (d: Double) => d) should be((17.0 / 22.0) +- 0.02)
val time1 = System.currentTimeMillis()
// If likelihood weighting is working, stopping and querying the algorithm should be almost instantaneous
// If likelihood weighting is not working, stopping and querying the algorithm requires waiting for a non-rejected sample
(time1 - time0) should be <= (500L)
alg.shutdown
}
"with an observation on a parameterized flip, terminate quickly and produce the correct result" taggedAs (NonDeterministic) in {
// Tests the likelihood weighting implementation for compound flip
Universe.createNew()
val b = Beta(2.0, 5.0)
val f1 = Flip(b)
val f2 = Flip(b)
val f3 = Flip(b)
val f4 = Flip(b)
val f5 = Flip(b)
val f6 = Flip(b)
val f7 = Flip(b)
val f8 = Flip(b)
val f9 = Flip(b)
val f10 = Flip(b)
val f11 = Flip(b)
val f12 = Flip(b)
val f13 = Flip(b)
val f14 = Flip(b)
val f15 = Flip(b)
val f16 = Flip(b)
val f17 = Flip(b)
val f18 = Flip(b)
val f19 = Flip(b)
val f20 = Flip(b)
f1.observe(true)
f2.observe(true)
f3.observe(true)
f4.observe(true)
f5.observe(true)
f6.observe(true)
f7.observe(true)
f8.observe(true)
f9.observe(true)
f10.observe(true)
f11.observe(true)
f12.observe(true)
f13.observe(true)
f14.observe(true)
f15.observe(true)
f16.observe(true)
f17.observe(false)
f18.observe(false)
f19.observe(false)
f20.observe(false)
val alg = Importance(b)
alg.start()
Thread.sleep(100)
val time0 = System.currentTimeMillis()
alg.stop()
// Result is beta(2 + 16,5 + 4)
// Expectation is (alpha) / (alpha + beta) = 18/27
alg.expectation(b, (d: Double) => d) should be((18.0 / 27.0) +- 0.02)
val time1 = System.currentTimeMillis()
// If likelihood weighting is working, stopping and querying the algorithm should be almost instantaneous
// If likelihood weighting is not working, stopping and querying the algorithm requires waiting for a non-rejected sample
(time1 - time0) should be <= (500L)
alg.shutdown
}
"with an observation on a parameterized binomial, terminate quickly and produce the correct result" in {
// Tests the likelihood weighting implementation for chain
Universe.createNew()
val beta = Beta(2.0, 5.0)
val bin = Binomial(2000, beta)
bin.observe(1600)
val alg = Importance(beta)
alg.start()
Thread.sleep(1000)
val time0 = System.currentTimeMillis()
alg.stop()
// Result is beta(2 + 1600,5 + 400)
// Expectation is (alpha) / (alpha + beta) = 1602/2007
alg.expectation(beta, (d: Double) => d) should be((1602.0 / 2007.0) +- 0.02)
val time1 = System.currentTimeMillis()
// If likelihood weighting is working, stopping and querying the algorithm should be almost instantaneous
// If likelihood weighting is not working, stopping and querying the algorithm requires waiting for a non-rejected sample
(time1 - time0) should be <= (500L)
alg.shutdown
}
"with an observation on a chain, terminate quickly and produce the correct result" in {
// Tests the likelihood weighting implementation for chain
Universe.createNew()
val beta = Uniform(0.0, 1.0)
val bin = Binomial(2000, beta)
bin.observe(1600)
val alg = Importance(beta)
alg.start()
Thread.sleep(1000)
val time0 = System.currentTimeMillis()
alg.stop()
// uniform(0,1) is beta(1,1)
// Result is beta(1 + 1600,1 + 400)
// Expectation is (alpha) / (alpha + beta) = 1601/2003
alg.expectation(beta, (d: Double) => d) should be((1601.0 / 2003.0) +- 0.02)
val time1 = System.currentTimeMillis()
// If likelihood weighting is working, stopping and querying the algorithm should be almost instantaneous
// If likelihood weighting is not working, stopping and querying the algorithm requires waiting for a non-rejected sample
(time1 - time0) should be <= (500L)
alg.shutdown
}
"with an observation on a dist, terminate quickly and produce the correct result" in {
// Tests the likelihood weighting implementation for dist
Universe.createNew()
val beta = Beta(2.0, 5.0)
val dist = Dist(0.5 -> Constant(1000), 0.5 -> Binomial(2000, beta))
dist.observe(1600) // forces it to choose bin, and observation should propagate to it
val alg = Importance(beta)
alg.start()
Thread.sleep(1000)
val time0 = System.currentTimeMillis()
alg.stop()
// Result is beta(2 + 1600,5 + 400)
// Expectation is (alpha) / (alpha + beta) = 1602/2007
alg.expectation(beta, (d: Double) => d) should be((1602.0 / 2007.0) +- 0.02)
val time1 = System.currentTimeMillis()
// If likelihood weighting is working, stopping and querying the algorithm should be almost instantaneous
// If likelihood weighting is not working, stopping and querying the algorithm requires waiting for a non-rejected sample
(time1 - time0) should be <= (500L)
alg.shutdown
}
}
"Running importance sampling" should {
"produce the correct answer each time when run twice with different conditions" in {
Universe.createNew()
val s = Select(0.5 -> 0.3, 0.5 -> 0.6)
val f = Flip(s)
val i = Importance(20000, f)
s.observe(0.3)
i.start()
i.probability(f, true) should be(0.3 +- 0.01)
i.kill()
s.observe(0.6)
i.start()
i.probability(f, true) should be(0.6 +- 0.01)
i.kill()
}
/* These tests are no longer valid. Since there is a hidden dependency, we can't support this */
/*
"resample elements inside class defined in a chain" in {
Universe.createNew()
class temp {
val t1 = Flip(0.9)
}
val a = CachingChain(Constant(0), (i: Int) => Constant(new temp))
val b = Apply(a, (t: temp) => t.t1.value)
val alg = Importance(10000, b)
alg.start
alg.probability(b, true) should be(0.9 +- .01)
alg.kill
}
"resample elements inside class defined in a chain for foward sampling" taggedAs (NonDeterministic) in {
Universe.createNew()
class temp {
val t1 = Flip(0.9)
}
val a = CachingChain(Constant(0), (i: Int) => Constant(new temp))
val b = Apply(a, (t: temp) => t.t1.value)
val prob = List.fill(5000) { Forward(Universe.universe); b.value }
prob.count(_ == true).toDouble / 5000.0 should be(0.9 +- .02)
//alg.probability(b, true) should be (0.9 +- .01)
}
*
*/
"not suffer from stack overflow with small probability of success" taggedAs (Performance) in {
Universe.createNew()
val f = Flip(0.000001)
f.observe(true)
val i = Importance(1, f)
i.start
}
"not suffer from memory leaks" taggedAs (Performance) in {
Universe.createNew()
val c = NonCachingChain(Uniform(0.2, 1.0), (d: Double) => Flip(d))
val i = Importance(1000000, c)
i.start
}
}
"Computing probability of evidence using importance sampling" when {
"given a vanilla model with one condition" should {
"return the probability the condition is satisfied" in {
val universe = Universe.createNew()
val f = Flip(0.7)("f", universe)
probEvidenceTest(0.7, List(NamedEvidence("f", Observation(true))))
}
}
"given a vanilla model with two independent conditions" should {
"return the probability both conditions are satisfied" in {
val universe = Universe.createNew()
val f1 = Flip(0.7)("f1", universe)
val f2 = Flip(0.4)("f2", universe)
probEvidenceTest(0.7 * 0.4, List(NamedEvidence("f1", Observation(true)), NamedEvidence("f2", Observation(true))))
}
}
"given a vanilla mode with two dependent conditions" should {
"return the probability both conditions are jointly satisfied" in {
val universe = Universe.createNew()
val d = Select(0.2 -> 0.6, 0.8 -> 0.9)
val f1 = Flip(d)("f1", universe)
val f2 = Flip(d)("f2", universe)
probEvidenceTest(0.2 * 0.6 * 0.6 + 0.8 * 0.9 * 0.9, List(NamedEvidence("f1", Observation(true)), NamedEvidence("f2", Observation(true))))
}
}
"given a vanilla model with two dependent conditions and a constraint" should {
"return the probability both conditions are satisfied, taking into account the constraint" in {
val universe = Universe.createNew()
val d = Select(0.5 -> 0.6, 0.5 -> 0.9)("d", universe)
d.setConstraint((d: Double) => if (d > 0.7) 0.8; else 0.2)
val f1 = Flip(d)("f1", universe)
val f2 = Flip(d)("f2", universe)
probEvidenceTest(0.2 * 0.6 * 0.6 + 0.8 * 0.9 * 0.9, List(NamedEvidence("f1", Observation(true)), NamedEvidence("f2", Observation(true))))
}
}
"given a simple dist with a condition on the result" should {
"return the expectation over the clauses of the probability the result satisfies the condition" in {
val universe = Universe.createNew()
val d = Dist(0.3 -> Flip(0.6), 0.7 -> Flip(0.9))("d", universe)
probEvidenceTest(0.3 * 0.6 + 0.7 * 0.9, List(NamedEvidence("d", Observation(true))))
}
}
"given a complex dist with a condition on the result" should {
"return the expectation over the clauses of the probability the result satisfies the condition" in {
val universe = Universe.createNew()
val p1 = Select(0.2 -> 0.4, 0.8 -> 0.6)
val p2 = Constant(0.4)
val d = Dist(p1 -> Flip(0.6), p2 -> Flip(0.9))("d", universe)
probEvidenceTest(0.2 * (0.5 * 0.6 + 0.5 * 0.9) + 0.8 * (0.6 * 0.6 + 0.4 * 0.9), List(NamedEvidence("d", Observation(true))))
}
}
"given a continuous uniform with a condition" should {
"return the uniform probability of the condition" in {
val universe = Universe.createNew()
val u = Uniform(0.0, 1.0)("u", universe)
val condition = (d: Double) => d < 0.4
probEvidenceTest(0.4, List(NamedEvidence("u", Condition(condition))))
}
}
"given a caching chain with a condition on the result" should {
"return the expectation over the parent of the probability the result satisfies the condition" in {
val universe = Universe.createNew()
val p1 = Select(0.4 -> 0.3, 0.6 -> 0.9)
val c = CachingChain(p1, (d: Double) => if (d < 0.4) Flip(0.3); else Flip(0.8))("c", universe)
probEvidenceTest(0.4 * 0.3 + 0.6 * 0.8, List(NamedEvidence("c", Observation(true))))
}
}
"given a non-caching chain with a condition on the result" should {
"return the expectation over the parent of the probability the result satisfies the condition" in {
val universe = Universe.createNew()
val p1 = Uniform(0.0, 1.0)
val c = NonCachingChain(p1, (d: Double) => if (d < 0.4) Flip(0.3); else Flip(0.8))("c", universe)
probEvidenceTest(0.4 * 0.3 + 0.6 * 0.8, List(NamedEvidence("c", Observation(true))))
}
}
"given a chain of two arguments whose result is a different element with a condition on the result" should {
"return the correct probability of evidence in the result" in {
val universe = Universe.createNew()
val x = Constant(false)
val y = Constant(false)
val u1 = Uniform(0.0, 1.0)
val u2 = Uniform(0.0, 2.0)
val a = CachingChain(x, y, (x: Boolean, y: Boolean) => if (x || y) u1; else u2)("a", universe)
def condition(d: Double) = d < 0.5
probEvidenceTest(0.25, List(NamedEvidence("a", Condition(condition))))
}
}
"Sampling the posterior" should {
"produce the correct answer for marginals" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setConstraint((i: Int) => i.toDouble)
      // U(true) = \int_{0.2}^{1.0} (0.3 + 2 * 0.7) p dp = 0.85 * 0.96
      // U(false) = \int_{0.2}^{1.0} (2 * (1 - p)) dp = 0.64
val u1 = 0.85 * 0.96
val u2 = 0.64
val pos = Importance.sampleJointPosterior(f)
val probTrue = (pos.take(1000).toList.map(t => t(0).asInstanceOf[Boolean])).count(p => p)
probTrue.toDouble / 1000.0 should be(u1 / (u1 + u2) +- .01)
}
"produce the correct answer for joint" in {
Universe.createNew()
val u = Uniform(0.2, 1.0)
val f = Flip(u)
val a = If(f, Select(0.3 -> 1, 0.7 -> 2), Constant(2))
a.setConstraint((i: Int) => i.toDouble)
val pair = ^^(f,a)
val alg = Importance(20000, pair)
alg.start()
val pos = Importance.sampleJointPosterior(f, a)
val samples = pos.take(5000).toList.map(t => (t(0).asInstanceOf[Boolean], t(1).asInstanceOf[Int]))
val probTrueTwo = samples.count(p => p._1 == true && p._2 == 2).toDouble/5000.0 should be(alg.probability(pair, (true, 2)) +- .01)
val probTrueOne = samples.count(p => p._1 == true && p._2 == 1).toDouble/5000.0 should be(alg.probability(pair, (true, 1)) +- .01)
val probFalseTwo = samples.count(p => p._1 == false && p._2 == 2).toDouble/5000.0 should be(alg.probability(pair, (false, 2)) +- .01)
val probFalseOne = samples.count(p => p._1 == false && p._2 == 1).toDouble/5000.0 should be(alg.probability(pair, (false, 1)) +- .01)
alg.kill()
}
}
}
def weightedSampleTest[T](target: Element[T], predicate: T => Boolean, prob: Double) {
val numTrials = 100000
val tolerance = 0.01
val algorithm = Importance(numTrials, target)
algorithm.start()
algorithm.probability(target, predicate) should be(prob +- tolerance)
}
def sampleOneTest[T](target: Element[T], predicate: T => Boolean, prob: Double) {
val numTrials = 100000
val tolerance = 0.01
val imp = Importance(target)
def attempt(): (Double, T) = {
try {
val weight = imp.lw.computeWeight(List(target))
(weight, target.value.asInstanceOf[T])
} catch {
case Importance.Reject => attempt()
}
}
var totalWeight = Double.NegativeInfinity
var successWeight = Double.NegativeInfinity
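    // Added note: weights are accumulated in log space, so Double.NegativeInfinity represents log(0)
    // and logSum adds the underlying probabilities without leaving log space.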
for { i <- 1 to numTrials } {
val (weight, value) = attempt()
if (predicate(value)) successWeight = logSum(weight, successWeight)
totalWeight = logSum(weight, totalWeight)
}
math.exp(successWeight - totalWeight) should be(prob +- tolerance)
imp.shutdown
}
def probEvidenceTest(prob: Double, evidence: List[NamedEvidence[_]]) {
val alg = Importance(10000)
alg.start()
alg.probabilityOfEvidence(evidence) should be(prob +- 0.01)
}
}
|
scottcb/figaro
|
Figaro/src/test/scala/com/cra/figaro/test/algorithm/sampling/ImportanceTest.scala
|
Scala
|
bsd-3-clause
| 27,314
|
package mesosphere.marathon.api.validation
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.core.plugin.PluginManager
import mesosphere.marathon.core.readiness.ReadinessCheck
import mesosphere.marathon.state._
import org.scalatest.{ GivenWhenThen, Matchers }
import scala.collection.immutable.Seq
class AppDefinitionReadinessCheckValidationTest extends MarathonSpec with Matchers with GivenWhenThen {
lazy val validAppDefinition = AppDefinition.validAppDefinition(PluginManager.None)
test("app with 0 readinessChecks is valid") {
val f = new Fixture
Given("an app without readinessChecks")
val app = f.app(
readinessChecks = Nil
)
Then("the app is considered valid")
validAppDefinition(app).isSuccess shouldBe true
}
test("app with 1 readinessCheck is valid") {
val f = new Fixture
Given("an app with one readinessCheck")
val app = f.app(
readinessChecks = Seq(ReadinessCheck())
)
Then("the app is considered valid")
validAppDefinition(app).isSuccess shouldBe true
}
test("app with more than 1 readinessChecks is invalid") {
val f = new Fixture
Given("a app app with more than one readiness checks")
val app = f.app(readinessChecks = Seq(ReadinessCheck(), ReadinessCheck()))
Then("validation fails")
validAppDefinition(app).isFailure shouldBe true
}
test("app with invalid readinessChecks is invalid") {
val f = new Fixture
Given("a app app with an invalid readiness check")
val app = f.app(readinessChecks = Seq(ReadinessCheck(name = "")))
Then("validation fails")
validAppDefinition(app).isFailure shouldBe true
}
test("readinessCheck NOT corresponding to port definitions are invalid") {
val f = new Fixture
Given("a app with ports and a readinessCheck that uses an unknown portName")
val portName = "foo"
val app = f.app(
readinessChecks = Seq(ReadinessCheck(portName = "invalid")),
portDefinitions = Seq(PortDefinition(port = 123, name = Some(portName)))
)
Then("validation fails")
validAppDefinition(app).isFailure shouldBe true
}
test("readinessCheck corresponding to port definitions are valid") {
val f = new Fixture
Given("a app with ports and a readinessCheck that uses an unknown portName")
val portName = "foo"
val app = f.app(
readinessChecks = Seq(ReadinessCheck(portName = portName)),
portDefinitions = Seq(PortDefinition(port = 123, name = Some(portName)))
)
Then("validation fails")
validAppDefinition(app).isSuccess shouldBe true
}
class Fixture {
def app(
readinessChecks: Seq[ReadinessCheck] = Seq(ReadinessCheck()),
portDefinitions: Seq[PortDefinition] = Seq(
PortDefinition(port = 123, name = Some(ReadinessCheck.DefaultPortName))
)): AppDefinition =
AppDefinition(
cmd = Some("sleep 1000"),
instances = 1,
readinessChecks = readinessChecks,
portDefinitions = portDefinitions
)
}
}
|
yp-engineering/marathon
|
src/test/scala/mesosphere/marathon/api/validation/AppDefinitionReadinessCheckValidationTest.scala
|
Scala
|
apache-2.0
| 3,026
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: Spiros Tzavellas
*/
package com.tzavellas.coeus.spring.config
import org.junit.Test
import org.junit.Assert._
import javax.servlet.ServletContextEvent
import org.springframework.mock.web.{ MockServletConfig, MockServletContext }
import org.springframework.web.context.ContextLoaderListener
import com.tzavellas.coeus.FrameworkException
import com.tzavellas.coeus.spring.test._
class SpringRegistrarTest {
@Test(expected=classOf[FrameworkException])
def raise_error_when_a_controller_is_not_configured_with_singleton_scope() {
init("/errors-context.xml")
}
@Test
def register_the_controllers_from_spring_context() {
val controllers = init("/web-context.xml").controllers.result
assertTrue(controllers.exists(_.getClass == classOf[BlogController]))
assertTrue(controllers.exists(_.getClass == classOf[PostController]))
}
private def init(configLocation: String) = {
val servletContext = new MockServletContext
servletContext.addInitParameter("contextConfigLocation", configLocation)
val servletConfig = new MockServletConfig(servletContext, "test-servlet")
(new ContextLoaderListener).contextInitialized(new ServletContextEvent(servletContext))
new SpringWebModule(servletConfig)
}
}
|
sptz45/coeus-spring
|
src/test/scala/com/tzavellas/coeus/spring/config/SpringRegistrarTest.scala
|
Scala
|
apache-2.0
| 1,366
|
package com.twitter.finagle.postgres
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec
/*
* Base class for finagle postgres tests.
*/
class Spec
extends AnyWordSpec
with Matchers
with BeforeAndAfter {
}
|
finagle/finagle-postgres
|
src/test/scala/com/twitter/finagle/postgres/Spec.scala
|
Scala
|
apache-2.0
| 285
|
package com.olvind.crud
package frontend
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import scala.scalajs.js
import scalacss.ScalaCssReact._
object EditorLinkedSingleRow
extends EditorBaseSingleRow
with EditorBaseUpdaterLinked {
final case class Props(
base: EditorBaseProps,
linkedRow: StrLinkedRows,
reload: Callback,
createElem: ReactElement) extends PropsBUL {
def row: StrTableRow =
linkedRow.rows.head
}
final case class State(
validationFails: Map[Option[StrRowId], Seq[ValidationError]],
showCreate: Boolean,
cachedDataOpt: Option[CachedData]) extends StateB[State]{
override def withCachedData(cd: CachedData): State =
copy(cachedDataOpt = cd.some)
override def withValidationFails(rowOpt: Option[StrRowId], ves: Seq[ValidationError]): State =
copy(validationFails = validationFails.updated(rowOpt, ves))
}
private final case class Backend($: BackendScope[Props, State])
extends BackendBUL[Props, State]{
override implicit val r = ComponentUpdates.InferredReusability[Props]
val toggleShowCreate: ReactEvent ⇒ Callback =
e ⇒ $.modState(S ⇒ S.copy(showCreate = !S.showCreate))
override def render(P: Props, S: State): ReactElement = {
val fp = fromProps.value()
<.div(
TableStyle.container,
EditorToolbar(EditorToolbar.Props(
editorDesc = P.editorDesc,
rows = 1,
cachedDataOpt = S.cachedDataOpt,
filterU = js.undefined,
openFilterDialogU = js.undefined,
isLinkedU = P.linkedRow,
refreshU = P.reload,
showAllU = fp.showAllRows,
deleteU = P.row.idOpt.asUndef map deleteRow,
showCreateU = (S.showCreate, toggleShowCreate),
customElemU = js.undefined
)),
<.div(
TableStyle.table,
<.div(
TableStyle.nested,
P.createElem.some.filter(_ ⇒ S.showCreate)
),
forColumns(P.editorDesc, P.row, S.validationFails)(
(t, col, uid, uv, ue) ⇒
<.div(
TableStyle.row,
TableHeaderCell(TableHeaderCell.Props(
col,
t.mainTable,
js.undefined
)),
TableCell(
clearError = clearValidationFail(P.row.idOpt),
cachedDataOpt = S.cachedDataOpt,
updateU = P.row.idOpt.map(updateValue).asUndef,
showSingleRowU = fp.showSingleRow)(
t, col, uid, uv, ue)
)
)
)
)
}
}
private val component =
ReactComponentB[Props]("EditorSingleRow")
.initialState_P(P ⇒ State(Map.empty, showCreate = false, P.base.cachedDataOpt))
.renderBackend[Backend]
.configure(ComponentUpdates.inferred("EditorSingleRow"))
.componentDidMount(_.backend.init)
.build
def apply(p: Props): ReactElement =
component(p)
}
|
elacin/slick-crud
|
crud/js/src/main/scala/com/olvind/crud/frontend/EditorLinkedSingleRow.scala
|
Scala
|
apache-2.0
| 3,154
|
package brown.tracingplane.bdl.compiler
import scala.collection.JavaConverters._
import scala.collection.mutable.Map
import fastparse.all._
import scala.reflect.runtime.universe._
object Ast {
sealed trait FieldType {
def isValid: Boolean = true
}
sealed abstract class ParameterizedType(parameters: List[FieldType]) extends FieldType {
def getParameters(): java.util.List[FieldType] = {
return parameters.asJava
}
override def toString(): String = {
return s"${this.getClass.getSimpleName}<${parameters.mkString(", ")}>"
}
}
case class UserDefinedType(var packageName: String, name: String, var structType: Boolean = false) extends FieldType {
def isStructType(): Boolean = {
return structType
}
def hasPackageName(): Boolean = {
return packageName != null && !packageName.equals("")
}
override def toString(): String = {
if (hasPackageName()) {
return s"$packageName.$name"
} else {
return name
}
}
}
sealed trait BuiltInType extends FieldType
sealed trait PrimitiveType extends BuiltInType {
override def toString(): String = {
val className = this.getClass.getName
return className.substring(BuiltInType.getClass.getName.length(), className.length()-1)
// return this.getClass.getSimpleName // SI-2034
}
}
object BuiltInType {
sealed trait Numeric extends PrimitiveType
sealed trait Float extends Numeric
sealed trait Signed extends Numeric
sealed trait Unsigned extends Numeric
case object taint extends Unsigned
case object bool extends Unsigned
case object int32 extends Unsigned
case object int64 extends Unsigned
case object sint32 extends Signed
case object sint64 extends Signed
case object fixed32 extends Unsigned
case object fixed64 extends Unsigned
case object sfixed32 extends Signed
case object sfixed64 extends Signed
case object float extends Float
case object double extends Float
case object string extends PrimitiveType
case object bytes extends PrimitiveType
case object Counter extends BuiltInType
case class Set(of: FieldType) extends ParameterizedType(List[FieldType](of)) with BuiltInType {
override def isValid(): Boolean = {
of match {
case t: PrimitiveType => return true
case UserDefinedType(_,_,isStructType) => return isStructType
case _ => return false
}
}
}
case class Map(keyType: PrimitiveType, valueType: FieldType) extends ParameterizedType(List[FieldType](keyType, valueType)) with BuiltInType
}
case class FieldDeclaration(fieldtype: FieldType, name: String, index: Int) {
override def toString(): String = {
return s"$fieldtype $name = $index"
}
}
case class StructFieldDeclaration(fieldtype: FieldType, name: String) {
override def toString(): String = {
return s"$fieldtype $name"
}
}
abstract class ObjectDeclaration(val name: String) {
var packageName: String = ""; // Filled in later
def fullyQualifiedName(packageDeclaration: Option[PackageDeclaration]): String = {
packageDeclaration match {
case Some(decl) => return fullyQualifiedName(decl)
case None => return name
}
}
def fullyQualifiedName(packageDeclaration: PackageDeclaration): String = {
return packageDeclaration.getFullyQualifiedBagName(name)
}
}
case class BagDeclaration(override val name: String, fields: Seq[FieldDeclaration]) extends ObjectDeclaration(name) {
    // Field declarations are returned sorted by index
    def getFieldDeclarations(): java.util.List[FieldDeclaration] = {
      return fields.sortWith(_.index < _.index).asJava
}
}
case class StructDeclaration(override val name: String, fields: Seq[StructFieldDeclaration]) extends ObjectDeclaration(name) {
def getFieldDeclarations(): java.util.List[StructFieldDeclaration] = {
return fields.asJava
}
}
case class PackageDeclaration(packageName: Seq[String]) {
def getPackageName(): java.util.List[String] = {
return packageName.asJava
}
def getPackageNameString(): String = {
return packageName.mkString(".")
}
def getFullyQualifiedBagName(bagName: String): String = {
      return (packageName :+ bagName).mkString(".")
}
}
case class ImportDeclaration(filename: String)
case class BaggageBuffersDeclaration(packageDeclaration: Option[PackageDeclaration], imports: Seq[ImportDeclaration], objectDeclarations: Seq[ObjectDeclaration]) {
val bagDeclarations: Seq[BagDeclaration] = objectDeclarations.filter(_.isInstanceOf[BagDeclaration]).map(_.asInstanceOf[BagDeclaration])
val structDeclarations: Seq[StructDeclaration] = objectDeclarations.filter(_.isInstanceOf[StructDeclaration]).map(_.asInstanceOf[StructDeclaration])
def getObjectDeclarations(): java.util.List[ObjectDeclaration] = {
return objectDeclarations.asJava
}
def getBagDeclarations(): java.util.List[BagDeclaration] = {
return bagDeclarations.asJava
}
def getStructDeclarations(): java.util.List[StructDeclaration] = {
return structDeclarations.asJava
}
def getImportDeclarations(): java.util.List[ImportDeclaration] = {
return imports.asJava
}
def getPackageDeclaration(): PackageDeclaration = {
return packageDeclaration.getOrElse(null)
}
def getPackageNameString(): String = {
packageDeclaration match {
case Some(decl) => return decl.getPackageNameString()
case _ => return ""
}
}
def isEmpty(): Boolean = {
return packageDeclaration.isEmpty && imports.isEmpty && bagDeclarations.isEmpty && structDeclarations.isEmpty
}
}
}
|
tracingplane/tracingplane-java
|
bdl/compiler/src/main/scala/brown/tracingplane/bdl/compiler/Ast.scala
|
Scala
|
bsd-3-clause
| 5,795
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg.batch
import org.apache.flink.streaming.api.operators.OneInputStreamOperator
import org.apache.flink.table.data.binary.BinaryRowData
import org.apache.flink.table.data.{GenericRowData, JoinedRowData, RowData}
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.planner.codegen.{CodeGenUtils, CodeGeneratorContext, ProjectionCodeGenerator}
import org.apache.flink.table.planner.functions.aggfunctions.DeclarativeAggregateFunction
import org.apache.flink.table.planner.plan.utils.AggregateInfoList
import org.apache.flink.table.runtime.generated.GeneratedOperator
import org.apache.flink.table.runtime.operators.TableStreamOperator
import org.apache.flink.table.runtime.operators.aggregate.{BytesHashMap, BytesHashMapSpillMemorySegmentPool}
import org.apache.flink.table.types.logical.RowType
import org.apache.calcite.tools.RelBuilder
/**
 * Operator code generator for HashAggregation. It only deals with [[DeclarativeAggregateFunction]],
 * and aggregate buffers should be updated (e.g. setInt) in [[BinaryRowData]].
 * (Hash Aggregate performs much better than Sort Aggregate.)
*/
class HashAggCodeGenerator(
ctx: CodeGeneratorContext,
builder: RelBuilder,
aggInfoList: AggregateInfoList,
inputType: RowType,
outputType: RowType,
grouping: Array[Int],
auxGrouping: Array[Int],
isMerge: Boolean,
isFinal: Boolean) {
private lazy val groupKeyRowType = AggCodeGenHelper.projectRowType(inputType, grouping)
private lazy val aggCallToAggFunction =
aggInfoList.aggInfos.map(info => (info.agg, info.function))
private lazy val aggregates: Seq[UserDefinedFunction] = aggInfoList.aggInfos.map(_.function)
private lazy val aggArgs: Array[Array[Int]] = aggInfoList.aggInfos.map(_.argIndexes)
// get udagg instance names
private lazy val udaggs = AggCodeGenHelper.getUdaggs(aggregates)
// currently put auxGrouping to aggBuffer in code-gen
private lazy val aggBufferNames = AggCodeGenHelper.getAggBufferNames(auxGrouping, aggregates)
private lazy val aggBufferTypes =
AggCodeGenHelper.getAggBufferTypes(inputType, auxGrouping, aggregates)
private lazy val aggBufferRowType = RowType.of(aggBufferTypes.flatten, aggBufferNames.flatten)
def genWithKeys(): GeneratedOperator[OneInputStreamOperator[RowData, RowData]] = {
val inputTerm = CodeGenUtils.DEFAULT_INPUT1_TERM
val className = if (isFinal) "HashAggregateWithKeys" else "LocalHashAggregateWithKeys"
// add logger
val logTerm = CodeGenUtils.newName("LOG")
ctx.addReusableLogger(logTerm, className)
// gen code to do group key projection from input
val currentKeyTerm = CodeGenUtils.newName("currentKey")
val currentKeyWriterTerm = CodeGenUtils.newName("currentKeyWriter")
val keyProjectionCode = ProjectionCodeGenerator.generateProjectionExpression(
ctx,
inputType,
groupKeyRowType,
grouping,
inputTerm = inputTerm,
outRecordTerm = currentKeyTerm,
outRecordWriterTerm = currentKeyWriterTerm).code
// gen code to create groupKey, aggBuffer Type array
// it will be used in BytesHashMap and BufferedKVExternalSorter if enable fallback
val groupKeyTypesTerm = CodeGenUtils.newName("groupKeyTypes")
val aggBufferTypesTerm = CodeGenUtils.newName("aggBufferTypes")
HashAggCodeGenHelper.prepareHashAggKVTypes(
ctx, groupKeyTypesTerm, aggBufferTypesTerm, groupKeyRowType, aggBufferRowType)
// gen code to aggregate and output using hash map
val aggregateMapTerm = CodeGenUtils.newName("aggregateMap")
val lookupInfo = ctx.addReusableLocalVariable(
classOf[BytesHashMap.LookupInfo].getCanonicalName,
"lookupInfo")
HashAggCodeGenHelper.prepareHashAggMap(
ctx,
groupKeyTypesTerm,
aggBufferTypesTerm,
aggregateMapTerm)
val outputTerm = CodeGenUtils.newName("hashAggOutput")
val (reuseAggMapEntryTerm, reuseGroupKeyTerm, reuseAggBufferTerm) =
HashAggCodeGenHelper.prepareTermForAggMapIteration(
ctx,
outputTerm,
outputType,
groupKeyRowType,
aggBufferRowType,
if (grouping.isEmpty) classOf[GenericRowData] else classOf[JoinedRowData])
val currentAggBufferTerm = ctx.addReusableLocalVariable(
classOf[BinaryRowData].getName, "currentAggBuffer")
val (initedAggBuffer, aggregate, outputExpr) = HashAggCodeGenHelper.genHashAggCodes(
isMerge,
isFinal,
ctx,
builder,
(grouping, auxGrouping),
inputTerm,
inputType,
aggCallToAggFunction,
aggArgs,
aggregates,
currentAggBufferTerm,
aggBufferRowType,
aggBufferTypes,
outputTerm,
outputType,
reuseGroupKeyTerm,
reuseAggBufferTerm)
val outputResultFromMap = HashAggCodeGenHelper.genAggMapIterationAndOutput(
ctx, isFinal, aggregateMapTerm, reuseAggMapEntryTerm, reuseAggBufferTerm, outputExpr)
// gen code to deal with hash map oom, if enable fallback we will use sort agg strategy
val sorterTerm = CodeGenUtils.newName("sorter")
val retryAppend = HashAggCodeGenHelper.genRetryAppendToMap(
aggregateMapTerm, currentKeyTerm, initedAggBuffer, lookupInfo, currentAggBufferTerm)
val (dealWithAggHashMapOOM, fallbackToSortAggCode) = HashAggCodeGenHelper.genAggMapOOMHandling(
isFinal,
ctx,
builder,
(grouping, auxGrouping),
aggCallToAggFunction,
aggArgs,
aggInfoList.aggInfos.map(_.externalResultType),
udaggs,
logTerm,
aggregateMapTerm,
(groupKeyTypesTerm, aggBufferTypesTerm),
(groupKeyRowType, aggBufferRowType),
aggBufferNames,
aggBufferTypes,
outputTerm,
outputType,
outputResultFromMap,
sorterTerm,
retryAppend)
HashAggCodeGenHelper.prepareMetrics(ctx, aggregateMapTerm, if (isFinal) sorterTerm else null)
val lazyInitAggBufferCode = if (auxGrouping.nonEmpty) {
s"""
|// lazy init agg buffer (with auxGrouping)
|${initedAggBuffer.code}
""".stripMargin
} else {
""
}
val processCode =
s"""
| // input field access for group key projection and aggregate buffer update
|${ctx.reuseInputUnboxingCode(inputTerm)}
| // project key from input
|$keyProjectionCode
| // look up output buffer using current group key
|$lookupInfo = $aggregateMapTerm.lookup($currentKeyTerm);
|$currentAggBufferTerm = $lookupInfo.getValue();
|
|if (!$lookupInfo.isFound()) {
| $lazyInitAggBufferCode
| // append empty agg buffer into aggregate map for current group key
| try {
| $currentAggBufferTerm =
| $aggregateMapTerm.append($lookupInfo, ${initedAggBuffer.resultTerm});
| } catch (java.io.EOFException exp) {
| $dealWithAggHashMapOOM
| }
|}
| // aggregate buffer fields access
|${ctx.reuseInputUnboxingCode(currentAggBufferTerm)}
| // do aggregate and update agg buffer
|${aggregate.code}
|""".stripMargin.trim
val endInputCode = if (isFinal) {
val memPoolTypeTerm = classOf[BytesHashMapSpillMemorySegmentPool].getName
s"""
|if ($sorterTerm == null) {
| // no spilling, output by iterating aggregate map.
| $outputResultFromMap
|} else {
         | // spill last part of input's aggregation output buffer
| $sorterTerm.sortAndSpill(
| $aggregateMapTerm.getRecordAreaMemorySegments(),
| $aggregateMapTerm.getNumElements(),
| new $memPoolTypeTerm($aggregateMapTerm.getBucketAreaMemorySegments()));
| // only release floating memory in advance.
| $aggregateMapTerm.free(true);
| // fall back to sort based aggregation
| $fallbackToSortAggCode
|}
""".stripMargin
} else {
s"$outputResultFromMap"
}
AggCodeGenHelper.generateOperator(
ctx,
className,
classOf[TableStreamOperator[RowData]].getCanonicalName,
processCode,
endInputCode,
inputType)
}
}
|
hequn8128/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/batch/HashAggCodeGenerator.scala
|
Scala
|
apache-2.0
| 9,046
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import java.util.Properties
import java.util.concurrent.LinkedBlockingQueue
import kafka.common.TopicAndPartition
import kafka.integration.KafkaServerTestHarness
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils._
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.utils.Time
import org.apache.log4j.{Level, Logger}
import org.junit.{After, Before, Test}
import scala.collection.mutable
class ControllerFailoverTest extends KafkaServerTestHarness with Logging {
val log = Logger.getLogger(classOf[ControllerFailoverTest])
val numNodes = 2
val numParts = 1
val msgQueueSize = 1
val topic = "topic1"
val overridingProps = new Properties()
val metrics = new Metrics()
overridingProps.put(KafkaConfig.NumPartitionsProp, numParts.toString)
override def generateConfigs() = TestUtils.createBrokerConfigs(numNodes, zkConnect)
.map(KafkaConfig.fromProps(_, overridingProps))
@Before
override def setUp() {
super.setUp()
}
@After
override def tearDown() {
super.tearDown()
this.metrics.close()
}
/**
* See @link{https://issues.apache.org/jira/browse/KAFKA-2300}
* for the background of this test case
*/
@Test
def testMetadataUpdate() {
log.setLevel(Level.INFO)
var controller: KafkaServer = this.servers.head
// Find the current controller
val epochMap: mutable.Map[Int, Int] = mutable.Map.empty
for (server <- this.servers) {
epochMap += (server.config.brokerId -> server.kafkaController.epoch)
if(server.kafkaController.isActive) {
controller = server
}
}
// Create topic with one partition
kafka.admin.AdminUtils.createTopic(controller.zkUtils, topic, 1, 1)
val topicPartition = TopicAndPartition("topic1", 0)
var partitions = controller.kafkaController.partitionStateMachine.partitionsInState(OnlinePartition)
while (!partitions.contains(topicPartition)) {
partitions = controller.kafkaController.partitionStateMachine.partitionsInState(OnlinePartition)
Thread.sleep(100)
}
// Replace channel manager with our mock manager
controller.kafkaController.controllerContext.controllerChannelManager.shutdown()
val channelManager = new MockChannelManager(controller.kafkaController.controllerContext,
controller.kafkaController.config, metrics)
channelManager.startup()
controller.kafkaController.controllerContext.controllerChannelManager = channelManager
channelManager.shrinkBlockingQueue(0)
channelManager.stopSendThread(0)
// Spawn a new thread to block on the outgoing channel
// queue
val thread = new Thread(new Runnable {
def run() {
try {
controller.kafkaController.sendUpdateMetadataRequest(Seq(0), Set(topicPartition))
log.info("Queue state %d %d".format(channelManager.queueCapacity(0), channelManager.queueSize(0)))
controller.kafkaController.sendUpdateMetadataRequest(Seq(0), Set(topicPartition))
log.info("Queue state %d %d".format(channelManager.queueCapacity(0), channelManager.queueSize(0)))
} catch {
case _: Exception => log.info("Thread interrupted")
}
}
})
thread.setName("mythread")
thread.start()
while (thread.getState() != Thread.State.WAITING) {
Thread.sleep(100)
}
// Assume that the thread is WAITING because it is
// blocked on the queue, so interrupt and move forward
thread.interrupt()
thread.join()
channelManager.resumeSendThread(0)
// Wait and find current controller
var found = false
var counter = 0
while (!found && counter < 10) {
for (server <- this.servers) {
val previousEpoch = epochMap get server.config.brokerId match {
case Some(epoch) =>
epoch
case None =>
val msg = String.format("Missing element in epoch map %s", epochMap.mkString(", "))
throw new IllegalStateException(msg)
}
if (server.kafkaController.isActive
&& previousEpoch < server.kafkaController.epoch) {
controller = server
found = true
}
}
if (!found) {
Thread.sleep(100)
counter += 1
}
}
// Give it a shot to make sure that sending isn't blocking
try {
controller.kafkaController.sendUpdateMetadataRequest(Seq(0), Set(topicPartition))
} catch {
case e : Throwable => {
fail(e)
}
}
}
}
class MockChannelManager(private val controllerContext: ControllerContext, config: KafkaConfig, metrics: Metrics)
extends ControllerChannelManager(controllerContext, config, Time.SYSTEM, metrics) {
def stopSendThread(brokerId: Int) {
val requestThread = brokerStateInfo(brokerId).requestSendThread
requestThread.isRunning.set(false)
requestThread.interrupt
requestThread.join
}
def shrinkBlockingQueue(brokerId: Int) {
val messageQueue = new LinkedBlockingQueue[QueueItem](1)
val brokerInfo = this.brokerStateInfo(brokerId)
this.brokerStateInfo.put(brokerId, brokerInfo.copy(messageQueue = messageQueue))
}
def resumeSendThread (brokerId: Int) {
this.startRequestSendThread(0)
}
def queueCapacity(brokerId: Int): Int = {
this.brokerStateInfo(brokerId).messageQueue.remainingCapacity
}
def queueSize(brokerId: Int): Int = {
this.brokerStateInfo(brokerId).messageQueue.size
}
}
|
eribeiro/kafka
|
core/src/test/scala/unit/kafka/controller/ControllerFailoverTest.scala
|
Scala
|
apache-2.0
| 6,310
|
package chrome.events
import chrome.events.bindings.Event
import scala.scalajs.js
class EventSource0Impl(event: Event[js.Function0[_]]) extends EventSource[Unit] {
class SubscriptionImpl(fn: Unit => Unit) extends Subscription {
val fn2 = js.Any.fromFunction0(() => fn(()))
event.addListener(fn2)
def cancel(): Unit = {
event.removeListener(fn2)
}
}
def listen(fn: Unit => Unit): Subscription = {
new SubscriptionImpl(fn)
}
}
class EventSource1Impl[A](event: Event[js.Function1[A, _]]) extends EventSource[A] {
class SubscriptionImpl(fn: A => Unit) extends Subscription {
event.addListener(fn)
def cancel(): Unit = {
event.removeListener(fn)
}
}
def listen(fn: A => Unit): Subscription = {
new SubscriptionImpl(fn)
}
}
class EventSource2Impl[A, B](event: Event[js.Function2[A, B, _]]) extends EventSource[(A, B)] {
class SubscriptionImpl(fn: ((A, B)) => Unit) extends Subscription {
val untupled = Function.untupled(fn)
event.addListener(untupled)
def cancel(): Unit = {
event.removeListener(untupled)
}
}
def listen(fn: ((A, B)) => Unit): Subscription = {
new SubscriptionImpl(fn)
}
}
class EventSource3Impl[A, B, C](event: Event[js.Function3[A, B, C, _]]) extends EventSource[(A, B, C)] {
class SubscriptionImpl(fn: ((A, B, C)) => Unit) extends Subscription {
val untupled = Function.untupled(fn)
event.addListener(untupled)
def cancel(): Unit = {
event.removeListener(untupled)
}
}
def listen(fn: ((A, B, C)) => Unit): Subscription = {
new SubscriptionImpl(fn)
}
}
object EventSourceImplicits {
import scala.language.implicitConversions
implicit def eventAsEventSource0(event: Event[js.Function0[_]]): EventSource[Unit] = new EventSource0Impl(event)
implicit def eventAsEventSource1[A](event: Event[js.Function1[A, _]]): EventSource[A] = new EventSource1Impl(event)
implicit def eventAsEventSource2[A, B](event: Event[js.Function2[A, B, _]]): EventSource[(A, B)] =
new EventSource2Impl(event)
implicit def eventAsEventSource3[A, B, C](event: Event[js.Function3[A, B, C, _]]): EventSource[(A, B, C)] =
new EventSource3Impl(event)
}
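// Usage sketch (added illustration; `onMessage` is a hypothetical Event[js.Function1[String, _]]):
//   import EventSourceImplicits._
//   val subscription = onMessage.listen(msg => println(msg))
//   subscription.cancel()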
|
amsayk/scala-js-chrome
|
bindings/src/main/scala/chrome/events/EventSourceImplicits.scala
|
Scala
|
mit
| 2,225
|
package test_data.v20
import scala.xml.Elem
case class SectionAboutYourPayDetails(xml: Elem) {
val rootPath = xml \\ "DWPCATransaction" \\ "DWPCAClaim"
val howToGetPaidQuestion = rootPath \\ "Payment" \\ "InitialAccountQuestion" \\ "QuestionLabel"
val howToGetPaidAnswer = rootPath \\ "Payment" \\ "InitialAccountQuestion" \\ "Answer"
val howOftenGetPaidQuestion = rootPath \\ "Payment" \\ "PaymentFrequency" \\ "QuestionLabel"
val howOftenGetPaidAnswer = rootPath \\ "Payment" \\ "PaymentFrequency" \\ "Answer"
val howOftenGetPaidOther = rootPath \\ "Payment" \\ "PaymentFrequency" \\ "Other"
val bankAccountHolderName = rootPath \\ "Payment" \\ "Account" \\ "HolderName"
val bankAccountHolderNameQuestion = rootPath \\ "Payment" \\ "Account" \\ "HolderName" \\ "QuestionLabel"
val bankAccountHolderNameAnswer = rootPath \\ "Payment" \\ "Account" \\ "HolderName" \\ "Answer"
val bankAccountBankName = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Name"
val bankAccountBankNameQuestion = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Name" \\ "QuestionLabel"
val bankAccountBankNameAnswer = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Name" \\ "Answer"
val bankAccountSortCode = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Sortcode"
val bankAccountSortCodeQuestion = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Sortcode" \\ "QuestionLabel"
val bankAccountSortCodeAnswer = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "Sortcode" \\ "Answer"
val bankAccountNumber = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "AccountNumber"
val bankAccountNumberQuestion = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "AccountNumber" \\ "QuestionLabel"
val bankAccountNumberAnswer = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "AccountNumber" \\ "Answer"
val bankAccountReferenceNumber = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "RollNumber"
val bankAccountReferenceNumberQuestion = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "RollNumber" \\ "QuestionLabel"
val bankAccountReferenceNumberAnswer = rootPath \\ "Payment" \\ "Account" \\ "BuildingSocietyDetails" \\ "RollNumber" \\ "Answer"
}
|
Department-for-Work-and-Pensions/RenderingService
|
test/test_data/v20/SectionAboutYourPayDetails.scala
|
Scala
|
mit
| 2,300
|
package guige.before
object Client {
def main(args: Array[String]) {
// First, initialise a batch of users
val userList = List(
User("苏大", 3), User("牛二", 8), User("张三", 10), User("杨八", 34),
User("李四", 15), User("王五", 18), User("赵六", 23), User("马七", 34)
)
// Define a user query object
val userProvider: TUserProvider = new UserProvider(userList)
// Print the users older than 20
System.out.println("========= Users older than 20 ============")
userProvider.findUserByAgeThan(20).foreach(println)
}
}
|
zj-lingxin/DesignPattern
|
src/main/scala/guige/before/Client.scala
|
Scala
|
mit
| 587
|
package com.sksamuel.elastic4s.requests.searches.aggs.pipeline
import com.sksamuel.elastic4s.requests.script.Script
import com.sksamuel.elastic4s.requests.searches.aggs.AbstractAggregation
import com.sksamuel.elastic4s.requests.searches.sort.Sort
trait PipelineAggregationApi {
def avgBucketAgg(name: String, bucketsPath: String): AvgBucketPipelineAgg = avgBucketAggregation(name, bucketsPath)
def avgBucketAggregation(name: String, bucketsPath: String): AvgBucketPipelineAgg =
AvgBucketPipelineAgg(name, bucketsPath)
def bucketSelectorAggregation(name: String,
script: Script,
bucketsPathMap: Map[String, String]): BucketSelectorPipelineAgg =
BucketSelectorPipelineAgg(name, script, bucketsPathMap)
def bucketSortAggregation(name: String, sort: Seq[Sort]): BucketSortPipelineAgg =
BucketSortPipelineAgg(name, sort)
def bucketScriptAggregation(name: String, script: Script, bucketsPath: Map[String, String]): BucketScriptPipelineAgg =
BucketScriptPipelineAgg(name, script, bucketsPath)
def cumulativeSumAggregation(name: String, bucketsPath: String): CumulativeSumPipelineAgg =
CumulativeSumPipelineAgg(name, bucketsPath)
def derivativeAggregation(name: String, bucketsPath: String): DerivativePipelineAgg =
DerivativePipelineAgg(name, bucketsPath)
def diffAggregation(name: String, bucketsPath: String): DiffPipelineAgg = DiffPipelineAgg(name, bucketsPath)
def extendedStatsBucketAggregation(name: String, bucketsPath: String): ExtendedStatsBucketPipelineAgg =
ExtendedStatsBucketPipelineAgg(name, bucketsPath)
def maxBucketAgg(name: String, bucketsPath: String): MaxBucket = maxBucketAggregation(name, bucketsPath)
def maxBucketAggregation(name: String, bucketsPath: String): MaxBucket =
MaxBucket(name, bucketsPath)
def minBucketAggregation(name: String, bucketsPath: String): MinBucketPipelineAgg =
MinBucketPipelineAgg(name, bucketsPath)
def movingAverageAggregation(name: String, bucketsPath: String): MovAvgPipelineAgg =
MovAvgPipelineAgg(name, bucketsPath)
def percentilesBucketAggregation(name: String, bucketsPath: String): PercentilesBucketPipelineAgg =
PercentilesBucketPipelineAgg(name, bucketsPath)
def statsBucketAggregation(name: String, bucketsPath: String): StatsBucketPipelineAgg =
StatsBucketPipelineAgg(name, bucketsPath)
def sumBucketAggregation(name: String, bucketsPath: String): SumBucketPipelineAgg =
SumBucketPipelineAgg(name, bucketsPath)
}
trait PipelineAgg extends AbstractAggregation
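// Hypothetical usage sketch (not part of the original source): exercising a few of the DSL
// methods above. The aggregation names and bucket paths are illustrative only.
object PipelineAggregationApiExample extends PipelineAggregationApi {
val avgMonthlySales: AvgBucketPipelineAgg = avgBucketAggregation("avg_monthly_sales", "sales_per_month>sales")
val cumulativeSales: CumulativeSumPipelineAgg = cumulativeSumAggregation("cumulative_sales", "sales")
val maxMonthlySales: MaxBucket = maxBucketAgg("max_monthly_sales", "sales_per_month>sales")
}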
|
stringbean/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/pipeline/PipelineAggDsl.scala
|
Scala
|
apache-2.0
| 2,580
|
package ru.wordmetrix.webcrawler
/**
* EvaluatePriorityMatrix implements a strategy that estimates the possible
* semantic contribution of future web pages to the common asset. It uses two
* abstractions:
*
* - SemanticEstimator estimates web pages (seeds) on the basis of their
* content.
*
* - NetworkEstimator propagates the semantic estimation through the net of
* web pages.
*/
import java.net.URI
import scala.util.Random._
import scala.collection.immutable.SortedSet
import Gather.{GatherLink, GatherLinkContext, GatherSeeds, GatherAllow}
import SampleHierarchy2Priority.SampleHirarchy2PriorityPriority
import SeedQueue.{
SeedQueueAvailable,
SeedQueueGet,
SeedQueueLink,
SeedQueueRequest
}
import Storage.{StorageSign, StorageVictim}
import akka.actor.{Actor, Props, actorRef2Scala}
import ru.wordmetrix.utils.{CFG, CFGAware, debug}
import ru.wordmetrix.smartfile.SmartFile.fromFile
import EvaluatePriorityMatrix._
import akka.actor.ActorRef
import ru.wordmetrix.features.Features
import ru.wordmetrix.webcrawler.GMLStorage._
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
object EvaluatePriorityMatrix {
abstract sealed class EvaluatePriorityMatrixMessage
case class EvaluatePriorityMatrixSeed(seeds: Set[Seed])
extends EvaluatePriorityMatrixMessage
case object EvaluatePriorityMatrixStopTargeting
extends EvaluatePriorityMatrixMessage
case object EvaluatePriorityMatrixDump
extends EvaluatePriorityMatrixMessage
case object EvaluatePriorityMatrixStop extends EvaluatePriorityMatrixMessage
/**
* Define an EvaluatePriorityMatrix
*
* @param storage A storage for pages;
* @param gather An actor that elicits data from pages;
* @param seedqueue A dispatcher of requests;
* @param sample An actor that maintains a sample of mapping content of
* links to priorities;
* @param gml An actor that maintains a storage that dumps network
* into gml;
* @param cfg A configure object;
* @return A Props for EvaluatePriorityMatrix.
*/
def props(storage: Props, gather: Props, seedqueue: Props, sample: Props,
gml: Props, linkedvectors: Props, cfg: CFG): Props =
Props(
new EvaluatePriorityMatrix(
storage, gather, seedqueue, sample, gml, linkedvectors,
new NetworkEstimator()(cfg)
)(
cfg,
v => new SemanticEstimator(v)(cfg)
)
)
/**
* Extension of SortedSet to use as priority queue
*/
object PQ {
implicit val o = Ordering.fromLessThan[Item]({
case ((p1, u1), (p2, u2)) =>
if (p1 == p2)
u1 > u2
else
p1 < p2
}).reverse
def apply() = SortedSet[Item]()
def unapply(map: SortedSet[Item]) =
map.headOption match {
case None => None
case Some(x) => Some((x, map - x))
}
}
implicit class PQEx(set: SortedSet[Item]) {
def insert(x: Item) = set + (x)
}
case class RevMap[F](val map: Map[F, Int] = Map[F, Int](),
val rmap: Map[Int, F] = Map[Int, F](),
n: Int = 0) {
def update(word: F): (Int, RevMap[F]) = {
map.get(word) match {
case Some(x) => (x, this)
case None =>
val x = n + 1
(x, copy(
map = map + (word -> x),
rmap = rmap + (x -> word),
x))
}
}
def update(xs: Set[F])(implicit cfg: CFG): (Set[Int], RevMap[F]) =
(xs.foldLeft(Set[Int](), this) {
case ((set, index), (x)) =>
index.update(x) match {
case (n, index) => (set + n, index)
}
})
def decode(xs: Iterable[Int])(implicit cfg: CFG) = xs.map(x => rmap(x))
def decode(x: Int)(implicit cfg: CFG) = rmap(x)
}
}
class EvaluatePriorityMatrix[NE <: NetworkEstimatorBase[NE], SE <: SemanticEstimatorBase[SE]](storageprop: Props,
gatherprop: Props,
seedqueueprop: Props,
sampleprop: Props,
gmlprop: Props,
linkedvectorsprop: Props,
networkestimator: NE)(implicit cfg: CFG, factoryse: V => SE) extends Actor
with CFGAware {
override val name = "Evaluate . Matrix"
import context.dispatcher
import EvaluatePriorityMatrix._
val ns = Iterator.from(1)
val gather = context.actorOf(gatherprop, "Gather")
val seedqueue = context.actorOf(seedqueueprop, "SeedQueue")
val storage = context.actorOf(storageprop, "Storage")
val sample = context.actorOf(sampleprop, "Sample")
val gml = context.actorOf(gmlprop, "GML")
val linkedvectors = context.actorOf(linkedvectorsprop, "LinkedVectors")
gather ! GatherLink(storage, sample, gml, linkedvectors)
seedqueue ! SeedQueueLink(gather)
storage ! StorageVictim(seedqueue)
def receive(): Receive = ({
case EvaluatePriorityMatrixSeed(seeds: Set[Seed]) => {
for (seed <- seeds) {
log("Initial seed: %s", seed)
seedqueue ! SeedQueueRequest(seed)
}
context.become(phase_initialization(seeds.size, AverageVector[Word](), Set()))
}
}: Receive) orElse common()
def common(): Receive = {
case msg@GatherAllow(seeds) =>
debug("allow seeds: %s", seeds)
gather ! msg
}
/**
* Initialization Phase: download initial page(s)
*/
def phase_initialization(n: Int, central: AverageVector[Word], init_seeds: Set[Seed]): Receive = {
case msg@GatherLinkContext(_, _) => sample ! msg
case Gather.GatherSeeds(seed, seeds, v) => {
ns.next()
log("Initial phase, n = %s size = %s, seed = %s", n, seeds.size, seed)
storage ! StorageSign(seed)
if (n > 1) {
context.become(phase_initialization(n - 1, central + v, init_seeds ++ seeds))
} else {
for (seed <- shuffle(init_seeds ++ seeds).toList) {
seedqueue ! SeedQueueRequest(seed)
}
seedqueue ! EvaluatePriorityMatrixStopTargeting
val sense = factoryse((central + v).normal)
log("Start targeting " + sense.size)
context.become(
phase_targeting(
sense,
networkestimator,
RevMap[Seed]()
)
)
}
}
}
/**
* Targeting Phase: accumulate sample of pages until target is locked
*/
def phase_targeting(sense: SE, network: NE, index: RevMap[Seed]): Receive = {
case EvaluatePriorityMatrixStopTargeting => {
log("Targeting impossible, too little casualties")
//context.stop(self)
context.system.terminate()
}
case Gather.GatherSeeds(seed, seeds, v) => {
val n = ns.next()
debug("Targeting with (%d) %s %s %s", n, seed, seeds.size,
network.size)
index.update(seed) match {
case (id, index) => index.update(seeds) match {
case (ids, index) =>
sense.estimate(id, v.normal, {
debug("Seed %s was accepted as target", seed)
storage ! StorageSign(seed)
}) match {
case sense =>
val network1 = network.update(ids, sense.factor, id, v)
log("Check if %s > %s (direction is collinear to specimen)",
sense.factor * sense.central, cfg.targeting)
if (sense.factor * sense.central > cfg.targeting) {
log("Turn into estimation phase")
val network2 = network1.calculate(sense.factor)
seedqueue ! SeedQueueAvailable
self ! EvaluatePriorityMatrixDump
context.become(
phase_estimating(sense, network2, network2.queue(), index)
)
} else {
context.become(
phase_targeting(sense, network1, index)
)
}
}
}
}
}
}
/**
* Estimation Phase: estimate the fetched pages and request new ones in
* priority order.
*/
def phase_estimating(sense: SE,
network: NE,
queue: SortedSet[Item], index: RevMap[Seed]): Receive = {
case EvaluatePriorityMatrixStop =>
debug("ActorSystem shutdown")
context.system.shutdown()
case GatherSeeds(seed, seeds, v) => {
val n = ns.next()
log("Estimation of seed #%s (%s): %s, queue = %s",
n, cfg.limit, seed, queue.size)
sense match {
case sense: SemanticEstimator if (n % 500 == 0) =>
gml ! GMLStorageEstimator(sense)
case _ =>
}
if (n > cfg.limit) {
log("Limit has been reached")
implicit val timeout = Timeout(5 seconds)
val f = gather ? Gather.GatherDecode(sense.factor)
f.onSuccess {
case x: VS => (cfg.path / "vocabulary.dat") write (x)
}
sample ! EvaluatePriorityMatrixStop
seedqueue ! EvaluatePriorityMatrixStop
sense match {
case sense: SemanticEstimator =>
gml ! GMLStorageEstimator(sense)
case _ =>
}
gml ! EvaluatePriorityMatrixStop
linkedvectors ! EvaluatePriorityMatrixStop
} else {
index.update(seed) match {
case (id, index) => index.update(seeds) match {
case (ids, index) =>
val sense1 = sense.estimate(id, v.normal, {
debug("Seed %s was accepted as target", seed)
storage ! StorageSign(seed)
})
debug("Check if %s > %s (direction is collinear to specimen)",
sense.factor * sense.central, cfg.targeting)
debug("Priorities actual while %s > %s",
sense1.factor.normal * sense.factor.normal, cfg.prioriting)
val network1 = network.check(sense1.factor.normal)
.update(ids, sense.factor, id, v)
val queue2 = network1.queue(queue)
sample ! SampleHirarchy2PriorityPriority(seed,
sense.factor * v.normal)
seedqueue ! SeedQueueAvailable
context.become(phase_estimating(sense1, network1, queue2, index))
}
}
}
}
case SeedQueueGet => {
debug("Get dispather request %s", sender)
queue match {
case PQ((priority, seed), queue) =>
log("Request priority = %s for %s", priority, seed)
//TODO: check continue of estimation phase
sender ! SeedQueueRequest(index.decode(seed))
context.become(phase_estimating(
sense, network.eliminate(seed), queue, index
))
case _ => {
debug("Queue was empty")
}
}
}
}
}
abstract class SemanticEstimatorBase[SE <: SemanticEstimatorBase[SE]] {
def estimate(seed: SeedId, v: V, callback: => Unit): SE
def factor: V
val central: V
val size: Int
}
abstract trait NetworkEstimatorBase[U <: NetworkEstimatorBase[U]] {
def queue(queue: SortedSet[Item] = PQ()): SortedSet[Item]
def calculate(factor: V): U
def update(seeds: Set[SeedId], factor: V, source_seed: SeedId, v: V): U
def check(factor: V): U
def eliminate(seed: SeedId): U
val size: Int
}
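// Hypothetical sketch (not part of the original source): a NetworkEstimatorBase implementation
// that performs no propagation at all, only to illustrate the contract EvaluatePriorityMatrix
// relies on. The aliases V, Item and SeedId are assumed to come from the surrounding package,
// exactly as in the abstract members above.
class NoopNetworkEstimator extends NetworkEstimatorBase[NoopNetworkEstimator] {
def queue(queue: SortedSet[Item] = PQ()): SortedSet[Item] = queue
def calculate(factor: V): NoopNetworkEstimator = this
def update(seeds: Set[SeedId], factor: V, source_seed: SeedId, v: V): NoopNetworkEstimator = this
def check(factor: V): NoopNetworkEstimator = this
def eliminate(seed: SeedId): NoopNetworkEstimator = this
val size: Int = 0
}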
|
electricmind/webcrawler
|
src/main/scala/ru/wordmetrix/webcrawler/EvaluatePriorityMatrix.scala
|
Scala
|
apache-2.0
| 11,702
|
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.wegtam.scalatest.tags
import org.scalatest.Tag
/**
* The annotated test needs a H2 database.
*/
object DbTestH2 extends Tag("com.wegtam.scalatest.tags.DbTestH2")
|
Tensei-Data/tensei-agent
|
src/it/scala/com/wegtam/scalatest/tags/DbTestH2.scala
|
Scala
|
agpl-3.0
| 933
|
package scalaSci.CommonMaths
import org.apache.commons.math3.linear._
import scalaSci.RichDouble1DArray
import scalaSci.Vec
// This class provides a higher-level interface to the Apache Commons Math
// Array2DRowRealMatrix class.
class Mat(ar: Array2DRowRealMatrix) extends AnyRef with scalaSci.scalaSciMatrix[scalaSci.CommonMaths.Mat] {
var Nrows = ar.getDataRef().length
var Ncols = ar.getDataRef()(0).length
var v = ar.getDataRef()
final def getv = ar.getDataRef()
var rm = ar
final def getNativeMatrixRef() = rm // the scalaSci.CommonMaths.Mat class wraps the
// org.apache.commons.math.linear.Array2DRowRealMatrix class
final def matFromNative = new Mat(rm)
final def matFromNative(ar: Array2DRowRealMatrix) = new Mat(ar.getDataRef())
final def numRows() = Nrows
final def numColumns() = Ncols
final def length() = Nrows * Ncols
final def size() = (Nrows, Ncols)
final def this(n: Int, m: Int) =
this(new Array2DRowRealMatrix(n, m))
final def this(d: Array[Array[Double]]) =
this(new Array2DRowRealMatrix(d, false)) // false means do not copy the array
// construct a Matrix from a tuple of values corresponding to its size
final def this(tuple: (Int, Int)) =
this(tuple._1, tuple._2)
final def set(row: Int, column: Int, value: Double) = {
v(row)(column) = value
}
final def get(row: Int, column: Int) = v(row)(column)
import Mat._
override final def clone() = {
val arc = ar.copy().asInstanceOf[Array2DRowRealMatrix]
new Mat(arc)
}
final def copy() = {
// same as clone()
clone()
}
// copy to a new matrix, perhaps resizing also matrix
final def copy(newNrows: Int, newNcols: Int) = {
var cpMat = new Mat(newNrows, newNcols) // create a new Matrix
val mnNrows = if (newNrows < Nrows) newNrows else Nrows
val mnNcols = if (newNcols < Ncols) newNcols else Ncols
// copy the original matrix within the new one
var r = 0;
var c = 0
while (r < mnNrows) {
c = 0
while (c < mnNcols) {
cpMat(r, c) = this(r, c)
c += 1
}
r += 1
}
cpMat
}
final def apply(n: Int) = {
var nr = n / Ncols
var nc = n - nr * Ncols
v(nr)(nc)
}
final def apply(n: Int, m: Int) = v(n)(m)
// updating a single element of the Matrix
final def update(n: Int, m: Int, value: Double) = {
v(n)(m) = value
}
final def *(that: Mat) = new Mat(this.rm.multiply(that.rm))
final def det() = {
var aa = Array.ofDim[Double](this.Nrows, this.Ncols)
var r = 0
while (r < this.Nrows) {
var c = 0
while (c < this.Ncols) {
aa(r)(c) = this(r, c)
c += 1
}
r += 1
}
var rdaa = new scalaSci.RichDouble2DArray(aa)
scalaSci.StaticMaths.det(rdaa)
}
final def inv() = {
var aa = Array.ofDim[Double](this.Nrows, this.Ncols)
var r = 0;
var c = 0
while (r < this.Nrows) {
c = 0
while (c < this.Ncols) {
aa(r)(c) = this(r, c)
c += 1
}
r += 1
}
var iaa = new Jama.jMatrix(aa).inverse()
new scalaSci.CommonMaths.Mat(iaa.getArray())
}
final def norm2(): Double = {
scalaSci.math.LinearAlgebra.LinearAlgebra.norm2(this.toDoubleArray)
}
final def norm1(): Double = {
scalaSci.math.LinearAlgebra.LinearAlgebra.norm1(this.toDoubleArray)
}
final def normF(Mmat: Mat): Double = {
scalaSci.math.LinearAlgebra.LinearAlgebra.normF(this.toDoubleArray)
}
final def normInf(Mmat: Mat): Double = {
scalaSci.math.LinearAlgebra.LinearAlgebra.normInf(this.toDoubleArray)
}
final def trace() = scalaSci.math.LinearAlgebra.LinearAlgebra.trace(v)
final def rank() = scalaSci.math.LinearAlgebra.LinearAlgebra.rank(v)
final def cond() = scalaSci.math.LinearAlgebra.LinearAlgebra.cond(v)
// solve the system, using JLAPACK for overdetermined/underdetermined cases
final def solve(b: Mat) = {
if (b.numRows() == this.numColumns) { // direct solve
val solution = scalaSci.math.LinearAlgebra.LinearAlgebra.solve(this.v, b.v)
new Mat(solution)
}
else // overdetermined/underdetermined case
new Mat(scalaSci.ILapack.DGELS(this.toDoubleArray, b.v))
}
final def \\ (b: Mat) = solve (b)
// slash or right matrix divide
final def / (B: Mat) = this * B.inv ()
final def / (B: scalaSci.RichDouble2DArray) = this * B.inv ()
final def / (B: Array[Array[Double]] ) = this * (new scalaSci.RichDouble2DArray (B) ).inv ()
final def / (B: scalaSci.RichDouble1DArray) = this * (new scalaSci.RichDouble2DArray (B) ).inv ()
// Mat * RichDouble2DArray
override final def * (that: scalaSci.RichDouble2DArray): Mat = {
var rN = this.Nrows;
var rM = this.Ncols;
var sN = that.Nrows;
var sM = that.Ncols;
var v1Colj = new Array[Double] (rM)
var result = new Mat (this.Nrows, that.Ncols)
var j = 0;
var k = 0;
while (j < sM) {
k = 0
while (k < rM) {
v1Colj (k) = that (k, j)
k += 1
}
var i = 0;
while (i < rN) {
var Arowi = this.v (i)
var s = 0.0;
k = 0
while (k < rM) {
s += Arowi (k) * v1Colj (k)
k += 1
}
result (i, j) = s;
i += 1
}
j += 1
}
return result
}
// compute eigenvalues/eigenvectors using MTJ
final def eig () = {
// construct an MTJ Matrix
var mtjMat = new scalaSci.MTJ.Mat (this.toDoubleArray)
mtjMat.eig ()
}
final def svd () = {
var S = scalaSci.ILapack.svd (this.toDoubleArray)
(new scalaSci.RichDouble2DArray (S._1), new scalaSci.RichDouble1DArray (S._2), new scalaSci.RichDouble2DArray (S._3) )
}
// SVD using Apache Common Library
final def asvd (ac: Mat) = {
val rm = ac.getNativeMatrixRef // get the native real matrix reference
val svdObj = new org.apache.commons.math3.linear.SingularValueDecomposition (rm)
(new Mat (svdObj.getU ().getData), new Mat (svdObj.getS ().getData), new Mat (svdObj.getV ().getData) )
}
final def pinv () = {
val ejmlM = new scalaSci.EJML.Mat (this.getv)
val pejml = ejmlM.pinv
val nrows = pejml.Nrows
val ncols = pejml.Ncols
var pM = new Mat (nrows, ncols)
var n = 0
while (n < nrows) {
var m = 0
while (m < ncols) {
pM (n, m) = pejml (n, m)
m += 1
}
n += 1
}
pM
}
// Reduced-Row Echelon form
final def rref () = {
var xd = this.toDoubleArray ()
var exd = new org.ejml.data.DenseMatrix64F (xd)
var reduced = org.ejml.ops.CommonOps.rref (exd, - 1, null)
new Mat (scalaSci.EJML.StaticMathsEJML.DenseMatrixToDoubleArray (reduced) )
}
}
object Mat {
// a convenience constructor that allows constructing a matrix e.g. as
// var x = Mat(3,7) instead of var x = new Mat(3, 7)
final def apply(nrows: Int, ncols: Int) = new Mat(nrows, ncols)
/* e.g.
var xx = 3.4
var a = Mat( 2, 4,
3.4, 5.6, -6.7, -xx,
-6.1, 2.4, -0.5, cos(0.45*xx))
*/
final def apply(values: Double*) = {
val nrows = values(0).toInt // number of rows
val ncols = values(1).toInt // number of cols
val dvalues = values.toArray
var cpos = 2 // current position in array
var sm = new Mat(nrows, ncols) // create a Mat
for (r <- 0 until nrows)
for (c <- 0 until ncols) {
sm(r, c) = values(cpos) // copy value
cpos += 1
}
sm // return the constructed matrix
}
/* e.g.
var xx = 8.3
var am = $(xx, 1-xx, cos(xx), null, xx+0.3*xx, 5.6, -3.4)
*/
final def $(values: Any*) = {
// count number of nulls, number of nulls will be the number of rows
var nullCnt = 0
for (v <- values)
if (v == null) nullCnt += 1
// count number of columns
var colCnt = 0
var vl = values.length
while (colCnt < vl && values(colCnt) != null)
colCnt += 1
var rowCnt = nullCnt + 1 // number of rows of the new Matrix
// take the first element.
// It can be either a Matrix or a double number
var vv = values(0)
if (vv.isInstanceOf[scalaSci.scalaSciMatrix[Any]]) {
// we synthesize our Matrix from Matrices
// take parameters of the submatrices
var vv0 = vv.asInstanceOf[scalaSci.scalaSciMatrix[Any]]
var nrowsSubm = vv0.numRows()
var ncolsSubm = vv0.numColumns()
// construct the new Matrix
var nm = new Mat(rowCnt * nrowsSubm, colCnt * ncolsSubm)
var cpos = 0
for (r <- 0 until rowCnt)
for (c <- 0 until colCnt) {
var cv = values(cpos)
if (cv == null) cpos += 1
cv = values(cpos)
var crow = r * nrowsSubm
var ccol = c * ncolsSubm
cv match {
case null =>
case v: scalaSci.scalaSciMatrix[Any] =>
for (rs <- 0 until nrowsSubm)
for (cs <- 0 until ncolsSubm)
nm(crow + rs, ccol + cs) = v(rs, cs)
case _ =>
}
cpos += 1 // next element
}
nm
}
else {
// construct the new Matrix
var nm = new Mat(rowCnt, colCnt)
var cpos = 0
for (r <- 0 until rowCnt)
for (c <- 0 until colCnt) {
var cv = values(cpos)
if (cv == null) cpos += 1
cv = values(cpos)
cv match {
case null =>
case v: Int => nm(r, c) = v
case v: Double => nm(r, c) = v
case _ =>
}
cpos += 1
}
nm
}
}
}
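// Hypothetical usage sketch (not part of the original source): building a small matrix with the
// convenience apply above and using a couple of the operations defined on Mat.
object MatExample {
val m = Mat(2, 2,
1.0, 2.0,
3.0, 4.0) // a 2x2 matrix
val product = m * m // matrix multiplication backed by Apache Commons Math
val inverse = m.inv() // inverse computed via the Jama-based helper above
}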
|
scalalab/scalalab
|
source/src/main/scala/scalaSci/CommonMaths/Mat.scala
|
Scala
|
mit
| 9,157
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.optimization.tfocs.fs.generic.double
import org.apache.spark.mllib.optimization.tfocs.{ Mode, ProxCapableFunction, ProxMode, ProxValue, SmoothFunction, Value, VectorSpace }
import org.apache.spark.mllib.optimization.tfocs.VectorSpace._
import org.apache.spark.storage.StorageLevel
/**
* A smooth objective function created by applying smoothing to a prox capable function at a prox
* center. This smoothing function is the basis of the smooth conic dual solver in TFOCS_SCD.scala.
*
* @param objectiveF The prox capable function.
* @param mu The smoothing parameter.
* @param x0 The prox center.
* @param vs A VectorSpace implementation supporting operations on vectors of type X.
* @tparam X A type representing a vector on which the function operates.
*
* NOTE In matlab tfocs this functionality is implemented in tfocs_SCD.m.
* @see [[https://github.com/cvxr/TFOCS/blob/master/tfocs_SCD.m]]
*/
class SmoothDual[X](objectiveF: ProxCapableFunction[X], mu: Double, x0: X)(
implicit vs: VectorSpace[X]) extends SmoothFunction[X] {
vs.cache(x0)
override def apply(ATz: X, mode: Mode): Value[X] = {
val offsetCenter = vs.combine(mu, ATz, 1.0, x0)
val ProxValue(proxF, Some(proxMinimizer)) = objectiveF(offsetCenter, mu, ProxMode(mode.f, true))
// Cache proxMinimizer when it will be required more than once.
if (mode.f) vs.cache(proxMinimizer)
val f = if (mode.f) {
// TODO This might be optimized as a single spark job.
val diff = vs.combine(1.0, x0, -1.0, proxMinimizer)
Some(vs.dot(ATz, proxMinimizer) - proxF.get - (0.5 / mu) * vs.dot(diff, diff))
} else {
None
}
val g = if (mode.g) {
Some(vs.combine(-1.0, proxMinimizer, 0.0, proxMinimizer))
} else {
None
}
Value(f, g)
}
}
|
databricks/spark-tfocs
|
src/main/scala/org/apache/spark/mllib/optimization/tfocs/fs/generic/double/SmoothDual.scala
|
Scala
|
apache-2.0
| 2,621
|
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.utils
import java.util.Properties
import kafka.manager.ActorModel._
import kafka.manager.{ClusterConfig, Kafka_0_8_2_0}
import kafka.manager.utils.zero81._
import org.apache.zookeeper.data.Stat
import scala.concurrent.Future
/**
* @author hiral
*/
class TestReassignPartitions extends CuratorAwareTest {
import ReassignPartitionErrors._
private[this] val adminUtils = new AdminUtils(Kafka_0_8_2_0)
private[this] val reassignPartitionCommand = new ReassignPartitionCommand(adminUtils)
private[this] val brokerList = IndexedSeq(1,2,3)
private[this] val defaultClusterConfig = ClusterConfig("test","0.8.2.0","localhost:2818",100,false,true)
private[this] def mytopic1 : TopicIdentity = getTopicIdentity("mytopic1")
private[this] def mytopic2 : TopicIdentity = getTopicIdentity("mytopic2")
private[this] def mytopic3 : TopicIdentity = getTopicIdentity("mytopic3")
override protected def beforeAll(): Unit = {
super.beforeAll()
withCurator { curator =>
val properties = new Properties()
properties.put(LogConfig.RententionMsProp,"86400000")
adminUtils.createTopic(curator,brokerList,"mytopic1",3,3,properties)
adminUtils.createTopic(curator,brokerList,"mytopic2",6,3)
adminUtils.createTopic(curator,brokerList,"mytopic3",9,3)
}
}
private[this] def getTopicIdentity(topic: String): TopicIdentity = {
produceWithCurator { curator =>
val stat = new Stat
val json : String = curator.getData.storingStatIn(stat).forPath(ZkUtils.getTopicPath(topic))
val configStat = new Stat
val configJson : String = curator.getData.storingStatIn(configStat).forPath(ZkUtils.getTopicConfigPath(topic))
val td: TopicDescription = TopicDescription(topic,(stat.getVersion,json),None,Future.successful(Map.empty),Option((configStat.getVersion,configJson)),false)
TopicIdentity.from(brokerList.size,td,None, defaultClusterConfig)
}
}
test("reassign partitions with empty set") {
withCurator { curator =>
assert(reassignPartitionCommand.executeAssignment(curator,Map.empty, Map.empty).isFailure)
assert(curator.checkExists().forPath(ZkUtils.ReassignPartitionsPath) == null)
}
}
test("reassign partitions with out of sync partition count") {
checkError[PartitionsOutOfSync] {
withCurator { curator =>
val current = Map("mytopic1" -> mytopic1, "mytopic2" -> mytopic2, "mytopic3" -> mytopic3)
val generated = current.map { case (t,td) =>
(t,reassignPartitionCommand.generateAssignment(
brokerList,
td.copy(partitions = td.partitions - 1, partitionsIdentity = td.partitionsIdentity - (td.partitions - 1))).get)
}
reassignPartitionCommand.executeAssignment(curator,current,generated).get
}
}
}
test("reassign partitions with out of sync replication factor") {
checkError[ReplicationOutOfSync] {
withCurator { curator =>
val current = Map("mytopic1" -> mytopic1, "mytopic2" -> mytopic2, "mytopic3" -> mytopic3)
val generated = current.map { case (t,td) =>
(t,reassignPartitionCommand.generateAssignment(
brokerList,
td.copy(partitionsIdentity = td.partitionsIdentity.map { case (p,l) => (p, l.copy(replicas = l.replicas.drop(1)))})).get)
}
reassignPartitionCommand.executeAssignment(curator,current,generated).get
}
}
}
test("reassign partitions") {
withCurator { curator =>
val current = Map("mytopic1" -> mytopic1, "mytopic2" -> mytopic2, "mytopic3" -> mytopic3)
val generated = current.map { case (t,td) =>
(t,reassignPartitionCommand.generateAssignment(
brokerList,
td).get)
}
assert(reassignPartitionCommand.executeAssignment(curator,current,generated).isSuccess)
}
}
test("reassign partitions already running") {
checkError[ReassignmentAlreadyInProgress] {
withCurator { curator =>
val current = Map("mytopic1" -> mytopic1, "mytopic2" -> mytopic2, "mytopic3" -> mytopic3)
val generated = current.map { case (t,td) =>
(t,reassignPartitionCommand.generateAssignment(
brokerList,
td).get)
}
reassignPartitionCommand.executeAssignment(curator,current,generated).get
}
}
}
}
|
cvcal/kafka-manager
|
test/kafka/manager/utils/TestReassignPartitions.scala
|
Scala
|
apache-2.0
| 4,474
|
package benchmark
/** Builder of a [[KeySeq]]
*
* @tparam K the type of the keys
*/
trait KeySeqBuilder[K] {
/** Return a [[KeySeq]] having at least the given size. */
def build(size: Int): KeySeq[K]
}
object KeySeqBuilder {
/** Builder of a sequence of `Int` keys.
* Simply maps the sequence index to itself.
*/
implicit object IntKeySeqBuilder extends KeySeqBuilder[Int] {
def build(_size: Int) = new KeySeq[Int] {
def apply(idx: Int) = idx
def size = _size
}
}
/** Builder of a sequence of `AnyRef` keys. */
implicit object AnyRefKeySeqBuilder extends KeySeqBuilder[AnyRef] {
def build(_size: Int) = new KeySeq[AnyRef] {
private[this] val arr = new Array[AnyRef](size)
for (i <- 0 until size) arr(i) = new AnyRef()
def apply(idx: Int) = arr(idx)
def size = _size
}
}
}
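// Hypothetical usage sketch (not part of the original source): resolving a builder implicitly
// and reading back a key; the requested size of 4 is arbitrary.
object KeySeqBuilderExample {
def firstKey[K](size: Int)(implicit kb: KeySeqBuilder[K]): K = kb.build(size)(0)
val firstIntKey: Int = firstKey[Int](4) // 0, since Int keys simply mirror the index
val firstRefKey: AnyRef = firstKey[AnyRef](4) // a freshly allocated AnyRef
}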
|
felixmulder/scala
|
test/benchmarks/src/main/scala/benchmark/KeySeqBuilder.scala
|
Scala
|
bsd-3-clause
| 859
|
object PinCodes {
// Convert a non-negative BigInt to a string in the base defined by the given character set.
def toBase(from: BigInt, base: Array[Char]): String = {
var valor = from
var converted: String = ""
val baseLen = base.length
while (valor > 0) {
val resto = valor % baseLen
valor = valor / baseLen
converted += base(resto.toInt)
}
converted.reverse
}
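// For example (illustrative only): toBase(BigInt(255), "0123456789ABCDEF".toCharArray) == "FF"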
def main(args: Array[String]) {
//if (args.length != 2) {}
println("quais caracteres sao validos no pincode? (sem nenhum separador)")
val baseString = readLine()
println()
// @todo remover chars duplicados
println("quantos numeros voce quer gerar?")
val generate:BigInt = readInt() + 1 // incremento 1 para não iniciar sempre com firstBase10
println()
println("qual sera o prefixo do pincode? (vazio para nenhum)")
val prefix = readLine()
println()
println("quantos caracteres tera cada pincode? (sem contar o prefixo)")
val pinCodeLen = readInt()
println()
val base = baseString.toCharArray
val baseLen:BigInt = base.length()
val firstBase10 = baseLen.pow(pinCodeLen - 1)
val lastBase10 = baseLen.pow(pinCodeLen) - 1
val countMaxBaseN = lastBase10 - firstBase10
val distribute = countMaxBaseN / generate
if (generate > countMaxBaseN) {
println()
println("impossivel gerar esta quantidade de pincodes")
println("podemos gerar " + countMaxBaseN + " numeros com " + pinCodeLen + " caracteres na base " + baseLen + ".");
println("para gerar " + generate + " numeros podemos usa a distribuicao " + distribute)
System.exit(1)
}
val fileName = "pincodes-gerados.txt";
println("os pincodes serao gerados no arquivo " + fileName)
println()
var rand = new scala.util.Random
var current = firstBase10 + ( rand.nextInt(Integer.MAX_VALUE) % distribute );
var generated = 1;
val out = new java.io.FileWriter(fileName)
while (generated < generate) {
out.write(prefix + toBase(current, base) + "\r\n")
current += (rand.nextInt(Integer.MAX_VALUE) % distribute) + 1
generated += 1
}
out.close
println()
println()
println( "pincodes gerados!" );
}
}
|
thiagooak/gera-pincode
|
PinCodes.scala
|
Scala
|
mit
| 2,061
|
package slick.relational
import slick.ast._
import slick.basic.{BasicActionComponent, BasicProfile}
import slick.compiler.{EmulateOuterJoins, Phase, QueryCompiler}
import slick.dbio._
import slick.lifted.FunctionSymbolExtensionMethods._
import slick.lifted._
import scala.language.{higherKinds, implicitConversions}
import scala.reflect.ClassTag
/** A profile for relational databases that does not assume the existence
* of SQL (or any other text-based language for executing statements).
* It requires a relational table structure as its basic model of data. */
trait RelationalProfile extends BasicProfile with RelationalTableComponent
with RelationalSequenceComponent with RelationalTypesComponent
with RelationalActionComponent { self: RelationalProfile =>
@deprecated("Use the Profile object directly instead of calling `.profile` on it", "3.2")
override val profile: RelationalProfile = this
type Backend <: RelationalBackend
override protected def computeCapabilities = super.computeCapabilities ++ RelationalCapabilities.all
trait API extends super.API with ImplicitColumnTypes {
type FastPath[T] = SimpleFastPathResultConverter[ResultConverterDomain, T]
type Table[T] = self.Table[T]
type Sequence[T] = self.Sequence[T]
val Sequence = self.Sequence
type ColumnType[T] = self.ColumnType[T]
type BaseColumnType[T] = self.BaseColumnType[T]
val MappedColumnType = self.MappedColumnType
@deprecated("Use an explicit conversion to an Option column with `.?`", "3.0")
implicit def columnToOptionColumn[T : BaseTypedType](c: Rep[T]): Rep[Option[T]] = c.?
implicit def valueToConstColumn[T : TypedType](v: T): LiteralColumn[T] = new LiteralColumn[T](v)
implicit def columnToOrdered[T : TypedType](c: Rep[T]): ColumnOrdered[T] = ColumnOrdered[T](c, Ordering())
implicit def tableQueryToTableQueryExtensionMethods[T <: RelationalProfile#Table[_], U](q: Query[T, U, Seq] with TableQuery[T]): TableQueryExtensionMethods[T, U] =
new TableQueryExtensionMethods[T, U](q)
implicit def streamableCompiledInsertActionExtensionMethods[EU](c: StreamableCompiled[_, _, EU]): InsertActionExtensionMethods[EU] = createInsertActionExtensionMethods[EU](c.compiledInsert.asInstanceOf[CompiledInsert])
implicit def queryInsertActionExtensionMethods[U, C[_]](q: Query[_, U, C]): InsertActionExtensionMethods[U] = createInsertActionExtensionMethods[U](compileInsert(q.toNode))
implicit def schemaActionExtensionMethods(sd: SchemaDescription): SchemaActionExtensionMethods = createSchemaActionExtensionMethods(sd)
implicit def fastPathExtensionMethods[T, P](mp: MappedProjection[T, P]): FastPathExtensionMethods[ResultConverterDomain, T, P] = new FastPathExtensionMethods[ResultConverterDomain, T, P](mp)
}
val api: API
final lazy val compiler = computeQueryCompiler
protected def computeQueryCompiler: QueryCompiler = {
val base = QueryCompiler.standard
val canJoinLeft = capabilities contains RelationalCapabilities.joinLeft
val canJoinRight = capabilities contains RelationalCapabilities.joinRight
val canJoinFull = capabilities contains RelationalCapabilities.joinFull
if(canJoinLeft && canJoinRight && canJoinFull) base
else base.addBefore(new EmulateOuterJoins(canJoinLeft, canJoinRight), Phase.expandRecords)
}
class TableQueryExtensionMethods[T <: RelationalProfile#Table[_], U](val q: Query[T, U, Seq] with TableQuery[T]) {
/** Get the schema description (DDL) for this table. */
def schema: SchemaDescription = buildTableSchemaDescription(q.shaped.value.asInstanceOf[Table[_]])
/** Create a `Compiled` query which selects all rows where the specified
* key matches the parameter value. */
def findBy[P](f: (T => Rep[P]))(implicit ashape: Shape[ColumnsShapeLevel, Rep[P], P, Rep[P]], pshape: Shape[ColumnsShapeLevel, P, P, _]): CompiledFunction[Rep[P] => Query[T, U, Seq], Rep[P], P, Query[T, U, Seq], Seq[U]] = {
import self.api._
Compiled { (p: Rep[P]) => (q: Query[T, U, Seq]).filter(table => Library.==.column[Boolean](f(table).toNode, p.toNode)) }
}
}
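// Hypothetical usage sketch (not part of the original source), assuming a TableQuery `people`
// with a `name` column:
//   val byName = people.findBy(_.name)
//   db.run(byName("Alice").result)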
/** Run a query synchronously on the provided session. This is used by DistributedProfile until we
* can make it fully asynchronous. */
def runSynchronousQuery[R](tree: Node, param: Any)(implicit session: Backend#Session): R
class FastPathExtensionMethods[M <: ResultConverterDomain, T, P](val mp: MappedProjection[T, P]) {
def fastPath(fpf: (TypeMappingResultConverter[M, T, _] => SimpleFastPathResultConverter[M, T])): MappedProjection[T, P] = mp.genericFastPath {
case tm @ TypeMappingResultConverter(_: ProductResultConverter[_, _], _, _) =>
fpf(tm.asInstanceOf[TypeMappingResultConverter[M, T, _]])
case tm => tm
}
}
}
object RelationalProfile {
/** Extra column options for RelationalProfile */
object ColumnOption {
/** Default value for the column. Needs to wrap an Option for nullable Columns. */
case class Default[T](val defaultValue: T) extends ColumnOption[T]
/** Number of unicode characters for string-like types. Unlike DBType this is portable
* between different DBMS. Note that for DDL Slick currently picks type CHAR when
* varying=false and VARCHAR when varying=true. Slick uses VARCHAR or VARCHAR(254) in DDL for
* String columns if neither ColumnOption DBType nor Length are given.
*
* @param varying indicates whether this is just the maximum length of a varying-length column */
case class Length(length: Int, varying: Boolean = true) extends ColumnOption[Nothing]
}
}
trait RelationalTableComponent { self: RelationalProfile =>
def buildTableSchemaDescription(table: Table[_]): SchemaDescription
trait ColumnOptions {
val PrimaryKey = ColumnOption.PrimaryKey
def Default[T](defaultValue: T) = RelationalProfile.ColumnOption.Default[T](defaultValue)
val AutoInc = ColumnOption.AutoInc
val Unique = ColumnOption.Unique
val Length = RelationalProfile.ColumnOption.Length
}
val columnOptions: ColumnOptions = new ColumnOptions {}
abstract class Table[T](_tableTag: Tag, _schemaName: Option[String], _tableName: String) extends AbstractTable[T](_tableTag, _schemaName, _tableName) { table =>
final type TableElementType = T
def this(_tableTag: Tag, _tableName: String) = this(_tableTag, None, _tableName)
def tableProvider: RelationalProfile = self
def tableIdentitySymbol: TableIdentitySymbol = SimpleTableIdentitySymbol(self, schemaName.getOrElse("_"), tableName)
val O: self.columnOptions.type = columnOptions
/**
* Note that Slick uses VARCHAR or VARCHAR(254) in DDL for String
* columns if neither ColumnOption DBType nor Length are given.
*/
def column[C](n: String, options: ColumnOption[C]*)(implicit tt: TypedType[C]): Rep[C] = {
if(tt == null) throw new NullPointerException(
"implicit TypedType[C] for column[C] is null. "+
"This may be an initialization order problem. "+
"When using a MappedColumnType, you may want to change it from a val to a lazy val or def.")
new Rep.TypedRep[C] {
override def toNode =
Select((tableTag match {
case r: RefTag => r.path
case _ => tableNode
}), FieldSymbol(n)(options, tt)) :@ tt
override def toString = (tableTag match {
case r: RefTag => "(" + _tableName + " " + r.path + ")"
case _ => _tableName
}) + "." + n
}
}
}
}
trait RelationalSequenceComponent { self: RelationalProfile =>
def buildSequenceSchemaDescription(seq: Sequence[_]): SchemaDescription
class Sequence[T] private[Sequence] (val name: String,
val _minValue: Option[T],
val _maxValue: Option[T],
val _increment: Option[T],
val _start: Option[T],
val _cycle: Boolean)(implicit val tpe: TypedType[T], val integral: Integral[T])
{ seq =>
def min(v: T) = new Sequence[T](name, Some(v), _maxValue, _increment, _start, _cycle)
def max(v: T) = new Sequence[T](name, _minValue, Some(v), _increment, _start, _cycle)
def inc(v: T) = new Sequence[T](name, _minValue, _maxValue, Some(v), _start, _cycle)
def start(v: T) = new Sequence[T](name, _minValue, _maxValue, _increment, Some(v), _cycle)
def cycle = new Sequence[T](name, _minValue, _maxValue, _increment, _start, true)
final def next = Library.NextValue.column[T](toNode)
final def curr = Library.CurrentValue.column[T](toNode)
def toNode = SequenceNode(name)(_increment.map(integral.toLong).getOrElse(1))
def schema: SchemaDescription = buildSequenceSchemaDescription(this)
}
object Sequence {
def apply[T : TypedType : Integral](name: String) = new Sequence[T](name, None, None, None, None, false)
}
}
trait RelationalTypesComponent { self: RelationalProfile =>
type ColumnType[T] <: TypedType[T]
type BaseColumnType[T] <: ColumnType[T] with BaseTypedType[T]
val MappedColumnType: MappedColumnTypeFactory
trait MappedColumnTypeFactory {
def base[T : ClassTag, U : BaseColumnType](tmap: T => U, tcomap: U => T): BaseColumnType[T]
protected[this] def assertNonNullType(t: BaseColumnType[_]): Unit =
if(t == null)
throw new NullPointerException("implicit BaseColumnType[U] for MappedColumnType.base[T, U] is null. This may be an initialization order problem.")
}
trait ImplicitColumnTypes {
implicit def isomorphicType[A, B](implicit iso: Isomorphism[A, B], ct: ClassTag[A], jt: BaseColumnType[B]): BaseColumnType[A] =
MappedColumnType.base[A, B](iso.map, iso.comap)
implicit def booleanColumnType: BaseColumnType[Boolean]
implicit def bigDecimalColumnType: BaseColumnType[BigDecimal] with NumericTypedType
implicit def byteColumnType: BaseColumnType[Byte] with NumericTypedType
implicit def charColumnType: BaseColumnType[Char]
implicit def doubleColumnType: BaseColumnType[Double] with NumericTypedType
implicit def floatColumnType: BaseColumnType[Float] with NumericTypedType
implicit def intColumnType: BaseColumnType[Int] with NumericTypedType
implicit def longColumnType: BaseColumnType[Long] with NumericTypedType
implicit def shortColumnType: BaseColumnType[Short] with NumericTypedType
implicit def stringColumnType: BaseColumnType[String]
}
}
trait RelationalActionComponent extends BasicActionComponent { self: RelationalProfile =>
//////////////////////////////////////////////////////////// Insert Actions
type InsertActionExtensionMethods[T] <: InsertActionExtensionMethodsImpl[T]
def createInsertActionExtensionMethods[T](compiled: CompiledInsert): InsertActionExtensionMethods[T]
trait InsertActionExtensionMethodsImpl[T] {
/** The result type when inserting a single value. */
type SingleInsertResult
/** The result type when inserting a collection of values. */
type MultiInsertResult
/** An Action that inserts a single value. */
def += (value: T): ProfileAction[SingleInsertResult, NoStream, Effect.Write]
/** An Action that inserts a collection of values. */
def ++= (values: Iterable[T]): ProfileAction[MultiInsertResult, NoStream, Effect.Write]
}
//////////////////////////////////////////////////////////// Schema Actions
type SchemaActionExtensionMethods <: SchemaActionExtensionMethodsImpl
def createSchemaActionExtensionMethods(schema: SchemaDescription): SchemaActionExtensionMethods
trait SchemaActionExtensionMethodsImpl {
/** Create an Action that creates the entities described by this schema description. */
def create: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that drops the entities described by this schema description. */
def drop: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that truncates entries described by this schema description */
def truncate: ProfileAction[Unit, NoStream, Effect.Schema]
}
}
|
trevorsibanda/slick
|
slick/src/main/scala/slick/relational/RelationalProfile.scala
|
Scala
|
bsd-2-clause
| 12,134
|
/**
* Copyright (c) 2014-2016 Tim Bruijnzeels
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of this software, nor the names of its contributors, nor
* the names of the contributors' employers may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package nl.bruijnzeels.tim.rpki.ca
import java.math.BigInteger
import java.net.URI
import java.util.UUID
import net.ripe.ipresource.IpResourceSet
import net.ripe.rpki.commons.crypto.cms.manifest.ManifestCms
import net.ripe.rpki.commons.crypto.cms.roa.RoaCms
import net.ripe.rpki.commons.crypto.crl.X509Crl
import net.ripe.rpki.commons.crypto.x509cert.X509ResourceCertificate
import net.ripe.rpki.commons.provisioning.cms.ProvisioningCmsObject
import net.ripe.rpki.commons.provisioning.payload.issue.request.CertificateIssuanceRequestPayload
import nl.bruijnzeels.tim.rpki.ca.provisioning.{ChildIdentity, MyIdentity, ParentIdentity}
import nl.bruijnzeels.tim.rpki.common.cqrs.Event
import nl.bruijnzeels.tim.rpki.common.domain.{Revocation, RoaAuthorisation, SigningMaterial}
import nl.bruijnzeels.tim.rpki.publication.messages.{Publish, Withdraw}
/*
All EVENTS for the CertificateAuthority AggregateRoot.
All in one file so that sealed traits can be used, and we get compiler warnings for missing implementations
for handling these events.
*/
sealed trait CertificateAuthorityEvent extends Event
case class CertificateAuthorityCreated(aggregateId: UUID, name: String, baseUrl: URI, rrdpNotifyUrl: URI) extends CertificateAuthorityEvent
case class ResourceClassCreated(resourceClassName: String) extends CertificateAuthorityEvent
case class ResourceClassRemoved(resourceClassName: String) extends CertificateAuthorityEvent
case class ProvisioningCommunicatorCreated(myIdentity: MyIdentity) extends CertificateAuthorityEvent
sealed trait ProvisioningCommunicatorEvent extends CertificateAuthorityEvent
case class ProvisioningCommunicatorAddedChild(childIdentity: ChildIdentity) extends ProvisioningCommunicatorEvent
case class ProvisioningCommunicatorPerformedChildExchange(exchange: ProvisioningChildExchange) extends ProvisioningCommunicatorEvent
case class ProvisioningChildExchange(childId: UUID, request: ProvisioningCmsObject, response: ProvisioningCmsObject)
case class ProvisioningCommunicatorAddedParent(parentIdentity: ParentIdentity) extends ProvisioningCommunicatorEvent
case class ProvisioningCommunicatorPerformedParentExchange(exchange: ProvisioningParentExchange) extends ProvisioningCommunicatorEvent
case class ProvisioningParentExchange(request: ProvisioningCmsObject, response: ProvisioningCmsObject)
sealed trait ResourceClassEvent extends CertificateAuthorityEvent {
def resourceClassName: String
}
sealed trait SignerEvent extends ResourceClassEvent
case class SignerCreated(resourceClassName: String) extends SignerEvent
case class SignerSigningMaterialCreated(resourceClassName: String, signingMaterial: SigningMaterial) extends SignerEvent
case class SignerCreatedPendingCertificateRequest(resourceClassName: String, request: CertificateIssuanceRequestPayload) extends SignerEvent
case class SignerReceivedCertificate(resourceClassName: String, certificate: X509ResourceCertificate) extends SignerEvent
case class SignerSignedTaCertificate(resourceClassName: String, certificate: X509ResourceCertificate) extends SignerEvent
case class SignerSignedManifest(resourceClassName: String, manifest: ManifestCms) extends SignerEvent
case class SignerSignedCaCertificate(resourceClassName: String, certificate: X509ResourceCertificate) extends SignerEvent
case class SignerRemovedCaCertificate(resourceClassName: String, certificate: X509ResourceCertificate) extends SignerEvent
case class SignerAddedRevocation(resourceClassName: String, revocation: Revocation) extends SignerEvent
case class SignerSignedRoaCms(resourceClassName: String, roaCms: RoaCms) extends SignerEvent
case class SignerRemovedRoaCms(resourceClassName: String, roaCms: RoaCms) extends SignerEvent
sealed trait PublicationSetEvent extends SignerEvent
case class SignerUpdatedPublicationSet(resourceClassName: String, number: BigInteger, newMft: ManifestCms, newCrl: X509Crl, publishes: List[Publish] = List.empty, withdraws: List[Withdraw] = List.empty) extends PublicationSetEvent
case class SignerUnpublishedAll(resourceClassName: String, withdraws: List[Withdraw]) extends SignerEvent
case class ChildCreated(resourceClassName: String, childId: UUID, entitledResources: IpResourceSet) extends ResourceClassEvent
case class ChildRemoved(resourceClassName: String, childId: UUID) extends ResourceClassEvent
sealed trait ChildEvent extends ResourceClassEvent {
def childId: UUID
def resourceClassName: String
}
case class ChildUpdatedResourceEntitlements(resourceClassName: String, childId: UUID, entitledResources: IpResourceSet) extends ChildEvent
case class ChildReceivedCertificate(resourceClassName: String, childId: UUID, certificate: X509ResourceCertificate) extends ChildEvent
sealed trait RoaConfigurationEvent extends CertificateAuthorityEvent
case class RoaConfigurationPrefixAdded(roaAuthorisation: RoaAuthorisation) extends RoaConfigurationEvent
case class RoaConfigurationPrefixRemoved(roaAuthorisation: RoaAuthorisation) extends RoaConfigurationEvent
|
timbru/rpki-ca
|
src/main/scala/nl/bruijnzeels/tim/rpki/ca/CertificateAuthorityEvents.scala
|
Scala
|
bsd-3-clause
| 6,619
|
package com.github.tminglei.slickpg
import org.scalatest.funsuite.AnyFunSuite
import slick.jdbc.GetResult
import scala.concurrent.Await
import scala.concurrent.duration._
class PgHStoreSupportSuite extends AnyFunSuite with PostgresContainer {
import MyPostgresProfile.api._
lazy val db = Database.forURL(url = container.jdbcUrl, driver = "org.postgresql.Driver")
case class MapBean(id: Long, hstore: Map[String, String])
class HStoreTestTable(tag: Tag) extends Table[MapBean](tag, "HStoreTest") {
def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
def hstore = column[Map[String, String]]("hstoreMap", O.Default(Map.empty))
def * = (id, hstore) <> (MapBean.tupled, MapBean.unapply)
}
val HStoreTests = TableQuery[HStoreTestTable]
//------------------------------------------------------------------------------
val testRec1 = MapBean(33L, Map("a"->"val1", "b"->"val3", "c"->"321"))
val testRec2 = MapBean(35L, Map("a"->"val7", "e"->"val33", "c"->"111"))
val testRec3 = MapBean(37L, Map("a"->null, "c"->"105"))
val testRec4 = MapBean(41L, Map.empty[String, String])
test("Hstore Lifted support") {
Await.result(db.run(
DBIO.seq(
HStoreTests.schema create,
///
HStoreTests forceInsertAll List(testRec1, testRec2, testRec3, testRec4)
).andThen(
DBIO.seq(
HStoreTests.to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3, testRec4) === r)
),
// ->
HStoreTests.filter(_.id === testRec1.id.bind).map(_.hstore.+>("a")).result.head.map(
r => assert(Some("val1") === r)
),
HStoreTests.filter(_.hstore.+>("a") === "val7".bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2).map(_.hstore) === r.map(_.hstore))
),
HStoreTests.filter(_.hstore.+>("c").asColumnOf[Long] === 111L.bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2).map(_.hstore) === r.map(_.hstore))
),
// >>
HStoreTests.filter(_.hstore.>>[Long]("c".bind) === 111L.bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2).map(_.hstore) === r.map(_.hstore))
),
// ?
HStoreTests.filter(_.hstore.??("a".bind)).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3).map(_.hstore) === r.map(_.hstore))
),
// defined
HStoreTests.filter(_.hstore.?*("a".bind)).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2).map(_.hstore) === r.map(_.hstore))
),
// ?&
HStoreTests.filter(_.hstore.?&(List("a").bind)).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3).map(_.hstore) === r.map(_.hstore))
),
// ?|
HStoreTests.filter(_.hstore.?|(List("a", "b", "c").bind)).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3).map(_.hstore) === r.map(_.hstore))
),
// @>
HStoreTests.filter(_.hstore @> Map("a"->"val7", "e"->"val33").bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2).map(_.hstore) === r.map(_.hstore))
),
// <@
HStoreTests.filter(Map("a"->"val7", "e"->"val33").bind <@: _.hstore).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2).map(_.hstore) === r.map(_.hstore))
),
// +
HStoreTests.filter(_.id === 37L).map(t => t.hstore @+ Map("a"->"test").bind).result.head.map(
r => assert(Map("a"->"test", "c"->"105") === r)
),
// -
HStoreTests.filter(_.id === 37L).map(t => t.hstore @- Map("a"->"111", "c"->"105").bind).result.head.map(
r => assert(Map("a"->null) === r)
),
HStoreTests.filter(_.id === 37L).map(t => t.hstore -- List("a").bind).result.head.map(
r => assert(Map("c"->"105") === r)
),
HStoreTests.filter(_.id === 37L).map(t => t.hstore -/ "a".bind).result.head.map(
r => assert(Map("c"->"105") === r)
),
// slice
HStoreTests.filter(_.id === 33L).map(t => t.hstore slice List("a", "b").bind).result.head.map(
r => assert(Map("a"->"val1", "b"->"val3") === r)
)
)
).andFinally(
HStoreTests.schema drop
).transactionally
), Duration.Inf)
}
//------------------------------------------------------------------------------
test("Hstore Plain SQL support") {
import MyPostgresProfile.plainAPI._
implicit val getMapBeanResult = GetResult(r => MapBean(r.nextLong(), r.nextHStore()))
val b = MapBean(34L, Map("a"->"val1", "b"->"val3", "c"->"321"))
Await.result(db.run(
DBIO.seq(
sqlu"""create table HStoreTest(
id int8 not null primary key,
hstoreMap hstore not null)
""",
///
sqlu""" insert into HStoreTest values(${b.id}, ${b.hstore}) """,
sql""" select * from HStoreTest where id = ${b.id} """.as[MapBean].head.map(
r => assert(b === r)
),
///
sqlu"drop table if exists HStoreTest cascade"
).transactionally
), Duration.Inf)
}
}
|
tminglei/slick-pg
|
src/test/scala/com/github/tminglei/slickpg/PgHStoreSupportSuite.scala
|
Scala
|
bsd-2-clause
| 5,359
|
package idv.brianhsu.maidroid.plurk.activity
import idv.brianhsu.maidroid.ui.model._
import idv.brianhsu.maidroid.plurk._
import idv.brianhsu.maidroid.plurk.dialog._
import idv.brianhsu.maidroid.plurk.view._
import idv.brianhsu.maidroid.plurk.util._
import idv.brianhsu.maidroid.plurk.fragment._
import idv.brianhsu.maidroid.plurk.view._
import idv.brianhsu.maidroid.ui.util.AsyncUI._
import org.bone.soplurk.api.PlurkAPI._
import org.bone.soplurk.constant.CommentSetting
import org.bone.soplurk.model._
import android.app.Activity
import android.os.Bundle
import android.content.Intent
import android.content.DialogInterface
import android.content.pm.ActivityInfo
import android.view.View
import android.view.Menu
import android.view.MenuItem
import android.support.v7.app.ActionBarActivity
import scala.util.{Try, Success, Failure}
import scala.concurrent._
object ResponseListActivity {
var plurk: Plurk = _
var user: User = _
val RequestPostResponse = 1
val RequestEditPlurk = 2
}
class ResponseListActivity extends ActionBarActivity with TypedViewHolder
with ResponseListFragment.Listener
with ConfirmDialog.Listener
with PlurkView.Listener
{
private implicit def activity = this
private var showWelcomeMessage = true
private lazy val dialogFrame = ToggleView.setupAngryBehavior(this, findView(TR.activityResponseListDialogFrame))
private lazy val fragmentContainer = findView(TR.activityResponseListFragmentContainer)
private lazy val plurkAPI = PlurkAPIHelper.getPlurkAPI(this)
private var responseListFragment: Option[ResponseListFragment] = None
override def onReplyTo(username: String, originContent: String) {
startReplyActivity(Some(username, originContent))
}
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_response_list)
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListWelcome01)) :: Nil
)
responseListFragment match {
case Some(fragment) =>
fragment.plurk = ResponseListActivity.plurk
fragment.owner = ResponseListActivity.user
case None => updateFragment()
}
}
private def updateFragment() {
val fragment = new ResponseListFragment
fragment.plurk = ResponseListActivity.plurk
fragment.owner = ResponseListActivity.user
this.responseListFragment = Some(fragment)
getSupportFragmentManager.
beginTransaction.
replace(R.id.activityResponseListFragmentContainer, fragment).
commit()
}
override def onStart() {
super.onStart()
}
private def hasResponsePermission = {
    (ResponseListActivity.plurk.ownerID == ResponseListActivity.plurk.userID) ||
(ResponseListActivity.plurk.whoIsCommentable == CommentSetting.Everyone) ||
(ResponseListActivity.plurk.whoIsCommentable == CommentSetting.OnlyFriends)
}
override def onCreateOptionsMenu(menu: Menu): Boolean = {
val inflater = getMenuInflater
inflater.inflate(R.menu.activity_response_list, menu)
super.onCreateOptionsMenu(menu)
}
override def onPrepareOptionsMenu(menu: Menu): Boolean = {
val isPostedByCurrentUser =
ResponseListActivity.plurk.ownerID == ResponseListActivity.plurk.userID
val replyButton = menu.findItem(R.id.activityResponseListActionReply)
replyButton.setEnabled(hasResponsePermission)
replyButton.setVisible(hasResponsePermission)
val editButton = menu.findItem(R.id.activityResponseListActionEdit)
val deleteButton = menu.findItem(R.id.activityResponseListActionDelete)
editButton.setEnabled(isPostedByCurrentUser)
editButton.setVisible(isPostedByCurrentUser)
deleteButton.setEnabled(isPostedByCurrentUser)
deleteButton.setVisible(isPostedByCurrentUser)
super.onPrepareOptionsMenu(menu)
}
override def onOptionsItemSelected(menuItem: MenuItem): Boolean = menuItem.getItemId match {
case R.id.activityResponseListActionReply => startReplyActivity() ; false
case R.id.activityResponseListActionEdit => startEditActivity() ; false
case R.id.activityResponseListActionDelete => showConfirmDeleteDialog() ; false
case R.id.activityResponseListActionLogout => logout(); false
case R.id.activityResponseListActionAbout => AboutActivity.startActivity(this); false
case R.id.activityResponseListActionToggleMaid => ToggleView(this, dialogFrame); false
case _ => super.onOptionsItemSelected(menuItem)
}
private def logout() {
Logout.logout(this)
}
private def showConfirmDeleteDialog() {
val dialog = ConfirmDialog.createDialog(
this,
'DeletePlurkConfirm,
getString(R.string.activityResponseListConfirmDeleteTitle),
getString(R.string.activityResponseListConfirmDelete),
getString(R.string.delete), getString(R.string.cancel)
)
dialog.show(getSupportFragmentManager, "DeletePlurkConfirm")
}
override def onDialogOKClicked(dialogName: Symbol, dialog: DialogInterface, data: Bundle) {
dialogName match {
case 'LogoutConfirm =>
dialog.dismiss()
this.finish()
Logout.doLogout(this)
case 'ExitConfirm =>
deletePlurk()
dialog.dismiss()
case 'DeletePlurkConfirm =>
deletePlurk()
case 'DeleteResponseConfirm =>
val plurkID = data.getLong("plurkID", -1)
val responseID = data.getLong("responseID", -1)
deleteResponse(plurkID, responseID)
case 'BlockUserResponseConfirm =>
val plurkID = data.getLong("plurkID", -1)
val responseID = data.getLong("responseID", -1)
val ownerID = data.getLong("ownerID", -1)
blockUserAndDeleteResponse(plurkID, responseID, ownerID)
}
}
private def blockUserAndDeleteResponse(plurkID: Long, responseID: Long, ownerID: Long) {
val oldRequestedOrientation = getRequestedOrientation
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LOCKED)
val progressDialogFragment = ProgressDialogFragment.createDialog(
getString(R.string.activityResponseListBlocking),
getString(R.string.pleaseWait)
)
progressDialogFragment.show(
getSupportFragmentManager.beginTransaction,
"deleteResponseProgress"
)
val deleteFuture = Future {
plurkAPI.Responses.responseDelete(plurkID, responseID).get
plurkAPI.Blocks.block(ownerID).get
}
deleteFuture.onSuccessInUI { _ =>
responseListFragment.foreach(_.deleteResponse(responseID))
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListBlockResponseOK)) ::
Nil
)
}
deleteFuture.onFailureInUI { case e: Exception =>
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
DebugLog("====> onDeleteResponseFailure....", e)
dialogFrame.setMessages(
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListBlockResponseFailure01)) ::
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListBlockResponseFailure02).format(e.getMessage)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListBlockResponseFailure03)) ::
Nil
)
}
}
private def deleteResponse(plurkID: Long, responseID: Long) {
val oldRequestedOrientation = getRequestedOrientation
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LOCKED)
val progressDialogFragment = ProgressDialogFragment.createDialog(
getString(R.string.activityResponseListDeleteing),
getString(R.string.pleaseWait)
)
progressDialogFragment.show(
getSupportFragmentManager.beginTransaction,
"deleteResponseProgress"
)
val deleteFuture = Future {
plurkAPI.Responses.responseDelete(plurkID, responseID).get
}
deleteFuture.onSuccessInUI { _ =>
responseListFragment.foreach(_.deleteResponse(responseID))
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListDeleteResponseOK)) ::
Nil
)
}
deleteFuture.onFailureInUI { case e: Exception =>
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
DebugLog("====> onDeleteResponseFailure....", e)
dialogFrame.setMessages(
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListDeleteResponseFailure01)) ::
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListDeleteResponseFailure02).format(e.getMessage)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListDeleteResponseFailure03)) ::
Nil
)
}
}
private def deletePlurk() {
val oldRequestedOrientation = getRequestedOrientation
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LOCKED)
val progressDialogFragment = ProgressDialogFragment.createDialog(
getString(R.string.activityResponseListDeleteing),
getString(R.string.pleaseWait)
)
progressDialogFragment.show(
getSupportFragmentManager.beginTransaction,
"deletePlurkProgress"
)
val deleteFuture = Future {
plurkAPI.Timeline.plurkDelete(ResponseListActivity.plurk.plurkID).get
}
deleteFuture.onSuccessInUI { _ =>
TimelineFragment.deletedPlurkIDHolder = Some(ResponseListActivity.plurk.plurkID)
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
finish()
}
deleteFuture.onFailureInUI { case e: Exception =>
DebugLog("====> deletePlurkFailure....", e)
dialogFrame.setMessages(
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListDeletePlurkFailure01)) ::
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListDeletePlurkFailure02).format(e.getMessage)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListDeletePlurkFailure03)) :: Nil
)
progressDialogFragment.dismiss()
setRequestedOrientation(oldRequestedOrientation)
}
}
private def startReplyActivity(replyToInfo: Option[(String, String)] = None) {
val intent = new Intent(this, classOf[PostResponseActivity])
intent.putExtra(PostResponseActivity.PlurkIDBundle, ResponseListActivity.plurk.plurkID)
replyToInfo.foreach { case (nickname, originContent) =>
intent.putExtra(PostResponseActivity.NicknameBundle, nickname)
intent.putExtra(PostResponseActivity.OriginContentBundle, originContent)
}
startActivityForResult(intent, ResponseListActivity.RequestPostResponse)
}
override def startEditActivity(plurk: Plurk) {}
private def startEditActivity() {
val intent = new Intent(this, classOf[EditPlurkActivity])
intent.putExtra(EditPlurkActivity.PlurkIDBundle, ResponseListActivity.plurk.plurkID)
intent.putExtra(
EditPlurkActivity.ContentRawBundle,
ResponseListActivity.plurk.contentRaw getOrElse ""
)
startActivityForResult(intent, ResponseListActivity.RequestEditPlurk)
}
override def onGetResponseSuccess(responses: PlurkResponses) {
val dialog = responses.responses.size match {
case 0 =>
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListLow01)) ::
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListLow02)) ::
Nil
case n if n <= 50 =>
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListMid01)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListMid02)) ::
Nil
case n =>
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListHigh01).format(n)) ::
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListHigh02)) ::
Nil
}
if (showWelcomeMessage) {
dialogFrame.setMessages(dialog)
} else {
showWelcomeMessage = true
}
}
override def onGetResponseFailure(e: Exception) {
DebugLog("====> onGetResponseFailure....", e)
dialogFrame.setMessages(
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListGetResponseFailure01)) ::
Message(MaidMaro.Half.Normal, getString(R.string.activityResponseListGetResponseFailure02).format(e.getMessage)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListGetResponseFailure03)) ::
Nil
)
}
override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
super.onActivityResult(requestCode, resultCode, data)
requestCode match {
case ResponseListActivity.RequestPostResponse if resultCode == Activity.RESULT_OK =>
showWelcomeMessage = false
updateFragment()
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.activityResponseListPosted01)) ::
Message(MaidMaro.Half.Smile, getString(R.string.activityResponseListPosted02)) ::
Nil
)
case ResponseListActivity.RequestEditPlurk if resultCode == Activity.RESULT_OK =>
DebugLog("====> XXXXXXX")
val plurkID = data.getLongExtra(EditPlurkActivity.PlurkIDBundle, -1)
val newContent = data.getStringExtra(EditPlurkActivity.EditedContentBundle)
val newContentRaw = Option(data.getStringExtra(EditPlurkActivity.EditedContentRawBundle))
if (plurkID != -1) {
PlurkView.updatePlurk(plurkID, newContent, newContentRaw)
ResponseListActivity.plurk = ResponseListActivity.plurk.copy(
content = newContent, contentRaw = newContentRaw
)
updateFragment()
}
case _ =>
}
}
override def onResume() {
super.onResume()
ToggleView.syncDialogVisibility(this, dialogFrame)
}
def linkCopied() {
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.maidLinkCopied)) ::
Nil
)
}
def contentCopied() {
dialogFrame.setMessages(
Message(MaidMaro.Half.Happy, getString(R.string.maidContentCopied)) ::
Nil
)
}
}
|
brianhsu/MaidroidPlurk
|
src/main/scala/activity/ResponseListActivity.scala
|
Scala
|
gpl-3.0
| 14,371
|
package nodes.util
import org.apache.spark.rdd.RDD
import pipelines.{Logging, Transformer}
import scala.reflect.ClassTag
/**
* Caches the intermediate state of a node. Follows Spark's lazy evaluation conventions.
* @param name An optional name to set on the cached output. Useful for debugging.
* @tparam T Type of the input to cache.
*/
class Cacher[T: ClassTag](name: Option[String] = None) extends Transformer[T,T] with Logging {
override def apply(in: RDD[T]): RDD[T] = {
logInfo(s"CACHING ${in.id}")
name match {
case Some(x) => in.cache().setName(x)
case None => in.cache()
}
}
def apply(in: T): T = in
}
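// Illustrative usage sketch (added; not part of the original file — assumes a SparkContext
// `sc` and an upstream RDD are in scope):
//
//   val features: RDD[Array[Double]] = sc.textFile("features.txt").map(_.split(",").map(_.toDouble))
//   val cached = new Cacher[Array[Double]](Some("featureCache")).apply(features)
//   cached.count()   // first action materializes and caches the RDD under the name "featureCache"
//   cached.count()   // later actions reuse the cached partitions instead of re-reading the file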
|
etrain/keystone
|
src/main/scala/nodes/util/Cacher.scala
|
Scala
|
apache-2.0
| 649
|
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package k.grid.registration
import scala.concurrent.duration._
import akka.actor.Actor
import akka.actor.Actor.Receive
import com.typesafe.scalalogging.LazyLogging
import k.grid._
import k.grid.registration.messages.{GridTopology, RegistrationPing}
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by michael on 3/21/16.
*/
object LocalRegistrationManager {
val name = "LocalGossipManager"
// todo: change to Map[(Host, IdentityName),GridJvm]
private[LocalRegistrationManager] var _jvms = Set.empty[GridJvm]
private[LocalRegistrationManager] var _regFails = 0
def registrationFailure = _regFails == Config.possibleRegFails
def jvms = _jvms
}
class LocalRegistrationManager extends Actor with LazyLogging {
private case object SendGossipPing
private case object IncreaseRegFails
val isController = Grid.isController
def registrationCoordinator = Grid.selectSingleton(RegistrationCoordinator.name, None, Grid.clusterProxy)
@throws[Exception](classOf[Exception])
override def preStart(): Unit = {
context.system.scheduler.schedule(5.seconds, 5.seconds, self, SendGossipPing)
context.system.scheduler.schedule(30.seconds, 30.seconds, self, IncreaseRegFails)
}
override def receive: Receive = {
case SendGossipPing => registrationCoordinator ! RegistrationPing(Grid.thisMember)
case GridTopology(jvms) =>
LocalRegistrationManager._regFails = 0
val jvmsJoined = jvms -- LocalRegistrationManager._jvms
val jvmsLeft = LocalRegistrationManager._jvms -- jvms
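        // Added example: if the previously known set was {jvmA, jvmB} and the new topology
        // reports {jvmB, jvmC}, then jvmsJoined = {jvmC} and jvmsLeft = {jvmA}; only this
        // delta is forwarded to the ClientActor below.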
LocalRegistrationManager._jvms = jvms
// send the data to the client actor so it can forward it to its subscribers.
Grid.selectActor(ClientActor.name, Grid.thisMember) ! JvmMembershipReport(jvmsJoined, jvmsLeft)
logger.debug(s"Current jvms: $jvms")
case IncreaseRegFails =>
LocalRegistrationManager._regFails = Math.min(LocalRegistrationManager._regFails + 1, Config.possibleRegFails)
}
}
|
dudi3001/CM-Well
|
server/cmwell-grid/src/main/scala/k/grid/registration/LocalRegistrationManager.scala
|
Scala
|
apache-2.0
| 2,599
|
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.consumers
import minitest.TestSuite
import monix.execution.exceptions.DummyException
import monix.execution.schedulers.TestScheduler
import monix.reactive.{Consumer, Observable}
import scala.util.{Failure, Success}
object FoldLeftConsumerSuite extends TestSuite[TestScheduler] {
def setup(): TestScheduler = TestScheduler()
def tearDown(s: TestScheduler): Unit = {
assert(s.state.tasks.isEmpty, "TestScheduler should have no pending tasks")
}
test("should sum a long stream") { implicit s =>
val count = 10000L
val obs = Observable.range(0, count)
val f = obs.consumeWith(Consumer.foldLeft(0L)(_ + _)).runToFuture
s.tick()
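    // Added note: the stream emits 0 until count, so the expected total is the arithmetic
    // series sum count * (count - 1) / 2.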
assertEquals(f.value, Some(Success(count * (count - 1) / 2)))
}
test("should interrupt with error") { implicit s =>
val ex = DummyException("dummy")
val obs = Observable.range(0, 10000).endWithError(ex)
val f = obs.consumeWith(Consumer.foldLeft(0L)(_ + _)).runToFuture
s.tick()
assertEquals(f.value, Some(Failure(ex)))
}
test("should protect against user error") { implicit s =>
val ex = DummyException("dummy")
val f = Observable
.now(1)
.consumeWith(Consumer.foldLeft(0L)((_, _) => throw ex))
.runToFuture
s.tick()
assertEquals(f.value, Some(Failure(ex)))
}
}
|
monix/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/consumers/FoldLeftConsumerSuite.scala
|
Scala
|
apache-2.0
| 1,987
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.test.integration
import java.util
import java.util.Properties
import java.util.concurrent.{CountDownLatch, TimeUnit}
import javax.security.auth.login.Configuration
import kafka.admin.AdminUtils
import kafka.consumer.{Consumer, ConsumerConfig}
import kafka.message.MessageAndMetadata
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.{CoreUtils, TestUtils, ZkUtils}
import kafka.zk.EmbeddedZookeeper
import org.apache.kafka.clients.producer.{KafkaProducer, Producer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.security.JaasUtils
import org.apache.samza.Partition
import org.apache.samza.checkpoint.Checkpoint
import org.apache.samza.config._
import org.apache.samza.container.TaskName
import org.apache.samza.context.Context
import org.apache.samza.job.local.ThreadJobFactory
import org.apache.samza.job.model.{ContainerModel, JobModel}
import org.apache.samza.job.{ApplicationStatus, JobRunner, StreamJob}
import org.apache.samza.metrics.MetricsRegistryMap
import org.apache.samza.storage.ChangelogStreamManager
import org.apache.samza.system.kafka.TopicMetadataCache
import org.apache.samza.system.{IncomingMessageEnvelope, SystemStreamPartition}
import org.apache.samza.task._
import org.apache.samza.util.{ClientUtilTopicMetadataStore, KafkaUtil, TopicMetadataStore}
import org.junit.Assert._
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, Buffer, HashMap, SynchronizedMap}
/*
 * This creates a singleton instance of StreamTaskTestUtil and implements the helper functions to
* 1. start the local ZooKeeper server
* 2. start the local Kafka brokers
* 3. create and validate test topics
* 4. shutdown servers and cleanup test directories and files
*/
object StreamTaskTestUtil {
val INPUT_TOPIC = "input"
val TOTAL_TASK_NAMES = 1
val REPLICATION_FACTOR = 3
val zkConnectionTimeout = 6000
val zkSessionTimeout = 6000
var zkUtils: ZkUtils = null
var zookeeper: EmbeddedZookeeper = null
var brokers: String = null
def zkPort: Int = zookeeper.port
def zkConnect: String = s"127.0.0.1:$zkPort"
var producer: Producer[Array[Byte], Array[Byte]] = null
val cp1 = new Checkpoint(Map(new SystemStreamPartition("kafka", "topic", new Partition(0)) -> "123").asJava)
val cp2 = new Checkpoint(Map(new SystemStreamPartition("kafka", "topic", new Partition(0)) -> "12345").asJava)
var metadataStore: TopicMetadataStore = null
/*
* This is the default job configuration. Each test class can override the default configuration below.
*/
var jobConfig = Map(
"job.factory.class" -> classOf[ThreadJobFactory].getCanonicalName,
"job.coordinator.system" -> "kafka",
ApplicationConfig.PROCESSOR_ID -> "1",
"task.inputs" -> "kafka.input",
"serializers.registry.string.class" -> "org.apache.samza.serializers.StringSerdeFactory",
"systems.kafka.samza.factory" -> "org.apache.samza.system.kafka.KafkaSystemFactory",
// Always start consuming at offset 0. This avoids a race condition between
// the producer and the consumer in this test (SAMZA-166, SAMZA-224).
"systems.kafka.samza.offset.default" -> "oldest", // applies to a nonempty topic
"systems.kafka.consumer.auto.offset.reset" -> "smallest", // applies to an empty topic
"systems.kafka.samza.msg.serde" -> "string",
// Since using state, need a checkpoint manager
"task.checkpoint.factory" -> "org.apache.samza.checkpoint.kafka.KafkaCheckpointManagerFactory",
"task.checkpoint.system" -> "kafka",
"task.checkpoint.replication.factor" -> "1",
// However, don't have the inputs use the checkpoint manager
// since the second part of the test expects to replay the input streams.
"systems.kafka.streams.input.samza.reset.offset" -> "false")
def apply(map: Map[String, String]): Unit = {
jobConfig ++= map
TestTask.reset()
}
var servers: Buffer[KafkaServer] = null
def beforeSetupServers {
zookeeper = new EmbeddedZookeeper()
zkUtils = ZkUtils(zkConnect, zkSessionTimeout, zkConnectionTimeout, JaasUtils.isZkSecurityEnabled())
val props = TestUtils.createBrokerConfigs(3, zkConnect, true)
val configs = props.map(p => {
p.setProperty("auto.create.topics.enable","false")
KafkaConfig.fromProps(p)
})
servers = configs.map(TestUtils.createServer(_)).toBuffer
brokers = TestUtils.getBrokerListStrFromServers(servers, SecurityProtocol.PLAINTEXT)
// setup the zookeeper and bootstrap servers for local kafka cluster
jobConfig ++= Map("systems.kafka.consumer.zookeeper.connect" -> zkConnect,
"systems.kafka.producer.bootstrap.servers" -> brokers)
val config = new util.HashMap[String, String]()
config.put("bootstrap.servers", brokers)
config.put("request.required.acks", "-1")
config.put("serializer.class", "kafka.serializer.StringEncoder")
config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1")
config.put(ProducerConfig.RETRIES_CONFIG, (new Integer(Integer.MAX_VALUE-1)).toString())
config.put(ProducerConfig.LINGER_MS_CONFIG, "0")
val producerConfig = new KafkaProducerConfig("kafka", "i001", config)
producer = new KafkaProducer[Array[Byte], Array[Byte]](producerConfig.getProducerProperties)
metadataStore = new ClientUtilTopicMetadataStore(brokers, "some-job-name")
createTopics
validateTopics
}
def createTopics {
AdminUtils.createTopic(
zkUtils,
INPUT_TOPIC,
TOTAL_TASK_NAMES,
REPLICATION_FACTOR)
}
def validateTopics {
val topics = Set(INPUT_TOPIC)
var done = false
var retries = 0
while (!done && retries < 100) {
try {
val topicMetadataMap = TopicMetadataCache.getTopicMetadata(topics, "kafka", metadataStore.getTopicInfo)
topics.foreach(topic => {
val topicMetadata = topicMetadataMap(topic)
KafkaUtil.maybeThrowException(topicMetadata.error.exception())
})
done = true
} catch {
case e: Exception =>
System.err.println("Got exception while validating test topics. Waiting and retrying.", e)
retries += 1
Thread.sleep(500)
}
}
if (retries >= 100) {
fail("Unable to successfully create topics. Tried to validate %s times." format retries)
}
}
def afterCleanLogDirs {
servers.foreach(_.shutdown())
servers.foreach(server => CoreUtils.delete(server.config.logDirs))
if (zkUtils != null)
CoreUtils.swallow(zkUtils.close())
if (zookeeper != null)
CoreUtils.swallow(zookeeper.shutdown())
Configuration.setConfiguration(null)
}
}
/* This class implements the base utilities for implementing an integration test for a StreamTask.
* It implements helper functions to start/stop the job, send messages to a task, and read all messages from a topic
*/
class StreamTaskTestUtil {
import StreamTaskTestUtil._
/**
* Start a job for TestTask, and do some basic sanity checks around startup
* time, number of partitions, etc.
*/
def startJob = {
// Start task.
val jobRunner = new JobRunner(new MapConfig(jobConfig.asJava))
val job = jobRunner.run()
createStreams
assertEquals(ApplicationStatus.Running, job.waitForStatus(ApplicationStatus.Running, 60000))
TestTask.awaitTaskRegistered
val tasks = TestTask.tasks
assertEquals("Should only have a single partition in this task", 1, tasks.size)
val task = tasks.values.toList.head
task.initFinished.await(60, TimeUnit.SECONDS)
assertEquals(0, task.initFinished.getCount)
(job, task)
}
/**
* Kill a job, and wait for an unsuccessful finish (since this throws an
* interrupt, which is forwarded on to ThreadJob, and marked as a failure).
*/
def stopJob(job: StreamJob) {
    // Make sure we don't kill the job before it has started.
    // eventProcessed guarantees all the consumers have been initialized.
val tasks = TestTask.tasks
val task = tasks.values.toList.head
task.eventProcessed.await(60, TimeUnit.SECONDS)
assertEquals(0, task.eventProcessed.getCount)
// Shutdown task.
job.kill
val status = job.waitForFinish(60000)
assertEquals(ApplicationStatus.UnsuccessfulFinish, status)
}
/**
* Send a message to the input topic, and validate that it gets to the test task.
*/
def send(task: TestTask, msg: String) {
producer.send(new ProducerRecord(INPUT_TOPIC, msg.getBytes)).get()
task.awaitMessage
assertEquals(msg, task.received.last)
}
/**
* Read all messages from a topic starting from last saved offset for group.
* To read all from offset 0, specify a unique, new group string.
*/
def readAll(topic: String, maxOffsetInclusive: Int, group: String): List[String] = {
val props = new Properties
props.put("zookeeper.connect", zkConnect)
props.put("group.id", group)
props.put("auto.offset.reset", "smallest")
val consumerConfig = new ConsumerConfig(props)
val consumerConnector = Consumer.create(consumerConfig)
val stream = consumerConnector.createMessageStreams(Map(topic -> 1))(topic).head.iterator
var message: MessageAndMetadata[Array[Byte], Array[Byte]] = null
var messages = ArrayBuffer[String]()
while (message == null || message.offset < maxOffsetInclusive) {
message = stream.next
if (message.message == null) {
messages += null
} else {
messages += new String(message.message, "UTF-8")
}
System.err.println("StreamTaskTestUtil.readAll(): offset=%s, message=%s" format (message.offset, messages.last))
}
consumerConnector.shutdown
messages.toList
}
def createStreams {
val mapConfig = new MapConfig(jobConfig.asJava)
val containers = new util.HashMap[String, ContainerModel]()
val jobModel = new JobModel(mapConfig, containers)
jobModel.maxChangeLogStreamPartitions = 1
val taskConfig = new TaskConfig(jobModel.getConfig)
val checkpointManager = taskConfig.getCheckpointManager(new MetricsRegistryMap())
checkpointManager match {
case Some(checkpointManager) => {
checkpointManager.createResources
checkpointManager.stop
}
case _ => assert(checkpointManager != null, "No checkpoint manager factory configured")
}
ChangelogStreamManager.createChangelogStreams(jobModel.getConfig, jobModel.maxChangeLogStreamPartitions)
}
}
object TestTask {
val tasks = new HashMap[TaskName, TestTask] with SynchronizedMap[TaskName, TestTask]
var totalTasks = 1
@volatile var allTasksRegistered = new CountDownLatch(totalTasks)
def reset(): Unit = {
TestTask.totalTasks = StreamTaskTestUtil.TOTAL_TASK_NAMES
TestTask.allTasksRegistered = new CountDownLatch(TestTask.totalTasks)
}
/**
* Static method that tasks can use to register themselves with. Useful so
* we don't have to sneak into the ThreadJob/SamzaContainer to get our test
* tasks.
*/
def register(taskName: TaskName, task: TestTask) {
tasks += taskName -> task
allTasksRegistered.countDown
}
def awaitTaskRegistered {
allTasksRegistered.await(60, TimeUnit.SECONDS)
assertEquals(0, allTasksRegistered.getCount)
assertEquals(totalTasks, tasks.size)
// Reset the registered latch, so we can use it again every time we start a new job.
TestTask.allTasksRegistered = new CountDownLatch(TestTask.totalTasks)
}
}
/**
 * This class defines the base class for StreamTasks used in integration tests.
* It implements some basic hooks for synchronization between the test class and the tasks
*/
abstract class TestTask extends StreamTask with InitableTask {
var received = ArrayBuffer[String]()
val initFinished = new CountDownLatch(1)
val eventProcessed = new CountDownLatch(1)
@volatile var gotMessage = new CountDownLatch(1)
def init(context: Context) {
TestTask.register(context.getTaskContext.getTaskModel.getTaskName, this)
testInit(context)
initFinished.countDown()
}
def process(envelope: IncomingMessageEnvelope, collector: MessageCollector, coordinator: TaskCoordinator) {
val msg = envelope.getMessage.asInstanceOf[String]
eventProcessed.countDown()
System.err.println("TestTask.process(): %s" format msg)
received += msg
testProcess(envelope, collector, coordinator)
// Notify sender that we got a message.
gotMessage.countDown
}
def awaitMessage {
assertTrue("Timed out of waiting for message rather than received one.", gotMessage.await(60, TimeUnit.SECONDS))
assertEquals(0, gotMessage.getCount)
gotMessage = new CountDownLatch(1)
}
def testInit(context: Context)
def testProcess(envelope: IncomingMessageEnvelope, collector: MessageCollector, coordinator: TaskCoordinator)
}
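// Illustrative sketch (added) of how a concrete integration test is assembled from the
// utilities above; the task class, config key, and message are hypothetical:
//
//   class EchoTask extends TestTask {
//     def testInit(context: Context) {}
//     def testProcess(envelope: IncomingMessageEnvelope, collector: MessageCollector, coordinator: TaskCoordinator) {}
//   }
//
//   StreamTaskTestUtil(Map("task.class" -> classOf[EchoTask].getName))
//   StreamTaskTestUtil.beforeSetupServers
//   val util = new StreamTaskTestUtil
//   val (job, task) = util.startJob        // waits for the container and task init
//   util.send(task, "hello")               // produces to INPUT_TOPIC and waits for process()
//   util.stopJob(job)
//   StreamTaskTestUtil.afterCleanLogDirs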
|
bharathkk/samza
|
samza-test/src/test/scala/org/apache/samza/test/integration/StreamTaskTestUtil.scala
|
Scala
|
apache-2.0
| 13,656
|
package conf
import org.apache.spark.streaming.{ Seconds, StreamingContext }
import org.apache.spark.{ SparkConf, SparkContext }
object Configuration {
private val ContextMaster = "local"
private val AppName = "My Application"
val sparkConf = new SparkConf().setMaster(ContextMaster).setAppName(AppName)
}
/**
* Created by dbhatia on 8/11/14.
*/
object SCFactory {
val sc = new SparkContext(Configuration.sparkConf)
val ssc = new StreamingContext(Configuration.sparkConf, Seconds(1))
}
|
deeptibhatia/spark-app
|
src/main/scala/conf/Configuration.scala
|
Scala
|
mit
| 504
|
package epic.parser
package models
import scala.collection.mutable.HashMap
import scala.util.Random
import scala.collection.GenTraversable
import breeze.features.FeatureVector
import breeze.linalg._
import breeze.util.Index
import epic.constraints.ChartConstraints
import epic.dense.IdentityTransform
import epic.dense.OutputTransform
import epic.dense.Transform
import epic.dense.Word2VecDepFeaturizerIndexed
import epic.dense.Word2VecSurfaceFeaturizerIndexed
import epic.features._
import epic.framework.Feature
import epic.framework.StandardExpectedCounts
import epic.lexicon.Lexicon
import epic.parser.projections.GrammarRefinements
import epic.trees._
import scala.collection.mutable.ArrayBuffer
/**
* Main neural CRF parser class.
*
* @author gdurrett
**/
@SerialVersionUID(1L)
class PositionalNeuralModel[L, L2, W](annotator: (BinarizedTree[L], IndexedSeq[W]) => BinarizedTree[IndexedSeq[L2]],
val constrainer: ChartConstraints.Factory[L, W],
val topology: RuleTopology[L],
val lexicon: Lexicon[L, W],
refinedTopology: RuleTopology[L2],
refinements: GrammarRefinements[L, L2],
labelFeaturizer: RefinedFeaturizer[L, W, Feature],
surfaceFeaturizer: Word2VecSurfaceFeaturizerIndexed[W],
depFeaturizer: Word2VecDepFeaturizerIndexed[W],
val transforms: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]],
val maybeSparseSurfaceFeaturizer: Option[IndexedSpanFeaturizer[L, L2, W]],
val depTransforms: Seq[OutputTransform[Array[Int],DenseVector[Double]]],
val decoupledTransforms: Seq[OutputTransform[Array[Int],DenseVector[Double]]]) extends ParserModel[L, W] with Serializable {
def mergeWeightsForEnsembling(x1: DenseVector[Double], x2: DenseVector[Double]) = {
require(decoupledTransforms.isEmpty)
require(x1.size == x2.size)
    // Stack up the dense parts, sum the sparse parts
if (maybeSparseSurfaceFeaturizer.isDefined) {
val sparseFeatsStart = index.componentOffset(index.indices.size - 1)
val summedSparseFeatures = x1(sparseFeatsStart to -1) + x2(sparseFeatsStart to -1)
DenseVector.vertcat(x1(0 until sparseFeatsStart), x2(0 until sparseFeatsStart), summedSparseFeatures)
} else {
DenseVector.vertcat(x1, x2)
}
}
def cloneModelForEnsembling = {
require(decoupledTransforms.isEmpty)
// Note that duping the transforms is okay because they still produce distinct
// layers, so caching behavior is unaffected
val newTransforms = transforms ++ transforms
val newDepTransforms = depTransforms ++ depTransforms
new PositionalNeuralModel(annotator, constrainer, topology, lexicon, refinedTopology, refinements, labelFeaturizer, surfaceFeaturizer, depFeaturizer,
newTransforms, maybeSparseSurfaceFeaturizer, newDepTransforms, decoupledTransforms)
}
override type Inference = PositionalNeuralModel.Inference[L, L2, W]
override def accumulateCounts(inf: Inference, s: Scorer, d: TreeInstance[L, W], m: Marginal, accum: ExpectedCounts, scale: Double): Unit = {
// println("Extracting ecounts")
inf.grammar.extractEcounts(m, accum.counts, scale)
if (maybeSparseSurfaceFeaturizer.isDefined) {
val f = maybeSparseSurfaceFeaturizer.get
val innerAccum = StandardExpectedCounts.zero(f.index)
m.expectedCounts(maybeSparseSurfaceFeaturizer.get, innerAccum, scale)
// val totalTransformSize = transform.index.size
val totalTransformSize = transforms.map(_.index.size).sum + depTransforms.map(_.index.size).sum + decoupledTransforms.map(_.index.size).sum
accum.counts += DenseVector.vertcat(DenseVector.zeros[Double](totalTransformSize), innerAccum.counts)
}
// println("Ecounts extracted")
accum.loss += scale * m.logPartition
}
/**
* Models have features, and this defines the mapping from indices in the weight vector to features.
* @return
*/
val index = if (maybeSparseSurfaceFeaturizer.isDefined) {
SegmentedIndex(transforms.map(_.index) ++ depTransforms.map(_.index) ++ decoupledTransforms.map(_.index) ++ IndexedSeq(maybeSparseSurfaceFeaturizer.get.index):_*)
} else {
SegmentedIndex(transforms.map(_.index) ++ depTransforms.map(_.index) ++ decoupledTransforms.map(_.index):_*)
}
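  // Added comment: the resulting weight-vector layout is
  //   [surface transforms] ++ [dep transforms] ++ [decoupled transforms] ++ [sparse surface features (optional)]
  // and index.componentOffset(i) gives the start offset of segment i within that vector.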
def initialWeightVector(initWeightsScale: Double, initializerSpec: String, trulyRandom: Boolean = false): DenseVector[Double] = {
val rng = if (trulyRandom) new Random() else new Random(0)
val initTransformWeights = DenseVector.vertcat(transforms.map(_.initialWeightVector(initWeightsScale, rng, true, initializerSpec)):_*)
val initDepWeights = DenseVector.vertcat(depTransforms.map(_.initialWeightVector(initWeightsScale, rng, true, initializerSpec)):_*)
val initDecoupledWeights = DenseVector.vertcat(decoupledTransforms.map(_.initialWeightVector(initWeightsScale, rng, true, initializerSpec)):_*)
val newInitVector: DenseVector[Double] = if (maybeSparseSurfaceFeaturizer.isDefined) {
DenseVector.vertcat(initTransformWeights, initDepWeights, initDecoupledWeights, DenseVector.zeros(maybeSparseSurfaceFeaturizer.get.index.size))
} else {
DenseVector.vertcat(initTransformWeights, initDepWeights, initDecoupledWeights)
}
require(newInitVector.size == index.size, newInitVector.size + " " + index.size)
newInitVector
}
override def featureIndex: Index[Feature] = index
override def inferenceFromWeights(weights: DenseVector[Double]): Inference = inferenceFromWeights(weights, true)
def inferenceFromWeights(weights: DenseVector[Double], forTrain: Boolean): Inference = {
val layersAndInnerLayers = transforms.indices.map { i =>
transforms(i).extractLayerAndPenultimateLayer(weights(index.componentOffset(i) until index.componentOffset(i) + index.indices(i).size), forTrain)
}
val layers: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]#OutputLayer] = layersAndInnerLayers.map(_._1)
val innerLayers: IndexedSeq[epic.dense.Transform.Layer[Array[Int],DenseVector[Double]]] = layersAndInnerLayers.map(_._2)
val depLayers: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]#OutputLayer] = depTransforms.indices.map { i =>
val idxIdx = transforms.size + i
depTransforms(i).extractLayer(weights(index.componentOffset(idxIdx) until index.componentOffset(idxIdx) + index.indices(idxIdx).size), forTrain)
}
val decoupledLayersAndInner = decoupledTransforms.indices.map { i =>
val idxIdx = transforms.size + depTransforms.size + i
decoupledTransforms(i).extractLayerAndPenultimateLayer(weights(index.componentOffset(idxIdx) until index.componentOffset(idxIdx) + index.indices(idxIdx).size), forTrain)
}
val decoupledLayers = decoupledLayersAndInner.map(_._1)
val decoupledInnerLayers = decoupledLayersAndInner.map(_._2)
val grammar = new PositionalNeuralModel.PositionalNeuralGrammar[L, L2, W](topology, lexicon, refinedTopology, refinements, labelFeaturizer,
surfaceFeaturizer, depFeaturizer, layers, innerLayers, depLayers, maybeSparseSurfaceFeaturizer, decoupledLayers, decoupledInnerLayers, weights, this)
new Inference(annotator, constrainer, grammar, refinements)
}
/**
* When doing batch normalization, we need to normalize the test network
*/
def extractParser(weights: DenseVector[Double], trainExs: Seq[TreeInstance[L,W]])(implicit deb: Debinarizer[L]) = {
val inf = inferenceFromWeights(weights).forTesting
inf.relativizeToData(trainExs.slice(0, Math.min(trainExs.size, 200)).asInstanceOf[Seq[TreeInstance[AnnotatedLabel,String]]])
Parser(constrainer, inf.grammar, ChartDecoder[L, W]())
}
override def initialValueForFeature(f: Feature): Double = 0.0
}
object PositionalNeuralModel {
case class Inference[L, L2, W](annotator: (BinarizedTree[L], IndexedSeq[W]) => BinarizedTree[IndexedSeq[L2]],
constrainer: ChartConstraints.Factory[L, W],
grammar: PositionalNeuralGrammar[L, L2, W],
refinements: GrammarRefinements[L, L2]) extends ParserInference[L, W] {
override def goldMarginal(scorer: Scorer, ti: TreeInstance[L, W], aug: UnrefinedGrammarAnchoring[L, W]): Marginal = {
import ti._
val annotated = annotator(tree, words).map(_.map(refinements.labels.localize))
val product = grammar.anchor(words, constrainer.constraints(ti.words))
LatentTreeMarginal(product, annotated)
}
// This needs to be different for dropout, so that we can get the right layers
override def forTesting = grammar.origPTModel.inferenceFromWeights(grammar.weights, false)
def relativizeToData(data: GenTraversable[TreeInstance[AnnotatedLabel,String]]) {
}
}
@SerialVersionUID(4749637878577393596L)
class PositionalNeuralGrammar[L, L2, W](val topology: RuleTopology[L],
val lexicon: Lexicon[L, W],
val refinedTopology: RuleTopology[L2],
val refinements: GrammarRefinements[L, L2],
labelFeaturizer: RefinedFeaturizer[L, W, Feature],
val surfaceFeaturizer: Word2VecSurfaceFeaturizerIndexed[W],
depFeaturizer: Word2VecDepFeaturizerIndexed[W],
val layers: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]#OutputLayer],
penultimateLayers: IndexedSeq[epic.dense.Transform.Layer[Array[Int],DenseVector[Double]]],
depLayers: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]#OutputLayer],
val maybeSparseSurfaceFeaturizer: Option[IndexedSpanFeaturizer[L, L2, W]],
decoupledLayers: IndexedSeq[OutputTransform[Array[Int],DenseVector[Double]]#OutputLayer],
penultimateDecoupledLayers: IndexedSeq[epic.dense.Transform.Layer[Array[Int],DenseVector[Double]]],
val weights: DenseVector[Double],
val origPTModel: PositionalNeuralModel[L,L2,W]) extends Grammar[L, W] with Serializable {
val SpanLayerIdx = 0
val UnaryLayerIdx = 1
val BinaryLayerIdx = 2
val dcSpanFeatOffset = layers.map(_.index.size).sum + depLayers.map(_.index.size).sum
val dcUnaryFeatOffset = dcSpanFeatOffset + (if (decoupledLayers.nonEmpty) decoupledLayers(0).index.size else 0)
val dcBinaryFeatOffset = dcUnaryFeatOffset + (if (decoupledLayers.nonEmpty) decoupledLayers(1).index.size else 0)
override def withPermissiveLexicon: Grammar[L, W] = {
new PositionalNeuralGrammar(topology, lexicon.morePermissive, refinedTopology, refinements, labelFeaturizer, surfaceFeaturizer,
depFeaturizer, layers, penultimateLayers, depLayers, maybeSparseSurfaceFeaturizer, decoupledLayers, penultimateDecoupledLayers, weights, origPTModel)
}
/**
   * N.B. does not extract expected counts for sparse features; this is done outside this loop
*/
def extractEcounts(m: ParseMarginal[L, W], deriv: DenseVector[Double], scale: Double): Unit = {
val w = m.words
val length = w.length
val sspec = surfaceFeaturizer.anchor(w)
val depSpec = depFeaturizer.anchor(w)
val lspec = labelFeaturizer.anchor(w)
// val maxTetraLen = ((w.size + 2) * (w.size + 3) * (w.size + 4))/6 + ((w.size + 1) * (w.size + 2))/2 + w.size + 2
def tetra(begin: Int, split: Int, end: Int) = {
(end * (end + 1) * (end + 2))/6 + ((split + 1) * split / 2 + begin)
}
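    // Added worked example: tetra(0, 1, 2) = (2*3*4)/6 + ((1+1)*1)/2 + 0 = 4 + 1 = 5; the
    // tetrahedral packing is intended to give each (begin, split, end) triple a distinct flat index.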
// This representation appears to make things a bit faster?
val ruleCountsPerState = new HashMap[Int,SparseVector[Double]]
val unaryRuleCountsPerState = new HashMap[Int,SparseVector[Double]]
val binaryRuleCountsPerState = new HashMap[Int,SparseVector[Double]]
val spanCountsPerState = new HashMap[Int,SparseVector[Double]]
// val ruleCountsPerState = Array.fill(maxTetraLen)(SparseVector.zeros[Double](labelFeaturizer.index.size))
// val countsPerHeadDepPair = Array.tabulate(w.size, w.size)((i, j) => 0.0)
// val statesUsed = Array.fill(maxTetraLen)(false)
// val untetra = Array.fill(maxTetraLen)((-1, -1, -1))
val untetra = new HashMap[Int,(Int,Int,Int)]
m visit new AnchoredVisitor[L] {
override def visitUnaryRule(begin: Int, end: Int, rule: Int, ref: Int, score: Double): Unit = {
val tetraIdx = tetra(begin, end, length + 1)
untetra(tetraIdx) = (begin, end, length + 1)
val fv = new FeatureVector(lspec.featuresForUnaryRule(begin, end, rule, ref))
if (!ruleCountsPerState.contains(tetraIdx)) ruleCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, ruleCountsPerState(tetraIdx))
if (decoupledLayers.nonEmpty) {
if (!unaryRuleCountsPerState.contains(tetraIdx)) unaryRuleCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, unaryRuleCountsPerState(tetraIdx))
}
}
override def visitSpan(begin: Int, end: Int, tag: Int, ref: Int, score: Double): Unit = {
val tetraIdx = tetra(begin, end, length + 2)
untetra(tetraIdx) = (begin, end, length + 2)
val fv = new FeatureVector(lspec.featuresForSpan(begin, end, tag, ref))
if (!ruleCountsPerState.contains(tetraIdx)) ruleCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, ruleCountsPerState(tetraIdx))
if (decoupledLayers.nonEmpty) {
if (!spanCountsPerState.contains(tetraIdx)) spanCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, spanCountsPerState(tetraIdx))
}
}
override def visitBinaryRule(begin: Int, split: Int, end: Int, rule: Int, ref: Int, score: Double): Unit = {
val tetraIdx = tetra(begin, split, end)
untetra(tetraIdx) = (begin, split, end)
val fv = new FeatureVector(lspec.featuresForBinaryRule(begin, split, end, rule, ref))
if (!ruleCountsPerState.contains(tetraIdx)) ruleCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, ruleCountsPerState(tetraIdx))
if (decoupledLayers.nonEmpty) {
if (!binaryRuleCountsPerState.contains(tetraIdx)) binaryRuleCountsPerState.put(tetraIdx, SparseVector.zeros[Double](labelFeaturizer.index.size))
axpy(score, fv, binaryRuleCountsPerState(tetraIdx))
}
}
}
for (key <- ruleCountsPerState.keySet) {
val (begin, split, end) = untetra(key)
val ffeats = if (end > length) sspec.featuresForSpan(begin, split) else sspec.featuresForSplit(begin, split, end)
var layerSizeTally = 0
layers.indices.foreach { j =>
layers(j).tallyDerivative(deriv(layerSizeTally until layerSizeTally + layers(j).index.size), { ruleCountsPerState(key) * scale }, ffeats)
layerSizeTally += layers(j).index.size
}
}
if (decoupledLayers.nonEmpty) {
for (key <- spanCountsPerState.keySet) {
val (begin, end, _) = untetra(key)
val ffeats = sspec.reducedFeaturesForSpan(begin, end)
decoupledLayers(SpanLayerIdx).tallyDerivative(deriv(dcSpanFeatOffset until dcSpanFeatOffset + decoupledLayers(SpanLayerIdx).index.size), { spanCountsPerState(key) * scale }, ffeats)
}
for (key <- unaryRuleCountsPerState.keySet) {
val (begin, end, _) = untetra(key)
val ffeats = sspec.reducedFeaturesForSpan(begin, end)
decoupledLayers(UnaryLayerIdx).tallyDerivative(deriv(dcUnaryFeatOffset until dcUnaryFeatOffset + decoupledLayers(UnaryLayerIdx).index.size), { unaryRuleCountsPerState(key) * scale }, ffeats)
}
for (key <- binaryRuleCountsPerState.keySet) {
val (begin, split, end) = untetra(key)
val ffeats = sspec.featuresForSplit(begin, split, end)
decoupledLayers(BinaryLayerIdx).tallyDerivative(deriv(dcBinaryFeatOffset until dcBinaryFeatOffset + decoupledLayers(BinaryLayerIdx).index.size), { binaryRuleCountsPerState(key) * scale }, ffeats)
}
}
}
def anchor(w: IndexedSeq[W], cons: ChartConstraints[L]):GrammarAnchoring[L, W] = new ProjectionsGrammarAnchoring[L, L2, W] {
override def addConstraints(constraints: ChartConstraints[L]): GrammarAnchoring[L, W] = {
anchor(w, cons & constraints)
}
override def sparsityPattern: ChartConstraints[L] = cons
def refinements = PositionalNeuralGrammar.this.refinements
def refinedTopology: RuleTopology[L2] = PositionalNeuralGrammar.this.refinedTopology
val topology = PositionalNeuralGrammar.this.topology
val lexicon = PositionalNeuralGrammar.this.lexicon
def words = w
val l = w.size
val maxTetraLen = ((l + 2) * (l + 3) * (l + 4))/6 + ((l + 1) * (l + 2))/2 + l + 2
// Doesn't make things faster to use HashMaps here
val cache = Array.tabulate(layers.size + decoupledLayers.size)(i => new Array[DenseVector[Double]](maxTetraLen))
val finalCache = Array.tabulate(layers.size + decoupledLayers.size)(i => new Array[SparseVector[Double]](maxTetraLen))
def getOrElseUpdate(layerIdx: Int, tetraIdx: Int, fun: => DenseVector[Double]) = {
if (cache(layerIdx)(tetraIdx) == null) cache(layerIdx)(tetraIdx) = fun
cache(layerIdx)(tetraIdx)
}
def getOrElseUpdateFinal(layerIdx: Int, tetraIdx: Int, rfeatIdx: Int, maxVectSize: Int, fun: => Double) = {
if (finalCache(layerIdx)(tetraIdx) == null) finalCache(layerIdx)(tetraIdx) = SparseVector.zeros(maxVectSize)
if (!finalCache(layerIdx)(tetraIdx).contains(rfeatIdx)) finalCache(layerIdx)(tetraIdx)(rfeatIdx) = fun
finalCache(layerIdx)(tetraIdx)(rfeatIdx)
}
val sspec = surfaceFeaturizer.anchor(w)
val depSpec = depFeaturizer.anchor(w)
val lspec = labelFeaturizer.anchor(w)
val fspec = if (maybeSparseSurfaceFeaturizer.isDefined) maybeSparseSurfaceFeaturizer.get.anchor(w) else null
val sparseFeatsStart = if (maybeSparseSurfaceFeaturizer.isDefined) layers.map(_.index.size).sum + depLayers.map(_.index.size).sum + decoupledLayers.map(_.index.size).sum else -1
private def tetra(begin: Int, split: Int, end: Int) = {
(end * (end + 1) * (end + 2))/6 + ((split + 1) * split / 2 + begin)
}
def scoreBinaryRule(begin: Int, split: Int, end: Int, rule: Int, ref: Int) = {
var total = 0.0
val tetraIdx = tetra(begin, split, end)
val rfeats = lspec.featuresForBinaryRule(begin, split, end, rule, ref)
layers.indices.foreach { layerIdx =>
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateLayers(layerIdx).activations(sspec.featuresForSplit(begin, split, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { layers(layerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (decoupledLayers.nonEmpty) {
val layerIdx = layers.size + BinaryLayerIdx
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateDecoupledLayers(BinaryLayerIdx).activations(sspec.featuresForSplit(begin, split, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { decoupledLayers(BinaryLayerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (maybeSparseSurfaceFeaturizer.isDefined) {
total += dot(fspec.featuresForBinaryRule(begin, split, end, rule, ref), sparseFeatsStart)
}
total
}
def scoreUnaryRule(begin: Int, end: Int, rule: Int, ref: Int) = {
var total = 0.0
val tetraIdx = tetra(begin, end, length + 1)
val rfeats = lspec.featuresForUnaryRule(begin, end, rule, ref)
layers.indices.foreach { layerIdx =>
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateLayers(layerIdx).activations(sspec.featuresForSpan(begin, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { layers(layerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (decoupledLayers.nonEmpty) {
val layerIdx = layers.size + UnaryLayerIdx
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateDecoupledLayers(UnaryLayerIdx).activations(sspec.reducedFeaturesForSpan(begin, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { decoupledLayers(UnaryLayerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (maybeSparseSurfaceFeaturizer.isDefined) {
total += dot(fspec.featuresForUnaryRule(begin, end, rule, ref), sparseFeatsStart)
}
total
}
def scoreSpan(begin: Int, end: Int, tag: Int, ref: Int) = {
var total = 0.0
val tetraIdx = tetra(begin, end, length + 2)
val rfeats = lspec.featuresForSpan(begin, end, tag, ref)
layers.indices.foreach { layerIdx =>
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateLayers(layerIdx).activations(sspec.featuresForSpan(begin, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { layers(layerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (decoupledLayers.nonEmpty) {
val layerIdx = layers.size + SpanLayerIdx
val fs = getOrElseUpdate(layerIdx, tetraIdx, { penultimateDecoupledLayers(SpanLayerIdx).activations(sspec.reducedFeaturesForSpan(begin, end)) })
for (rfeat <- rfeats) {
total += getOrElseUpdateFinal(layerIdx, tetraIdx, rfeat, labelFeaturizer.index.size, { decoupledLayers(SpanLayerIdx).activationsFromPenultimateDot(fs, rfeat) })
}
}
if (maybeSparseSurfaceFeaturizer.isDefined) {
total += dot(fspec.featuresForSpan(begin, end, tag, ref), sparseFeatsStart)
}
total
}
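      // Added comment: `dot` below treats `features` as the active indices of an indicator vector
      // and sums the corresponding entries of the sparse block of `weights`, which begins at
      // `sparseFeaturesOffset`.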
private def dot(features: Array[Int], sparseFeaturesOffset: Int) = {
var i = 0
var score = 0.0
val wdata = weights.data
while (i < features.length) {
score += wdata(features(i) + sparseFeaturesOffset)
i += 1
}
score
}
}
}
}
|
langkilde/epic
|
src/main/scala/epic/parser/models/PositionalNeuralModel.scala
|
Scala
|
apache-2.0
| 23,603
|
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
abstract class AbstractFunction16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, +R] extends Function16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] {
}
|
cran/rkafkajars
|
java/scala/runtime/AbstractFunction16.scala
|
Scala
|
apache-2.0
| 835
|
package com.stefansavev.fuzzysearchtest
import java.io.StringReader
import com.stefansavev.TemporaryFolderFixture
import com.stefansavev.core.serialization.TupleSerializers._
import org.junit.runner.RunWith
import org.scalatest.{FunSuite, Matchers}
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class GloveUnitTest extends FunSuite with TemporaryFolderFixture with Matchers {
def readResource(name: String): String ={
val stream = getClass.getResourceAsStream(name)
val lines = scala.io.Source.fromInputStream( stream ).getLines
lines.mkString("\\n")
}
def parameterizedTest(inputTextFile: String, indexFile: String, numTrees: Int, expectedResultsName: String): Unit ={
val expectedResults = readResource(expectedResultsName).trim
val queryResults = GloveTest.run(inputTextFile, indexFile, numTrees).trim
assertResult(expectedResults)(queryResults)
}
  // Manually download http://nlp.stanford.edu/data/glove.6B.zip and unzip into test/resources/glove,
  // then enable the test.
ignore("test glove num trees 1") {
val numTrees: Int = 1
val inputTextFile: String = "src/test/resources/glove/glove.6B.100d.txt"
val index = temporaryFolder.newFolder("index").getAbsolutePath
val expectedResultsResouceName = "/glove/expected_results_num_trees_1.txt"
parameterizedTest(inputTextFile, index, numTrees, expectedResultsResouceName)
}
ignore("test glove num trees 150") {
val numTrees: Int = 150
val inputTextFile: String = "src/test/resources/glove/glove.6B.100d.txt"
val index = temporaryFolder.newFolder("index").getAbsolutePath
val expectedResultsResouceName = "/glove/expected_results_num_trees_150.txt"
parameterizedTest(inputTextFile, index, numTrees, expectedResultsResouceName)
}
}
|
stefansavev/random-projections-at-berlinbuzzwords
|
src/test/scala/com/stefansavev/fuzzysearchtest/GloveUnitTest.scala
|
Scala
|
apache-2.0
| 1,789
|
package org.jetbrains.plugins.scala.lang.completion.postfix.templates.selector
import com.intellij.openapi.util.Condition
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager.ClassCategory
import org.jetbrains.plugins.scala.lang.psi.types.{Boolean => BooleanType, ScType, ValType}
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil
import scala.language.implicitConversions
/**
* @author Roman.Shein
* @since 08.09.2015.
*/
object SelectorConditions {
val BOOLEAN_EXPR = typedCondition(BooleanType)
val ANY_EXPR = new Condition[PsiElement] {
override def value(t: PsiElement): Boolean = t.isInstanceOf[ScExpression]
}
val THROWABLE = isDescendantCondition("java.lang.Throwable")
def isDescendantCondition(ancestorFqn: String) = new Condition[PsiElement]{
override def value(t: PsiElement): Boolean = t match {
case expr: ScExpression =>
val project = t.getProject
val manager = ScalaPsiManager.instance(project)
expr.getTypeIgnoreBaseType().toOption.flatMap{exprType => ScType.extractClass(exprType, Option(project)).map{ psiClass =>
val base = manager.getCachedClass(ancestorFqn, GlobalSearchScope.allScope(project), ClassCategory.ALL)
(psiClass != null && base != null && ScEquivalenceUtil.areClassesEquivalent(psiClass, base)) ||
manager.cachedDeepIsInheritor(psiClass, base)}}.getOrElse(false)
case _ => false
}
}
def typedCondition(myType: ValType) = new Condition[PsiElement]{
override def value(t: PsiElement): Boolean = t match {
case expr: ScExpression => expr.getTypeIgnoreBaseType().getOrAny == myType
case _ => false
}
}
class ExpandedCondition[T](source: Condition[T]) extends Condition[T] {
override def value(t: T): Boolean = source.value(t)
def ||(other: Condition[_ >: T]) = {
def f(t: T) = value(t) || other.value(t)
new Condition[T] {
override def value(t: T) = f(t)
}
}
def &&(other: Condition[_ >: T]) = {
def f(t: T) = value(t) && other.value(t)
new Condition[T] {
override def value(t: T) = f(t)
}
}
def a: Boolean = false
}
}
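// Illustrative usage sketch (added; hypothetical composition):
//   import SelectorConditions._
//   val boolOrThrowable = new ExpandedCondition(BOOLEAN_EXPR) || THROWABLE
//   // boolOrThrowable.value(expr) holds when the expression is Boolean-typed or a Throwable subtype.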
|
JetBrains/intellij-scala-historical
|
src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/selector/SelectorConditions.scala
|
Scala
|
apache-2.0
| 2,401
|
package ca.aretex.labs.data
import java.text.{SimpleDateFormat, DecimalFormatSymbols, DecimalFormat}
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import java.time.temporal.{ChronoField, WeekFields}
import java.util.{Calendar, UUID, Locale}
import org.slf4j.{Logger, LoggerFactory}
import org.apache.commons.lang3.StringUtils
/**
* Created by Choungmo Fofack on 1/16/17.
*/
object TicketDataUtil {
private val logger: Logger = LoggerFactory.getLogger(TicketDataUtil.getClass)
val ticketFileProperties: String = "ticket.properties"
val THOUSAND = 1000
val SEED = 2000l
val THRESHOLD = 1e-9
val EMPTY_CHAR: String = "\\0"
val EMPTY_STRING = "empty"
val UNKNOWN_STRING = "unknown"
val NB_OUTPUT_PARTITIONS = 5
val NB_SAMPLES = 5
val LABEL_INIT = -1
val LABEL_ZERO = 0
val LABEL_ONE = 1
val LABEL_TWO = 2
val FORMAT_CSV = "com.databricks.spark.csv"
val FORMAT_PARQUET = "parquet"
val FORMAT_JSON = "json"
val FORMAT_TAR ="targz"
val FORMAT_DATE="yyyyMMdd"
val FORMATTER_NUMBER = new DecimalFormat("#.####", new DecimalFormatSymbols(Locale.US))
val FORMATTER_DATE = DateTimeFormatter.ofPattern(FORMAT_DATE)
val QUOTE = "\\""
val SLASH = "/"
val SINGLEPIPE_DELIMITER = "|"
val DOUBLEPIPE_DELIMITER = "||"
val SPACE_SEPARATOR = " "
val COLON_SEPARATOR = ":"
  val TAB_SEPARATOR = "\t"
  val LINE_SEPARATOR = "\n"
  val SEMICOLON_SEPARATOR = ";"
  val COMMA_SEPARATOR = ","
  val SINGLEPIPE_REGEX = "\\|"
  val DOUBLEPIPE_REGEX = "\\|\\|"
val NON_CAPTURING_REGEX = "?:"
val PATH_REGEX = "([a-zA-Z]+:/)?(/?[a-zA-Z0-9_.-]+)+"
def getDateAndInstant(): (String, Long) = {
val date = Calendar.getInstance().getTime
val instant = date.getTime
val ticketAppRunDate = new SimpleDateFormat(FORMAT_DATE).format(date)
(ticketAppRunDate, instant)
}
def getYearAndWeek(strdate: String): (Int, Int) = {
val date = LocalDateTime.parse(s"${strdate}_120000", FORMATTER_DATE)
(date.getYear, date.get(WeekFields.of(Locale.FRANCE).weekOfWeekBasedYear()))
}
def getYearMonthAndWeek(strdate: String): (Int, Int, Int) = {
val date = LocalDateTime.parse(s"${strdate}_120000", FORMATTER_DATE)
(date.getYear, date.getMonthValue, date.get(WeekFields.of(Locale.FRANCE).weekOfWeekBasedYear()))
}
def getFirstDateOfMonth(strdate: String): String = {
val date = LocalDateTime.parse(s"${strdate}_120000", FORMATTER_DATE).`with`(ChronoField.DAY_OF_MONTH , 1 )
date.toLocalDate.format(DateTimeFormatter.ofPattern("yyyyMMdd"))
}
def cleans(line: String, repl: String=TicketDataUtil.QUOTE): String =
StringUtils.removeEnd(StringUtils.removeStart(line, repl), repl)
  def stripQuotes(s: String, repl: String = "\"", bothSide: Boolean = true): String =
if(bothSide) s.stripPrefix(repl).stripSuffix(repl) else s.stripPrefix(repl)
def splits: (String, String) => Array[String] = StringUtils.splitByWholeSeparatorPreserveAllTokens
def splitsLimit: (String, String, Int) => Array[String] = StringUtils.splitByWholeSeparatorPreserveAllTokens
def errorHandler(bogus: String, t: Throwable): Unit = logger.warn(t.getMessage + s": $bogus")
def generateUUID(prefix: String): String = s"${prefix}_${UUID.randomUUID.toString.takeRight(12)}"
}
|
nicaiseeric/lazy-json
|
src/main/scala/ca/aretex/labs/data/TicketDataUtil.scala
|
Scala
|
apache-2.0
| 3,264
|
package simutils
import akka.actor._
import dmfmessages.DMFSimMessages._
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConversions._
object RemoteWorkers {
  def translateAkkaAddress(akkaAddress: AkkaAddress): Address = Address(akkaAddress.getProtocol, akkaAddress.getSystem, akkaAddress.getHost, akkaAddress.getPort)
def buildAkkaAddress(address: Address): AkkaAddress = AkkaAddress.newBuilder()
.setProtocol(address.protocol).setSystem(address.system).setHost(address.host.getOrElse("")).setPort(address.port.getOrElse(0)).build
def buildRemoteWorkersReady(addresses: Seq[Address]): RemoteWorkersReady = {
val addressList = addresses.map (a => buildAkkaAddress(a))
RemoteWorkersReady.newBuilder().addAllAddresses(addressList).build
}
def buildNewRemoteWorker(address: Address) = NewRemoteWorker.newBuilder().setAddress(buildAkkaAddress(address)).build
/**
* Factory method for [[akka.actor.Props]] creation for [[RemoteWorkers]]
*
* @param readyActor Actor to notify when workers are ready
   * @param minWorkers Number of workers required before [[RemoteWorkers]] is ready
* @return [[akka.actor.Props]] for [[RemoteWorkers]] [[akka.actor.Actor]] creation
*/
def props(readyActor: ActorRef, minWorkers: Int) = Props(new RemoteWorkers(readyActor, minWorkers))
}
class RemoteWorkers(val readyActor: ActorRef, val minWorkers: Int = 1) extends Actor {
val workers = ListBuffer[Address]()
var index = 0
var started = false
import RemoteWorkers._
def receive = {
case nrw: NewRemoteWorker =>
val addr = nrw.getAddress
val address = Address(addr.getProtocol, addr.getSystem, addr.getHost, addr.getPort)
println("Adding " + address + " to workers list")
workers += address
if (workers.size == minWorkers && !started) {
started = true
readyActor ! buildRemoteWorkersReady(workers.toList)
self ! PoisonPill
}
}
}
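// Usage sketch, not part of the original file: the actor system and readyActor are supplied by
// the caller; the actor name used here is illustrative only.
object RemoteWorkersUsageSketch {
  def start(system: ActorSystem, readyActor: ActorRef): ActorRef =
    system.actorOf(RemoteWorkers.props(readyActor, minWorkers = 2), "remoteWorkers")
}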
|
rkewley/devsdmf
|
src/main/scala/simutils/RemoteWorkers.scala
|
Scala
|
apache-2.0
| 1,960
|
//
// Extended.scala -- Scala class and objects for the Orc extended AST
// Project OrcScala
//
// $Id: Extended.scala 3023 2012-04-20 02:55:50Z laurenyew $
//
// Created by dkitchin on May 19, 2010.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.ast.ext
import orc.ast.AST
import orc.ast.OrcSyntaxConvertible
sealed abstract class Expression extends AST
case class Stop() extends Expression
case class Constant(c: AnyRef) extends Expression
case class Variable(name: String) extends Expression
case class TupleExpr(elements: List[Expression]) extends Expression { require(elements.size > 1) }
case class ListExpr(elements: List[Expression]) extends Expression
case class RecordExpr(elements: List[(String, Expression)]) extends Expression
case class Call(target: Expression, gs: List[ArgumentGroup]) extends Expression
case object Hole extends Expression
sealed abstract class ArgumentGroup extends AST
case class Args(types: Option[List[Type]] = None, elements: List[Expression]) extends ArgumentGroup
case class FieldAccess(field: String) extends ArgumentGroup
case object Dereference extends ArgumentGroup
case class PrefixOperator(op: String, arg: Expression) extends Expression
case class InfixOperator(left: Expression, op: String, right: Expression) extends Expression
case class Sequential(left: Expression, p: Option[Pattern] = None, right: Expression) extends Expression
case class Parallel(left: Expression, right: Expression) extends Expression
case class Pruning(left: Expression, p: Option[Pattern] = None, right: Expression) extends Expression
case class Otherwise(left: Expression, right: Expression) extends Expression
case class Lambda(
typeformals: Option[List[String]] = None,
formals: List[Pattern],
returntype: Option[Type] = None,
guard: Option[Expression] = None,
body: Expression) extends Expression
case class Conditional(ifE: Expression, thenE: Expression, elseE: Expression) extends Expression
case class Declare(declaration: Declaration, body: Expression) extends Expression
case class TypeAscription(e: Expression, t: Type) extends Expression
case class TypeAssertion(e: Expression, t: Type) extends Expression
//Expression (1+2)@A
case class SecurityLevelExpression(e: Expression, level: String) extends Expression
// An internal representation for the body of a 'def class'
case class DefClassBody(body: Expression) extends Expression
sealed abstract class Declaration extends AST
case class Val(p: Pattern, e: Expression) extends Declaration
case class Include(origin: String, decls: List[Declaration]) extends Declaration
sealed abstract class NamedDeclaration extends Declaration {
val name: String
}
sealed abstract class DefDeclaration extends NamedDeclaration
case class Def(name: String, typeformals: Option[List[String]], formals: List[Pattern], returntype: Option[Type], guard: Option[Expression], body: Expression) extends DefDeclaration
case class DefClass(name: String, typeformals: Option[List[String]], formals: List[Pattern], returntype: Option[Type], guard: Option[Expression], body: Expression) extends DefDeclaration
case class DefSig(name: String, typeformals: Option[List[String]], argtypes: List[Type], returntype: Type) extends DefDeclaration
// Convenience extractor for sequences of definitions enclosing some scope
object DefGroup {
def unapply(e: Expression): Option[(List[DefDeclaration], Expression)] = {
partition(e) match {
case (Nil, _) => None
case (ds, f) => Some((ds, f))
}
}
private def partition(e: Expression): (List[DefDeclaration], Expression) = {
e match {
case Declare(d: DefDeclaration, f) => {
val (ds, g) = partition(f)
(d :: ds, g)
}
case _ => (Nil, e)
}
}
}
sealed abstract class SiteDeclaration extends NamedDeclaration
case class SiteImport(name: String, sitename: String) extends SiteDeclaration
case class ClassImport(name: String, classname: String) extends SiteDeclaration
sealed abstract class TypeDeclaration extends NamedDeclaration
case class TypeAlias(name: String, typeformals: List[String] = Nil, aliasedtype: Type) extends TypeDeclaration
case class TypeImport(name: String, classname: String) extends TypeDeclaration
case class Datatype(name: String, typeformals: List[String] = Nil, constructors: List[Constructor]) extends TypeDeclaration
//SecurityLevelDeclaration
//For DeclSL in parser
//sealed: class can't be referenced outside of file
//ident is parser that gives String
sealed case class SecurityLevelDeclaration(name: String, parents: List[String], children: List[String]) extends NamedDeclaration
case class Constructor(name: String, types: List[Option[Type]]) extends AST
sealed abstract class Pattern extends AST with OrcSyntaxConvertible {
val isStrict: Boolean
}
sealed abstract class NonStrictPattern extends Pattern {
val isStrict = false
}
case class Wildcard() extends NonStrictPattern { override def toOrcSyntax = "_" }
case class VariablePattern(name: String) extends NonStrictPattern { override def toOrcSyntax = name }
sealed abstract class StrictPattern extends Pattern {
val isStrict = true
}
case class ConstantPattern(c: AnyRef) extends StrictPattern { override def toOrcSyntax = if (c == null) "null" else c.toString }
case class TuplePattern(elements: List[Pattern]) extends StrictPattern { override def toOrcSyntax = elements.map(_.toOrcSyntax).mkString("(", ", ", ")") }
case class ListPattern(elements: List[Pattern]) extends StrictPattern { override def toOrcSyntax = elements.map(_.toOrcSyntax).mkString("[", ", ", "]") }
case class CallPattern(name: String, args: List[Pattern]) extends StrictPattern { override def toOrcSyntax = name + args.map(_.toOrcSyntax).mkString("(", ", ", ")") }
case class ConsPattern(head: Pattern, tail: Pattern) extends StrictPattern { override def toOrcSyntax = "(" + head.toOrcSyntax + ":" + tail.toOrcSyntax + ")" }
case class RecordPattern(elements: List[(String, Pattern)]) extends StrictPattern { override def toOrcSyntax = elements.map({ case (f, p) => f + " = " + p.toOrcSyntax }).mkString("{. ", ", ", " .}") }
//SecurityType pattern
//ST
case class SecurityLevelPattern(p: Pattern, name: String) extends Pattern {
val isStrict = p.isStrict
override def toOrcSyntax = p.toOrcSyntax + " @" + name
}
case class AsPattern(p: Pattern, name: String) extends Pattern {
val isStrict = p.isStrict
override def toOrcSyntax = p.toOrcSyntax + " as " + name
}
case class TypedPattern(p: Pattern, t: Type) extends Pattern {
val isStrict = p.isStrict
override def toOrcSyntax = p.toOrcSyntax + " :: " + t.toOrcSyntax
}
sealed abstract class Type extends AST with OrcSyntaxConvertible
case class TypeVariable(name: String) extends Type { override def toOrcSyntax = name }
case class TupleType(elements: List[Type]) extends Type { override def toOrcSyntax = elements.map(_.toOrcSyntax).mkString("(", ", ", ")") }
case class RecordType(elements: List[(String, Type)]) extends Type { override def toOrcSyntax = elements.map({ case (f, t) => f + " :: " + t.toOrcSyntax }).mkString("{. ", ", ", " .}") }
case class LambdaType(typeformals: List[String], argtypes: List[Type], returntype: Type) extends Type {
override def toOrcSyntax = "lambda" + (if (typeformals.size > 0) typeformals.mkString("[", ", ", "]") else "") + argtypes.map(_.toOrcSyntax).mkString("(", ", ", ")") + " :: " + returntype.toOrcSyntax
}
case class TypeApplication(name: String, typeactuals: List[Type]) extends Type { override def toOrcSyntax = name + typeactuals.map(_.toOrcSyntax).mkString("[", ", ", "]") }
|
laurenyew/cOrcS
|
src/orc/ast/ext/Extended.scala
|
Scala
|
bsd-3-clause
| 7,831
|
package beam.utils.scenario.urbansim
private[urbansim] object DataExchange {
case class UnitInfo(unitId: String, buildingId: String)
case class ParcelAttribute(primaryId: String, x: Double, y: Double)
case class BuildingInfo(buildingId: String, parcelId: String)
case class PersonInfo(
personId: String,
householdId: String,
rank: Int,
age: Int,
isFemale: Boolean,
valueOfTime: Double
)
case class PlanElement(
personId: String,
planElement: String,
planElementIndex: Int,
activityType: Option[String],
x: Option[Double],
y: Option[Double],
endTime: Option[Double],
mode: Option[String]
)
case class HouseholdInfo(householdId: String, cars: Int, income: Double, unitId: String, buildingId: String)
}
|
colinsheppard/beam
|
src/main/scala/beam/utils/scenario/urbansim/DataExchange.scala
|
Scala
|
gpl-3.0
| 778
|
package com.coiney.akka.mailer.actors
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestProbe, TestKit}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
class DispatcherSpec(_actorSystem: ActorSystem) extends TestKit(_actorSystem)
with ImplicitSender
with WordSpecLike
with Matchers
with BeforeAndAfterAll
with MailerSpec {
def this() = this(ActorSystem("DispatcherSpec"))
override def afterAll(): Unit = {
system.shutdown()
}
"Dispatcher actor" should {
"inform the master that it is created" in {
val probe = TestProbe()
val dispatcher = TestDispatcherRef(probe.ref)
probe.expectMsg(Master.DispatcherCreated(dispatcher))
}
"request an email when being informed there's new ones available" in {
val probe = TestProbe()
val dispatcher = TestDispatcherRef(probe.ref)
probe.expectMsg(Master.DispatcherCreated(dispatcher))
probe.send(dispatcher, Dispatcher.MailsAvailable)
probe.expectMsg(Master.MailRequest(dispatcher))
}
"process an email, confirm processing and request a new one" in {
val probe = TestProbe()
val dispatcher = TestDispatcherRef(probe.ref)
val email = randomEmail()
probe.expectMsg(Master.DispatcherCreated(dispatcher))
probe.send(dispatcher, Dispatcher.SendMail(email))
probe.expectMsg(Master.MailSent(dispatcher))
probe.expectMsg(Master.MailRequest(dispatcher))
}
}
}
|
Coiney/akka-mailer
|
akka-mailer-core/src/test/scala/com/coiney/akka/mailer/actors/DispatcherSpec.scala
|
Scala
|
bsd-3-clause
| 1,706
|
package org.jetbrains.plugins.scala
package codeInspection
package collections
import com.intellij.testFramework.EditorTestUtil
/**
* Nikolay.Tropin
* 1/24/14
*/
class SortFilterTest extends OperationsOnCollectionInspectionTest {
override protected val classOfInspection: Class[_ <: OperationOnCollectionInspection] =
classOf[SortFilterInspection]
override protected val hint: String =
ScalaInspectionBundle.message("sort.filter.hint")
def testWithoutParams(): Unit = {
val selected = s"List(0, 1).${START}sorted.filter(_ => true)$END"
checkTextHasError(selected)
val text = "List(0, 1).sorted.filter(_ => true)"
val result = "List(0, 1).filter(_ => true).sorted"
testQuickFix(text, result, hint)
}
def testWithParameter(): Unit = {
val selected = s"List(0, 1).${START}sortWith((x, y) => x < y).filter(_ => true)$END"
checkTextHasError(selected)
val text = "List(0, 1).sortWith((x, y) => x < y).filter(_ => true)"
val result = "List(0, 1).filter(_ => true).sortWith((x, y) => x < y)"
testQuickFix(text, result, hint)
}
def testWithGenericParameter(): Unit = {
val selected = s"List(0, 1).${START}sortBy[String](_.toString).filter(_ => true)$END"
checkTextHasError(selected)
val text = "List(0, 1).sortBy[String](_.toString).filter(_ => true)"
val result = "List(0, 1).filter(_ => true).sortBy[String](_.toString)"
testQuickFix(text, result, hint)
}
def testInfix(): Unit = {
val selected = s"List(0, 1).${START}sortBy[String](_.toString) filter (_ => true)$END"
checkTextHasError(selected)
val text = "List(0, 1).sortBy[String](_.toString) filter (_ => true)"
val result = "List(0, 1).filter(_ => true).sortBy[String](_.toString)"
testQuickFix(text, result, hint)
}
def testWithSideEffect(): Unit = {
checkTextHasNoErrors(
"""
|var q = 1
|Seq(3, 1, 2).sorted.filter {
| i =>
| q += 1
| i % 2 == 0
|}
""".stripMargin)
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/codeInspection/collections/SortFilterTest.scala
|
Scala
|
apache-2.0
| 2,020
|
package jadecrawler.website
import scala.language.postfixOps
import jadeutils.common.Logging
import org.apache.commons.lang.StringUtils.isNotBlank
import org.json.JSONObject
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import org.jsoup.select.Elements
import jadecrawler.dto.website.IcibaDto
import jadecrawler.dto.website.IcibaS2Dto
import jadecrawler.dto.website.IcibaS3Dto
import jadecrawler.dto.website.IcibaHomoDto
import jadeutils.common.JsoupUtils._
import scala.collection.JavaConversions._
object WebElongParser extends Logging {
  val inDatePattern = """WebCategory: "([a-zA-Z])",""".r
def parsePage(htmlStr: String): String = {
val doc = Jsoup parse htmlStr
val srcBlks= doc select "script"
val srclist = (for (src <- srcBlks; rec = src.html if rec.contains("""var DetailController ="""))
yield { rec }) toList;
val s = if (null != srclist && srclist.size > 0) { srclist(0) } else {null}
// logger.debug("DetailController: {}",s)
val searchId = if(null!=s) {
      val ctx = org.mozilla.javascript.Context.enter()
      val jsBeautyScope = ctx.initStandardObjects()
      val inDate = ctx.evaluateString(jsBeautyScope, s + " ;\"\"+DetailController.inDate", null, 0, null)
      val outDate = ctx.evaluateString(jsBeautyScope, s + " ;\"\"+DetailController.outDate", null, 0, null)
      val spuIds = ctx.evaluateString(jsBeautyScope, s + " ;\"\"+DetailController.AjaxSupplierIDList", null, 0, null)
      val schId = ctx.evaluateString(jsBeautyScope, s + " ;\"\"+DetailController.DetailSearchId", null, 0, null)
logger.debug("inDate: {}",inDate)
logger.debug("outDate: {}",outDate)
logger.debug("spuIds: {}", spuIds )
logger.debug("schId: {}", schId)
spuIds.toString
} else {null}
searchId
}
}
object ElongCrawler extends Logging {
val site = "ihotel.elong.com"
def process(cityId: String, cityName: String, hotelId: String,
inDate: String, outDate: String): String =
{
val page = fetchPage(cityId, cityName, hotelId, inDate, outDate)
val searchId = if (null != page) { WebElongParser.parsePage(page) }
else { null }
val data = if (null != searchId) {
fetchData(cityId, cityName, hotelId, inDate, outDate, searchId)
} else { null }
""
}
def fetchData(cityId: String, cityName: String, hotelId: String,
inDate: String, outDate: String, searchId: String): String =
{
val pageUrl = "http://ihotel.elong.com/isajax/Detail/getSupplierRoomList/"
val cookie = genCookie(inDate, outDate)
val data = try {
import jadecrawler.net.HTTPUtil
val resp = HTTPUtil.doPost(pageUrl, HTTPUtil.firefoxParams +
// ("Host" -> site) +
("Cookie" -> cookie),
("hotelid", hotelId):: ("searchid", searchId):: ("regionid", cityId)::
("cityen", cityName) :: ("viewpath", "~/views/channel/Detail.aspx") ::
("supplierIDList[0]", "2") :: Nil)
if (null != resp && null != resp.content && resp.content.length > 0) {
new String(resp.content)
} else ""
} catch { case e: Throwable => {
logger warn ("elong crawler error: {}, {}, {}, {}, {}, {}",
Array(cityId, cityName, hotelId, inDate, outDate, e)); null }
}
logger.debug(data)
data
}
def fetchPage(cityId: String, cityName: String, hotelId: String,
inDate: String, outDate: String): String =
{
//Thread.sleep(10000)
val pageUrl = "http://ihotel.elong.com/detail-%s-%s/%s/".format(
cityName, cityId, hotelId)
logger debug ("pageurl is: {}", pageUrl)
val cookie = genCookie(inDate, outDate)
logger debug ("cookie is: {}", cookie)
val page = try {
import jadecrawler.net.HTTPUtil
val resp = HTTPUtil.doGet(pageUrl, HTTPUtil.firefoxParams +
// ("Host" -> site) +
("Cookie" -> cookie))
if (null != resp && null != resp.content && resp.content.length > 0) {
new String(resp.content)
} else ""
} catch { case e: Throwable => {
logger warn ("elong crawler error: {}, {}, {}, {}, {}, {}",
Array(cityId, cityName, hotelId, inDate, outDate, e)); null }
}
// logger.debug(page)
page
}
def genCookie(inDate: String, outDate: String): String =
"IHotelSearch=OutDate=" +
outDate + "+0:00:00&InDate=" + inDate + "+0:00:00&RoomPerson=1|2;"
// "IHotelSearch=InDate=" + inDate + "&OutDate=" +
// outDate + "&RoomPerson=1|2;"
}
|
Jade-Shan/Jade-crawler
|
crawler-logic/src/main/scala/website/ElongParser.scala
|
Scala
|
gpl-3.0
| 4,291
|
package org.scalaide.core
package quickassist
import org.eclipse.jdt.internal.core.util.SimpleDocument
import org.junit.AfterClass
import org.junit.Assert
import org.junit.BeforeClass
import org.junit.Test
import org.scalaide.core.internal.quickassist.explicit.ExplicitReturnType
object ExplicitTypeAssistTest extends QuickAssistTest {
@BeforeClass
def createProject() = create("assist")
@AfterClass
def deleteProject() = delete()
}
/** This test suite requires the UI. */
class ExplicitTypeAssistTest extends QuickAssistTestHelper {
import ExplicitTypeAssistTest._
val quickAssist = new ExplicitReturnType
def createSource(packageName: String, unitName: String)(contents: String) = createSourceFile(packageName, unitName)(contents)
def assistsFor(contents: String, expected: String): Unit =
runQuickAssistWith(contents) { p =>
Assert.assertTrue("Add explicit type proposal not found", p.nonEmpty)
val doc = new SimpleDocument(contents.filterNot(_ == '^'))
p.head.apply(doc)
Assert.assertEquals("Changes unexpected", expected, doc.get())
}
@Test
def assistVal(): Unit = {
assistsFor("""
class Test {
val foo = ^42
}
""".stripMargin, """
class Test {
val foo: Int = 42
}
""".stripMargin)
}
@Test
def assistDef(): Unit = {
assistsFor("""
class Test {
def foo(x: Int) = ^x + 1
}
""".stripMargin, """
class Test {
def foo(x: Int): Int = x + 1
}
""".stripMargin)
}
@Test
def assistList(): Unit = {
assistsFor("""
class Test {
def foo(x: Int) = ^List.fill(x)(0)
}
""".stripMargin, """
class Test {
def foo(x: Int): List[Int] = List.fill(x)(0)
}
""".stripMargin)
}
@Test
def assistMultiLine(): Unit = {
assistsFor("""
class Test {
def foo(x: Int) = ^{
List.fill(x)(0)
}
}
""".stripMargin, """
class Test {
def foo(x: Int): List[Int] = {
List.fill(x)(0)
}
}
""".stripMargin)
}
@Test
def assistComplexSignature(): Unit = {
assistsFor("""
class Test {
def foo[T](size: Int = 42, init: T)(implicit ord: Ordered[T]) = {
^List.fill(size)(init)
}
}
""".stripMargin, """
class Test {
def foo[T](size: Int = 42, init: T)(implicit ord: Ordered[T]): List[T] = {
List.fill(size)(init)
}
}
""".stripMargin)
}
@Test
def assistInnerScopeVal(): Unit = {
assistsFor("""
class Test {
def foo(x: Int) = {
val size = 10
val bar = ^List.fill(size)(0)
}
}
""".stripMargin, """
class Test {
def foo(x: Int) = {
val size = 10
val bar: List[Int] = List.fill(size)(0)
}
}
""".stripMargin)
}
@Test
def assistInnerScopeDef(): Unit = {
assistsFor("""
class Test {
def foo(x: Int) = {
val size = 10
def bar[T](init: T) = ^List.fill(size)(init)
}
}
""".stripMargin, """
class Test {
def foo(x: Int) = {
val size = 10
def bar[T](init: T): List[T] = List.fill(size)(init)
}
}
""".stripMargin)
}
@Test
def assistTransitive(): Unit = {
assistsFor("""
class Test {
val x = ^initialize()
def initialize() = {
            count += 1
count
}
var count = 0
}
""".stripMargin, """
class Test {
val x: Int = initialize()
def initialize() = {
            count += 1
count
}
var count = 0
}
""".stripMargin)
}
@Test
def assistMultiAssign(): Unit = {
assistsFor("""
class Test {
val x, y, z = ^initialize()
def initialize() = 0
}
""".stripMargin, """
class Test {
val x, y, z: Int = initialize()
def initialize() = 0
}
""".stripMargin)
}
@Test
def noAssistPatMat(): Unit = {
noAssistsFor("""
class Test {
val Some(x) = ^Option(new Object)
}
""".stripMargin)
}
@Test
def noAssistTuple(): Unit = {
noAssistsFor("""
class Test {
val (x, y) = ^(1, 2)
}
""".stripMargin)
}
@Test
def assistOperatorVal(): Unit = {
assistsFor("""
class Test {
val ~ = ^42
}
""".stripMargin, """
class Test {
val ~ : Int = 42
}
""".stripMargin)
}
@Test
def assistOperatorDef(): Unit = {
assistsFor("""
class Test {
def ++ = ^42
}
""".stripMargin, """
class Test {
def ++ : Int = 42
}
""".stripMargin)
}
}
|
dragos/scala-ide
|
org.scala-ide.sdt.core.tests/src/org/scalaide/core/quickassist/ExplicitTypeAssistTest.scala
|
Scala
|
bsd-3-clause
| 5,073
|
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.provider.websockets
import akka.actor._
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator.{Publish}
/**
* WebSocketPublisher is an Akka actor that is responsible for publishing
* server-initiated messages to the Akka mediator that manages the
* publish/subscribe of the various message types to the WebSocketActor
* instances that represent the client websockets.
*
* Note that server code should generally use the WebSocketProvider.sendMessage
* method, rather than trying to access this actor directly, so that it need
* not worry about interfacing with the Akka actor system.
*
* @author nrotstan
*/
class WebSocketPublisher extends Actor {
val mediator = DistributedPubSub(context.system).mediator
def receive = {
case message: WebSocketMessages.ServerMessage =>
message.meta.subscriptionName match {
case Some(name) => mediator ! Publish(name, message)
case None => None // Ignore messages not intended for publication
}
}
}
|
mvexel/maproulette2
|
app/org/maproulette/provider/websockets/WebSocketPublisher.scala
|
Scala
|
apache-2.0
| 1,189
|
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
/** A mutable set interface that uses object identity to test for set membership.*/
trait IDSet[T] {
def apply(t: T): Boolean
def contains(t: T): Boolean
def +=(t: T): Unit
def ++=(t: Iterable[T]): Unit
def -=(t: T): Boolean
def all: collection.Iterable[T]
def toList: List[T]
def isEmpty: Boolean
def foreach(f: T => Unit): Unit
def process[S](t: T)(ifSeen: S)(ifNew: => S): S
}
object IDSet {
implicit def toTraversable[T]: IDSet[T] => Traversable[T] = _.all
def apply[T](values: T*): IDSet[T] = apply(values)
def apply[T](values: Iterable[T]): IDSet[T] =
{
val s = create[T]
s ++= values
s
}
def create[T]: IDSet[T] = new IDSet[T] {
private[this] val backing = new java.util.IdentityHashMap[T, AnyRef]
private[this] val Dummy: AnyRef = ""
def apply(t: T) = contains(t)
def contains(t: T) = backing.containsKey(t)
def foreach(f: T => Unit) = all foreach f
def +=(t: T) = backing.put(t, Dummy)
def ++=(t: Iterable[T]) = t foreach +=
def -=(t: T) = if (backing.remove(t) eq null) false else true
def all = collection.JavaConversions.collectionAsScalaIterable(backing.keySet)
def toList = all.toList
def isEmpty = backing.isEmpty
def process[S](t: T)(ifSeen: S)(ifNew: => S) = if (contains(t)) ifSeen else { this += t; ifNew }
override def toString = backing.toString
}
}
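// Usage sketch, not part of the original file: membership is decided by object identity,
// so two equal-but-distinct strings are both kept (a regular Set would keep only one).
object IDSetUsageSketch {
  val set: IDSet[String] = IDSet.create[String]
  set += new String("a")
  set += new String("a")
  // set.toList.size == 2
}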
|
xeno-by/old-scalameta-sbt
|
util/collection/src/main/scala/sbt/IDSet.scala
|
Scala
|
bsd-3-clause
| 1,456
|
package com.avsystem.scex.util
import java.io.ByteArrayOutputStream
object EscapedBytes {
def render(bytes: Array[Byte]): String = {
val sb = new StringBuilder
bytes.foreach {
case '\\' => sb ++= "\\\\"
case b if b > 0x1F && b < 0x7F => sb += b.toChar
case b => sb ++= f"\\x$b%02x"
}
sb.result()
}
def parse(repr: String): Array[Byte] = {
val baos = new ByteArrayOutputStream
def loop(it: Iterator[Char]): Unit = if (it.hasNext) {
it.next() match {
case '\\' => it.next() match {
case 'x' =>
def readDigit(): Int = if (it.hasNext) it.next() match {
case d if d >= '0' && d <= '9' => d - '0'
case d if d >= 'A' && d <= 'F' => d - 'A' + 10
case d if d >= 'a' && d <= 'f' => d - 'a' + 10
case c => throw new IllegalArgumentException(s"Expected hex digit, got character: $c")
} else throw new IllegalArgumentException(s"Expected hex digit, got end of string")
var byte = readDigit() * 16
byte += readDigit()
baos.write(byte)
case '\\' => baos.write('\\')
case c =>
throw new IllegalArgumentException(s"Invalid escape character: $c, only \\ and hex escapes are allowed")
}
case c if c > 0x1F && c < 0x7F =>
baos.write(c)
case c =>
throw new IllegalArgumentException(s"Invalid character in binary representation: $c")
}
loop(it)
}
loop(repr.iterator)
baos.toByteArray
}
}
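// Usage sketch, not part of the original file: printable bytes pass through, a backslash is
// doubled, and everything else becomes a \xNN escape; parse is the inverse of render.
object EscapedBytesUsageSketch {
  val rendered: String = EscapedBytes.render(Array[Byte](0x41, 0x5c, 0x07)) // "A" + "\\" + "\x07"
  val roundTripped: Array[Byte] = EscapedBytes.parse(rendered)              // Array(0x41, 0x5c, 0x07)
}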
|
AVSystem/scex
|
scex-util/src/main/scala/com/avsystem/scex/util/EscapedBytes.scala
|
Scala
|
mit
| 1,560
|
package io.udash.web.guide.markdown
import java.io.{BufferedReader, File, FileReader}
import java.time.Instant
import java.util.concurrent.ConcurrentHashMap
import com.avsystem.commons._
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.parser.Parser
import scala.concurrent.{ExecutionContext, Future}
final class MarkdownPagesEndpoint(guideResourceBase: String)(implicit ec: ExecutionContext) extends MarkdownPageRPC {
private val parser = Parser.builder.build
private val renderer = HtmlRenderer.builder.build
private val renderedPages = new ConcurrentHashMap[MarkdownPage, (Future[String], Instant)]
private def render(file: File): Future[String] = Future {
val reader = new BufferedReader(new FileReader(file))
val document = parser.parseReader(reader)
renderer.render(document)
}
override def loadContent(page: MarkdownPage): Future[String] = {
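    // Reuse the cached render unless it failed or the markdown file has changed on disk since it was produced.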
val (result, _) = renderedPages.compute(page, { (_, cached) =>
val pageFile = new File(guideResourceBase + page.file)
cached.opt.filter {
case (currentRender, renderedInstant) =>
currentRender.value.exists(_.isSuccess) && renderedInstant.toEpochMilli >= pageFile.lastModified()
}.getOrElse((render(pageFile), Instant.ofEpochMilli(pageFile.lastModified())))
})
result
}
}
|
UdashFramework/udash-guide
|
backend/src/main/scala/io/udash/web/guide/markdown/MarkdownPagesEndpoint.scala
|
Scala
|
gpl-3.0
| 1,325
|
package org.allenai.common
import spray.json.{ deserializationError, JsString, JsValue, RootJsonFormat }
/** Enumeration implementation that supports automatic Spray JSON serialization of a case object as
* a JsString.
*
* Usage:
* (format: OFF)
* {{{
* sealed abstract class MyEnum extends Enum[MyEnum]
* object MyEnum extends EnumCompanion[MyEnum] {
* case object One extends MyEnum
* case object Two extends MyEnum
* register(One, Two)
* }
*
* // JSON serialization:
* MyEnum.One.toJson // JsString("One")
* MyEnum.Two.toJson // JsString("Two")
* JsString("One").convertTo[MyEnum] // MyEnum.One
* JsString("Two").convertTo[MyEnum] // MyEnum.Two
* }}}
* (format: ON)
*/
abstract class Enum[E <: Enum[E]] {
/** The serialization string. By default, use the toString implementation. For a case object, this
* uses the object name.
*/
def id: String = toString
}
/** Superclass for Enum companion objects providing enum registration and JSON serialization */
abstract class EnumCompanion[E <: Enum[E]] {
/** Internal registry of enums */
private[this] var registry = Map[String, E]()
/** Lookup enum by ID
* @param id
*/
def withId(id: String): E = registry(id)
def all: Iterable[E] = registry.values
/** Register enums so they can be looked up by ID and be included in `all` iterable
* @param enums
*/
// TODO(markschaake): this might be a prime candidate for a macro which can generate
  // exhaustive pattern matching instead of relying on the user to manually register
// each case object.
protected def register(enums: E*) = enums foreach { e =>
registry = registry + (e.id -> e)
}
implicit object EnumJsonFormat extends RootJsonFormat[E] {
override def read(jsValue: JsValue): E = jsValue match {
case JsString(id) => withId(id)
case other => deserializationError(s"Enum id must be a JsString: $other")
}
override def write(e: E): JsValue = JsString(e.id)
}
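  // Usage sketch, not part of the original file: ExpandedCondition makes the conditions above
  // composable, e.g. new ExpandedCondition(BOOLEAN_EXPR) || THROWABLE accepts either a
  // Boolean-typed expression or a Throwable expression.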
}
|
jkinkead/common
|
core/src/main/scala/org/allenai/common/Enum.scala
|
Scala
|
apache-2.0
| 2,000
|
/**
* Magmanics Licensing. This web application allows for centralized control
* of client application activation, with optional configuration parameters
* to control licensable features, and storage of supplementary information
* about the client machine. Client applications may interface with this
* central server (for activation) using libraries licenced under an
* alternative licence.
*
* Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.magmanics.vaadin.component
import com.vaadin.shared.ui.label.ContentMode
import com.vaadin.ui.Label
/**
* @author jbaxter - 07/04/11
*/
class HtmlLabel(caption: String) extends Label(caption, ContentMode.HTML)
|
manicmonkey/licensing
|
Licensing-UI-Vaadin/src/main/scala/com/magmanics/vaadin/component/HtmlLabel.scala
|
Scala
|
gpl-3.0
| 1,342
|
package controllers
import global.Globals
import models.Profile
import services.ApiServices
import javax.inject._
import play.api._
import play.api.mvc._
import play.api.libs.json._
import io.swagger.core._
import io.swagger.annotations._
import play.api.libs.concurrent.Execution.Implicits._
@Api(value = "/profile", description = "Full Profile")
class ProfileController @Inject() (apiServices: ApiServices) extends ApiController {
implicit val profileFmt = Profile.format
@ApiOperation(value = "Returns entire profile", response = classOf[Profile], httpMethod = "GET")
def get(userSlug: Option[String]) = CORSAction {
val slug = userSlug.getOrElse(Globals.defaultUserSlug)
apiServices.userService.findProfileByUserSlug(slug).map {
case Some(profile) => Ok(Json.toJson(profile))
case None => NotFound
}
}
}
|
gilbertw1/personal-api
|
app/controllers/ProfileController.scala
|
Scala
|
gpl-2.0
| 848
|
package org.openurp.edu.eams.number
import java.util.Arrays
import org.beangle.commons.text.i18n.TextResource
object NumberRangeDigestor {
def digest(numberSequence: Array[Int], textResource: TextResource): String = {
digest(numberSequence, textResource, DefaultNumberRangeFormatter.getInstance)
}
def digest(numberSequence: Array[Int], textResource: TextResource, formatter: NumberRangeFormatter): String = {
if (numberSequence == null || numberSequence.length == 0) {
return ""
}
Arrays.sort(numberSequence)
val patterns = new collection.mutable.ListBuffer[NumberRange]
var lastPattern = NumberRange.newInstance(numberSequence(0))
patterns += lastPattern
for (i <- 1 until numberSequence.length) {
val number = numberSequence(i)
if (!lastPattern.test(number)) {
lastPattern = lastPattern.guessNextPattern(number)
patterns += lastPattern
}
}
val sb = new StringBuilder()
var iterator = patterns.iterator
while (iterator.hasNext) {
val pattern = iterator.next()
if (!pattern.isAbandon) {
sb.append(formatter.format(pattern, textResource, iterator.hasNext))
}
}
sb.toString
}
def digest(numberSequence: Array[Integer], textResource: TextResource): String = {
if (numberSequence == null || numberSequence.length == 0) {
return ""
}
val integers = Array.ofDim[Int](numberSequence.length)
for (i <- 0 until numberSequence.length) {
integers(i) = numberSequence(i)
}
digest(integers, textResource)
}
def digest(numberSequence: Array[Integer], textResource: TextResource, formatter: NumberRangeFormatter): String = {
if (numberSequence == null || numberSequence.length == 0) {
return null
}
val integers = Array.ofDim[Int](numberSequence.length)
for (i <- 0 until numberSequence.length) {
integers(i) = numberSequence(i)
}
digest(integers, textResource, formatter)
}
}
|
openurp/edu-eams-webapp
|
core/src/main/scala/org/openurp/edu/eams/number/NumberRangeDigestor.scala
|
Scala
|
gpl-3.0
| 1,979
|
package com.typesafe.slick.testkit.util
import slick.profile.{SqlProfile, RelationalProfile, BasicProfile}
import slick.driver.JdbcProfile
import java.io.{PrintWriter, OutputStreamWriter, BufferedWriter, FileOutputStream, FileWriter}
/** Build a table of supported capability flags for the user manual. */
object BuildCapabilitiesTable extends App {
// testkit/runMain com.typesafe.slick.testkit.util.BuildCapabilitiesTable ../src/sphinx/capabilities.csv
if(args.length < 1 || args.length > 2) {
println("Syntax: com.typesafe.slick.testkit.util.BuildCapabilitiesTable OUTPUTFILE [DRIVERLIST]")
System.exit(1)
}
val driverNames = if(args.length > 1) args(1).split(",") else Array(
"slick.driver.DerbyDriver",
"slick.driver.H2Driver",
"slick.driver.HsqldbDriver",
"slick.driver.MySQLDriver",
"slick.driver.PostgresDriver",
"slick.driver.SQLiteDriver"
)
val drivers = driverNames.map { n =>
Class.forName(n + "$").getField("MODULE$").get(null).asInstanceOf[BasicProfile]
}
val profiles = Vector(
RelationalProfile.capabilities.all -> "slick.profile.RelationalProfile$$capabilities$@",
SqlProfile.capabilities.all -> "slick.profile.SqlProfile$$capabilities$@",
JdbcProfile.capabilities.all -> "slick.driver.JdbcProfile$$capabilities$@"
)
val capabilities = for {
(caps, linkBase) <- profiles
cap <- caps.toVector.sortBy(c => if(c.toString.endsWith(".other")) "" else c.toString)
  } yield (cap, linkBase + cap.toString.replaceFirst(".*\\.", "") + ":slick.profile.Capability")
val out = new FileOutputStream(args(0))
try {
val wr = new PrintWriter(new BufferedWriter(new OutputStreamWriter(out, "UTF-8")))
wr.println("Capability," + driverNames.map(n => s":api:`$n`").mkString(","))
for((cap, link) <- capabilities) {
val flags = drivers.map(d => d.capabilities.contains(cap))
wr.println(s":api:`$cap <$link>`," + flags.map(b => if(b) "Yes" else "").mkString(","))
}
wr.flush()
} finally out.close()
}
|
jkutner/slick
|
slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/BuildCapabilitiesTable.scala
|
Scala
|
bsd-2-clause
| 2,018
|
import org.scalatest._
import scala.meta.internal.ast._
import scala.meta.dialects.Scala211
class JoinSuite extends FunSuite {
test("idents") {
val Term.Name("xtemp") = {
import scala.meta._
q"xtemp"
}
val Term.Name("ytemp") = {
import scala.meta._
q"ytemp"
}
}
test("vals") {
val fields = List(Pat.Var.Term(Term.Name("x")) -> Term.Name("xtemp"), Pat.Var.Term(Term.Name("y")) -> Term.Name("ytemp"))
val vals = fields.map{ case (f, ref) =>
import scala.meta._
q"val $f = $ref.${f.name}"
}
assert(vals.length === 2)
val Defn.Val(Nil, List(Pat.Var.Term(Term.Name("x"))), None, Term.Select(Term.Name("xtemp"), Term.Name("x"))) = vals(0)
val Defn.Val(Nil, List(Pat.Var.Term(Term.Name("y"))), None, Term.Select(Term.Name("ytemp"), Term.Name("y"))) = vals(1)
}
test("result") {
// FIXME: this test started giving a spurious cyclic reference error after a refactoring in quasiquote macros
// val x = Term.Name("x")
// val y = Term.Name("y")
// val valsin = List(
// Defn.Val(Nil, List(Pat.Var.Term(Term.Name("x"))), None, Term.Select(Term.Name("xtemp"), Term.Name("x"))),
// Defn.Val(Nil, List(Pat.Var.Term(Term.Name("y"))), None, Term.Select(Term.Name("ytemp"), Term.Name("y"))))
// val result = {
// import scala.meta._
// q"""
// val xtemp = $x
// val ytemp = $y
// new { ..$valsin }
// """
// }
// val Term.Block(stats) = result
// assert(stats.length === 3)
// val Defn.Val(Nil, List(Pat.Var.Term(Term.Name("xtemp"))), None, Term.Name("x")) = stats(0)
// val Defn.Val(Nil, List(Pat.Var.Term(Term.Name("ytemp"))), None, Term.Name("y")) = stats(1)
// val Term.New(Template(Nil, Nil, Term.Param(Nil, Name.Anonymous(), None, None), Some(valsout))) = stats(2)
// assert(valsout.length === 2)
// assert(valsout(0).toString === valsin(0).toString)
// assert(valsout(1).toString === valsin(1).toString)
}
}
|
mdemarne/scalameta
|
tests/src/test/scala/quasiquotes/JoinSuite.scala
|
Scala
|
bsd-3-clause
| 1,996
|
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.stats.assertion
import io.gatling.commons.util.NumberHelper._
trait Printable {
def printable: String
}
// ------------------- //
// -- Assertion ADT -- //
// ------------------- //
case class Assertion(path: AssertionPath, target: Target, condition: Condition)
// -------------- //
// -- Path ADT -- //
// -------------- //
sealed trait AssertionPath extends Printable
case object Global extends AssertionPath {
val printable = "Global"
}
case object ForAll extends AssertionPath {
val printable = "For all requests"
}
case class Details(parts: List[String]) extends AssertionPath {
def printable =
if (parts.isEmpty)
Global.printable
else
parts.mkString(" / ")
}
// ---------------- //
// -- Metric ADT -- //
// ---------------- //
sealed trait TimeMetric extends Printable
sealed trait CountMetric extends Printable
case object AllRequests extends CountMetric {
val printable = "all requests"
}
case object FailedRequests extends CountMetric {
val printable = "failed requests"
}
case object SuccessfulRequests extends CountMetric {
val printable = "successful requests"
}
case object ResponseTime extends TimeMetric {
val printable = "response time"
}
// ------------------- //
// -- Selection ADT -- //
// ------------------- //
sealed trait TimeSelection extends Printable
sealed trait CountSelection extends Printable
case object Count extends CountSelection {
val printable = "count"
}
case object Percent extends CountSelection {
val printable = "percentage"
}
case object PerMillion extends CountSelection {
val printable = "per_million"
}
case object Min extends TimeSelection {
val printable = "min"
}
case object Max extends TimeSelection {
val printable = "max"
}
case object Mean extends TimeSelection {
val printable = "mean"
}
case object StandardDeviation extends TimeSelection {
val printable = "standard deviation"
}
case class Percentiles(value: Double) extends TimeSelection {
val printable = s"${value.toRank} percentile"
}
// ---------------- //
// -- Target ADT -- //
// ---------------- //
sealed trait Target extends Printable
case class CountTarget(metric: CountMetric, selection: CountSelection) extends Target {
val printable = s"${selection.printable} of ${metric.printable}"
}
case class TimeTarget(metric: TimeMetric, selection: TimeSelection) extends Target {
val printable = s"${selection.printable} of ${metric.printable}"
}
case object MeanRequestsPerSecondTarget extends Target {
val printable = "mean requests per second"
}
// ------------------- //
// -- Condition ADT -- //
// ------------------- //
sealed trait Condition extends Printable {
def values: List[Int]
}
case class LessThan(value: Int) extends Condition {
val printable = "is less than"
override def values = List(value)
}
case class GreaterThan(value: Int) extends Condition {
val printable = "is greater than"
override def values = List(value)
}
case class Is(value: Int) extends Condition {
val printable = "is"
override def values = List(value)
}
case class Between(lowerBound: Int, upperBound: Int) extends Condition {
val printable = "is between"
override def values = List(lowerBound, upperBound)
}
case class In(elements: List[Int]) extends Condition {
val printable = "is in"
override def values = elements
}
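// Usage sketch, not part of the original file: an assertion meaning "for the whole simulation,
// the percentage of failed requests is less than 5".
object AssertionUsageSketch {
  val failedBelowFivePercent: Assertion =
    Assertion(Global, CountTarget(FailedRequests, Percent), LessThan(5))
  // failedBelowFivePercent.target.printable == "percentage of failed requests"
}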
|
GabrielPlassard/gatling
|
gatling-commons/src/main/scala/io/gatling/commons/stats/assertion/AssertionModel.scala
|
Scala
|
apache-2.0
| 3,963
|
package com.mentatlabs.nsa
package scalac
package options
/* -help
* =====
* 2.0.0 - 2.12.0: Print a synopsis of standard options
*/
case object ScalacHelp
extends ScalacOptionBoolean("-help", ScalacVersions.`2.0.0`)
|
melezov/sbt-nsa
|
nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/ScalacHelp.scala
|
Scala
|
bsd-3-clause
| 228
|
package bad.robot.temperature
object AutoClosing {
def closingAfterUse[A <: AutoCloseable, B](resource: A)(f: A => B): B = try {
f(resource)
} finally {
resource.close()
}
}
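// Usage sketch, not part of the original file (the file path parameter is illustrative): the
// stream is closed even if the body throws.
object AutoClosingUsageSketch {
  import AutoClosing.closingAfterUse
  def firstByte(path: String): Int =
    closingAfterUse(new java.io.FileInputStream(path))(_.read())
}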
|
tobyweston/temperature-machine
|
src/main/scala/bad/robot/temperature/AutoClosing.scala
|
Scala
|
apache-2.0
| 189
|
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.stubs.StubElement
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
/**
* User: Alexander Podkhalyuzin
* Date: 20.06.2009
*/
trait ScImportExprStub extends StubElement[ScImportExpr] {
def reference: Option[ScStableCodeReferenceElement]
def isSingleWildcard: Boolean
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/stubs/ScImportExprStub.scala
|
Scala
|
apache-2.0
| 488
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers.dsl
import org.scalatest._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ResultOfBeWordForNoExceptionSpec extends AnyFunSpec {
describe("ResultOfBeWordForNoException ") {
it("should have pretty toString when used") {
val result = noException should be
result.toString should be ("ResultOfBeWordForNoException")
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/matchers/dsl/ResultOfBeWordForNoExceptionSpec.scala
|
Scala
|
apache-2.0
| 1,039
|
package org.nikosoft.oanda.api.`def`
import org.nikosoft.oanda.api.ApiModel.AccountModel.AccountID
import org.nikosoft.oanda.api.ApiModel.PositionModel.Position
import org.nikosoft.oanda.api.ApiModel.PrimitivesModel.InstrumentName
import org.nikosoft.oanda.api.ApiModel.TransactionModel._
import org.nikosoft.oanda.api.Errors.Error
import org.nikosoft.oanda.api.`def`.PositionApi.{ClosePositionRequest, ClosePositionResponse, PositionsResponse}
import scalaz.\/
object PositionApi {
/**
* @param positions The list of Account Positions.
* @param lastTransactionID The ID of the most recent Transaction created for the Account
*/
case class PositionsResponse(positions: Seq[Position], lastTransactionID: TransactionID)
/**
* @param longUnits Indication of how much of the long Position to closeout. Either the
* string “ALL”, the string “NONE”, or a DecimalNumber representing how many
* units of the long position to close using a PositionCloseout MarketOrder.
* The units specified must always be positive.
* @param longClientExtensions The client extensions to add to the MarketOrder used to close the long
* position.
* @param shortUnits Indication of how much of the short Position to closeout. Either the
* string “ALL”, the string “NONE”, or a DecimalNumber representing how many
* units of the short position to close using a PositionCloseout
* MarketOrder. The units specified must always be positive.
* @param shortClientExtensions The client extensions to add to the MarketOrder used to close the short
* position.
*/
case class ClosePositionRequest(longUnits: Option[String] = None, longClientExtensions: Option[ClientExtensions] = None, shortUnits: Option[String] = None, shortClientExtensions: Option[ClientExtensions] = None)
/**
* @param longOrderCreateTransaction The MarketOrderTransaction created to close the long Position.
* @param longOrderFillTransaction OrderFill Transaction that closes the long Position
* @param longOrderCancelTransaction OrderCancel Transaction that cancels the MarketOrder created to close the long Position
* @param shortOrderCreateTransaction The MarketOrderTransaction created to close the short Position.
* @param shortOrderFillTransaction OrderFill Transaction that closes the short Position
* @param shortOrderCancelTransaction OrderCancel Transaction that cancels the MarketOrder created to close the short Position
* @param relatedTransactionIDs The IDs of all Transactions that were created while satisfying the request.
* @param lastTransactionID The ID of the most recent Transaction created for the Account
*/
case class ClosePositionResponse(longOrderCreateTransaction: Option[MarketOrderTransaction],
longOrderFillTransaction: Option[OrderFillTransaction],
longOrderCancelTransaction: Option[OrderCancelTransaction],
shortOrderCreateTransaction: Option[MarketOrderTransaction],
shortOrderFillTransaction: Option[OrderFillTransaction],
shortOrderCancelTransaction: Option[OrderCancelTransaction],
relatedTransactionIDs: Seq[TransactionID],
lastTransactionID: Option[TransactionID])
}
trait PositionApi {
/**
* List all Positions for an Account. The Positions returned are for every instrument that has had a position
   * during the lifetime of the Account.
*
* @param accountId Account Identifier [required]
* @return The Account’s Positions are provided.
*/
  def positions(accountId: AccountID): \/[Error, PositionsResponse]
/**
* List all open Positions for an Account. An open Position is a Position in an Account that currently has a Trade opened for it.
*
* @param accountId Account Identifier [required]
* @return The Account’s open Positions are provided.
*/
  def openPositions(accountId: AccountID): \/[Error, PositionsResponse]
/**
* Closeout the open Position for a specific instrument in an Account.
*
* @param accountId Account Identifier [required]
* @param instrument Name of the Instrument [required]
* @param closePositionRequest request body
* @return The Position closeout request has been successfully processed.
*/
  def closePosition(accountId: AccountID, instrument: InstrumentName, closePositionRequest: ClosePositionRequest): \/[Error, ClosePositionResponse]
}
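// Usage sketch, not part of the original file: a request that closes out the entire long side
// of a position and leaves the short side untouched.
object ClosePositionRequestSketch {
  val closeAllLong: ClosePositionRequest =
    ClosePositionRequest(longUnits = Some("ALL"), shortUnits = Some("NONE"))
}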
|
cnnickolay/forex-trader
|
oanda-scala-api/src/main/scala/org/nikosoft/oanda/api/def/PositionApi.scala
|
Scala
|
mit
| 4,901
|
package sbtmarathon
import java.net.{URL, URLEncoder, InetSocketAddress}
import scala.concurrent.{Await, Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import com.twitter.finagle.{Http, Name, Address}
import com.twitter.finagle.http.{RequestBuilder, Request, Response}
import com.twitter.io.Buf
import com.twitter.util.Base64StringEncoder
import org.json4sbt._
import org.json4sbt.jackson.JsonMethods._
import org.scalactic.{Or, Good, Bad}
class MarathonService(url: URL) {
import MarathonService._
val port = if (url.getPort < 0) url.getDefaultPort else url.getPort
val apiUrl = UrlUtil.copy(url, port = port, path = RestApiPath)
def start(jsonString: String): Result Or Throwable = {
val request = RequestBuilder()
.url(apiUrl)
.setHeader("Content-type", JsonContentType)
.buildPost(jsonString)
executeRequest(request)
}
def destroy(applicationId: String): Result Or Throwable = {
val url = instanceServiceUrl(applicationId)
val request = RequestBuilder()
.url(url)
.buildDelete()
executeRequest(request, url = url)
}
def update(applicationId: String, jsonString: String): Result Or Throwable = {
val url = instanceServiceUrl(applicationId)
val request = RequestBuilder()
.url(url)
.setHeader("Content-type", JsonContentType)
.buildPut(jsonString)
executeRequest(request, url)
}
def restart(applicationId: String): Result Or Throwable = {
val instanceUrl = instanceServiceUrl(applicationId)
val url = UrlUtil.copy(instanceUrl, path = instanceUrl.getPath + "/restart")
val request = RequestBuilder()
.url(url)
.setHeader("Content-type", JsonContentType)
.buildPost(Buf.Empty)
executeRequest(request, url)
}
def scale(applicationId: String, numInstances: Int): Result Or Throwable = {
val url = instanceServiceUrl(applicationId)
val jsonString = s"""{"instances":$numInstances}"""
val request = RequestBuilder()
.url(url)
.setHeader("Content-type", JsonContentType)
.buildPut(jsonString)
executeRequest(request, url)
}
def executeRequest(request: Request, url: URL = this.url): Result Or Throwable = {
val host = url.getHost
val port = if (url.getPort < 0) url.getDefaultPort else url.getPort
val addr = Address(new InetSocketAddress(host, port))
val client = if (url.getProtocol == "https") Http.client.withTlsWithoutValidation else Http.client
Option(url.getUserInfo).foreach { credentials =>
val encodedCredentials = Base64StringEncoder.encode(credentials.getBytes("UTF-8"))
request.authorization = s"Basic $encodedCredentials"
}
val service = client.newService(Name.bound(addr), "")
val response = service(request).ensure { service.close() }
val promise = Promise[Response]
response.onSuccess(promise.success _)
response.onFailure(promise.failure _)
val future = promise.future.map { response =>
val responseString = response.contentString
val result = response.statusCode match {
case n if n >= 200 && n < 400 => Success(responseString)
case n if n >= 400 && n < 500 => UserError(responseString)
case n if n >= 500 => SystemError(responseString)
}
Good(result)
}
try {
Await.result(future, Duration.Inf)
} catch {
case e: Exception => Bad(e)
}
}
def instanceServiceUrl(applicationId: String): URL = {
UrlUtil.copy(url, port = port, path = RestApiPath + s"/$applicationId")
}
def instanceGuiUrl(applicationId: String): URL = {
val fragment = "/apps/" + URLEncoder.encode(s"/$applicationId", "UTF-8")
UrlUtil.copy(url, port = port, path = GuiPath, fragment = fragment)
}
}
object MarathonService {
sealed trait Result {
implicit val formats = DefaultFormats
def responseString: String
lazy val responseJson: JValue = parse(responseString)
lazy val message: Option[String] = (responseJson \\ "message").extractOpt[String]
}
case class Success(responseString: String) extends Result
case class UserError(responseString: String) extends Result
case class SystemError(responseString: String) extends Result
val RestApiPath = "/v2/apps"
val GuiPath = "/ui/"
val JsonContentType = "application/json"
implicit def jsonStringToBuf(jsonString: String): Buf = {
val jsonBytes = jsonString.getBytes("UTF-8")
Buf.ByteArray(jsonBytes: _*)
}
}
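// Usage sketch, not part of the original file; the Marathon URL and application id are hypothetical.
object MarathonServiceUsageSketch {
  import MarathonService.Result
  def scaleExample(): Result Or Throwable = {
    val service = new MarathonService(new URL("http://marathon.example.com:8080"))
    service.scale("my-app", numInstances = 3)
  }
}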
|
Tapad/sbt-marathon
|
marathon/src/main/scala/sbtmarathon/MarathonService.scala
|
Scala
|
bsd-3-clause
| 4,510
|
package chandu0101.scalajs.react.components.materialui
import chandu0101.macros.tojs.JSMacro
import japgolly.scalajs.react._
import materialui.Mui
import scala.scalajs.js
case class MuiListDivider(key: js.UndefOr[String] = js.undefined,
style: js.UndefOr[js.Any] = js.undefined,
ref: js.UndefOr[String] = js.undefined,
inset: js.UndefOr[Boolean] = js.undefined) {
def apply() = {
val props = JSMacro[MuiListDivider](this)
val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.ListDivider)
f(props).asInstanceOf[ReactComponentU_]
}
}
case class MuiListItem(secondaryText: js.UndefOr[ReactElement] = js.undefined,
style: js.UndefOr[js.Any] = js.undefined,
disableTouchTap: js.UndefOr[Boolean] = js.undefined,
insetChildren: js.UndefOr[Boolean] = js.undefined,
ref: js.UndefOr[String] = js.undefined,
rightAvatar: js.UndefOr[ReactElement] = js.undefined,
leftAvatar: js.UndefOr[ReactElement] = js.undefined,
key: js.UndefOr[String] = js.undefined,
onMouseOver: js.UndefOr[ReactEvent => Unit] = js.undefined,
onMouseOut: js.UndefOr[ReactEvent => Unit] = js.undefined,
secondaryTextLines: js.UndefOr[Int] = js.undefined,
leftIcon: js.UndefOr[ReactElement] = js.undefined,
rightIcon: js.UndefOr[ReactElement] = js.undefined,
rightToggle: js.UndefOr[ReactElement] = js.undefined,
leftCheckbox: js.UndefOr[ReactElement] = js.undefined) {
def apply(children: ReactNode*) = {
val props = JSMacro[MuiListItem](this)
val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.ListItem)
f(props, children.toJsArray).asInstanceOf[ReactComponentU_]
}
}
case class MuiList(subheaderStyle: js.UndefOr[js.Any] = js.undefined,
insetSubheader: js.UndefOr[Boolean] = js.undefined,
style: js.UndefOr[js.Any] = js.undefined,
ref: js.UndefOr[String] = js.undefined,
key: js.UndefOr[String] = js.undefined,
subheader: js.UndefOr[String] = js.undefined) {
def apply(children: ReactNode*) = {
val props = JSMacro[MuiList](this)
val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.List)
f(props, children.toJsArray).asInstanceOf[ReactComponentU_]
}
}
|
mproch/scalajs-react-components
|
core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiLists.scala
|
Scala
|
apache-2.0
| 2,581
|
// package scala.util.control
object TailCalls {
abstract class TailRec[+A] {
final def flatMap[B](f: A => TailRec[B]): TailRec[B] =
this match {
case Done(a) => Call(() => f(a))
case c@Call(_) => Cont(c, f)
case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x) flatMap f)
}
@annotation.tailrec final def resume: Either[() => TailRec[A], A] = this match {
case Done(a) => Right(a)
case Call(k) => Left(k)
case Cont(a, f) => a match {
case Done(v) => f(v).resume
case Call(k) => Left(() => k().flatMap(f))
case Cont(b, g) => b.flatMap(x => g(x) flatMap f).resume
}
}
@annotation.tailrec final def result: A = this match {
case Done(a) => a
case Call(t) => t().result
case Cont(a, f) => a match {
case Done(v) => f(v).result
case Call(t) => t().flatMap(f).result
case Cont(b, g) => b.flatMap(x => g(x) flatMap f).result
}
}
}
protected case class Call[A](rest: () => TailRec[A]) extends TailRec[A]
protected case class Done[A](value: A) extends TailRec[A]
protected case class Cont[A, B](a: TailRec[A], f: A => TailRec[B]) extends TailRec[B]
}
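// Hedged usage sketch (not part of the original file): the standard-library
// scala.util.control.TailCalls API that this test mirrors provides `done` and
// `tailcall` factories, with which mutually recursive functions stay stack-safe.
object TailCallsDemo {
  import scala.util.control.TailCalls._

  // Mutually recursive parity check, trampolined through TailRec.
  def isEven(n: Int): TailRec[Boolean] =
    if (n == 0) done(true) else tailcall(isOdd(n - 1))

  def isOdd(n: Int): TailRec[Boolean] =
    if (n == 0) done(false) else tailcall(isEven(n - 1))

  def main(args: Array[String]): Unit =
    println(isEven(100000).result) // prints "true" without a StackOverflowError
}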
|
lampepfl/dotty
|
tests/pos/gadt-TailCalls.scala
|
Scala
|
apache-2.0
| 1,209
|
package bad.robot
import simplehttp.configuration.{Username => SimpleHttpUsername}
import simplehttp.configuration.{Password => SimpleHttpPassword}
import bad.robot.radiate.config.{Password, Username}
package object http {
implicit def toUsername(username: Username): SimpleHttpUsername = {
SimpleHttpUsername.username(username.value)
}
implicit def toPassword(password: Password): SimpleHttpPassword = {
SimpleHttpPassword.password(password.value)
}
}
|
tobyweston/radiate
|
src/main/scala/bad/robot/package.scala
|
Scala
|
apache-2.0
| 473
|
/*
* Copyright 2014 Michael Krolikowski
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mkroli.dns4s.section.resource
import java.net.{Inet4Address, InetAddress}
import com.github.mkroli.dns4s.section.ResourceRecord
import com.github.mkroli.dns4s.{MessageBuffer, bytes}
import org.scalatest.funspec.AnyFunSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import scala.language.implicitConversions
class AResourceSpec extends AnyFunSpec with ScalaCheckPropertyChecks {
describe("AResource") {
describe("encoding/decoding") {
def inet4Address(b: String) =
InetAddress.getByAddress(bytes(b).toArray).asInstanceOf[Inet4Address]
it("decode(encode(resource)) should be the same as resource") {
forAll { (a: Byte, b: Byte, c: Byte, d: Byte) =>
val ar = AResource(InetAddress.getByAddress(Array(a, b, c, d)).asInstanceOf[Inet4Address])
assert(ar === AResource(ar(MessageBuffer()).flipped()))
val encoded = ar(MessageBuffer()).flipped()
assert(Array(a, b, c, d) === encoded.getBytes(encoded.remaining()))
}
}
it("should be decoded wrapped in ResourceRecord") {
val rr = ResourceRecord("test", ResourceRecord.typeA, 0, 0, AResource(inet4Address("FF 0F F0 FF")))
val a = rr(MessageBuffer()).flipped()
val b = bytes("04 74 65 73 74 00 0001 0000 00000000 0004 FF 0F F0 FF")
assert(b === a.getBytes(a.remaining()))
assert(rr === ResourceRecord(MessageBuffer().put(b.toArray).flipped()))
}
}
}
}
|
mkroli/dns4s
|
core/src/test/scala/com/github/mkroli/dns4s/section/resource/AResourceSpec.scala
|
Scala
|
apache-2.0
| 2,086
|
case class Matrix(in: Seq[Seq[Int]]) {
// Maximum of each row and minimum of each column.
val rowMaxes: Seq[Int] = in.map(_.max)
val colMins: Seq[Int] = in.transpose.map(_.min)
val rows: Int = in.length
val cols: Int = if (in.isEmpty) 0 else in.head.length
// (9, 8, 7)
// (5, 3, 2)
// (6, 6, 7)
// == (1, 0): max in its row, min in its column
def saddlePoints: Set[(Int, Int)] = {
val ret = for {
row <- 0 until rows
col <- 0 until cols
if in(row)(col) == rowMaxes(row) && in(row)(col) == colMins(col)
} yield (row, col)
ret.toSet
}
}
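// Usage sketch (not part of the original file) based on the example matrix in
// the comment above: the 5 at (row 1, col 0) is the maximum of its row and the
// minimum of its column, so it is the only saddle point.
object SaddlePointDemo {
  def main(args: Array[String]): Unit = {
    val m = Matrix(Seq(Seq(9, 8, 7), Seq(5, 3, 2), Seq(6, 6, 7)))
    println(m.saddlePoints) // Set((1,0))
  }
}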
|
daewon/til
|
exercism/scala/saddle-points/src/main/scala/saddlePoint.scala
|
Scala
|
mpl-2.0
| 483
|
import sbt.Keys._
import play.sbt.routes.RoutesKeys._
import sbt.Tests.{SubProcess, Group}
import sbt._
import play.routes.compiler.StaticRoutesGenerator
import uk.gov.hmrc.sbtdistributables.SbtDistributablesPlugin._
trait MicroService {
import uk.gov.hmrc._
import DefaultBuildSettings._
import uk.gov.hmrc.{SbtBuildInfo, ShellPrompt, SbtAutoBuildPlugin}
import uk.gov.hmrc.sbtdistributables.SbtDistributablesPlugin
import uk.gov.hmrc.versioning.SbtGitVersioning
import play.sbt.routes.RoutesKeys.routesGenerator
import TestPhases._
val appName: String
lazy val appDependencies : Seq[ModuleID] = ???
lazy val plugins : Seq[Plugins] = Seq.empty
lazy val playSettings : Seq[Setting[_]] = Seq.empty
lazy val microservice = Project(appName, file("."))
.enablePlugins(Seq(play.sbt.PlayScala,SbtAutoBuildPlugin, SbtGitVersioning, SbtDistributablesPlugin) ++ plugins : _*)
.settings(playSettings : _*)
.settings(scalaSettings: _*)
.settings(publishingSettings: _*)
.settings(defaultSettings(): _*)
.settings(
scalaVersion in ThisBuild := "2.11.8",
libraryDependencies ++= appDependencies,
evictionWarningOptions in update := EvictionWarningOptions.default.withWarnScalaVersionEviction(false),
routesImport ++= Seq(
"uk.gov.hmrc.bforms.binders.ValueClassBinder._",
"uk.gov.hmrc.bforms.models.FormTypeId",
"uk.gov.hmrc.bforms.models.FormId"
),
scalacOptions ++= Seq(
"-Xfatal-warnings",
"-Xlint:-missing-interpolator,_",
"-Yno-adapted-args",
"-Ywarn-numeric-widen",
"-Ywarn-value-discard",
"-Ywarn-dead-code",
"-deprecation",
"-feature",
"-unchecked"
)
)
.configs(IntegrationTest)
.settings(inConfig(IntegrationTest)(Defaults.itSettings): _*)
.settings(
Keys.fork in IntegrationTest := false,
unmanagedSourceDirectories in IntegrationTest := Seq((baseDirectory in IntegrationTest).value / "it"),
addTestReportOption(IntegrationTest, "int-test-reports"),
testGrouping in IntegrationTest := oneForkedJvmPerTest((definedTests in IntegrationTest).value),
parallelExecution in IntegrationTest := false)
.settings(resolvers ++= Seq(
Resolver.bintrayRepo("hmrc", "releases"),
Resolver.jcenterRepo,
"bintray-djspiewak-maven" at "https://dl.bintray.com/djspiewak/maven"
))
}
private object TestPhases {
def oneForkedJvmPerTest(tests: Seq[TestDefinition]) =
tests map {
test => new Group(test.name, Seq(test), SubProcess(ForkOptions(runJVMOptions = Seq("-Dtest.name=" + test.name))))
}
}
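// Hedged usage sketch (not part of the original file): a concrete project
// definition would mix the trait into an sbt build object and supply the
// abstract members. The object name and dependency coordinates are hypothetical.
// object MicroServiceBuild extends Build with MicroService {
//   override val appName = "bforms"
//   override lazy val appDependencies: Seq[ModuleID] =
//     Seq("uk.gov.hmrc" %% "play-health" % "x.y.z")
// }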
|
VlachJosef/bforms
|
project/MicroService.scala
|
Scala
|
apache-2.0
| 2,665
|
package models
import org.joda.time.DateTime
import play.api.libs.json._
import play.api.libs.functional.syntax._
case class ZWayEvent(
foreignDeviceId:String,
status:Option[String] = None,
eventType: Option[String] = None,
updateTime:Option[DateTime] = None,
value: Option[String] = None,
deviceType: Option[String] = None,
deviceName: Option[String] = None,
cmdClass: Option[Int] = None,
instanceId: Option[Int] = None,
meterType: Option[Int] = None,
sensorType: Option[String] = None,
scaleUnit: Option[String] = None,
probeType: Option[String] = None
)
object ZWayEvent {
implicit val reads: Reads[ZWayEvent] = (
(JsPath \\ "foreignDeviceId").read[String] and
(JsPath \\ "status").readNullable[String] and
(JsPath \\ "eventType").readNullable[String] and
(JsPath \\ "updateTime").readNullable[Int].map(epochToDate(_)) and
(JsPath \\ "value").readNullable[String] and
(JsPath \\ "deviceType").readNullable[String] and
(JsPath \\ "deviceName").readNullable[String] and
(JsPath \\ "cmdClass").readNullable[Int] and
(JsPath \\ "instanceId").readNullable[Int] and
(JsPath \\ "meterType").readNullable[Int] and
(JsPath \\ "sensorType").readNullable[String] and
(JsPath \\ "scaleUnit").readNullable[String] and
(JsPath \\ "probeType").readNullable[String]
)( ZWayEvent.apply _ )
val epochWrites: Writes[DateTime] = new Writes[DateTime] {
override def writes(d: DateTime): JsValue = {
val sec:Long = d.getMillis / 1000
JsNumber(sec)
}
}
implicit val writes: Writes[ZWayEvent] = (
(JsPath \ "foreignDeviceId").write[String] and
(JsPath \ "status").writeNullable[String] and
(JsPath \ "eventType").writeNullable[String] and
(JsPath \ "updateTime").writeNullable[DateTime](epochWrites) and
(JsPath \ "value").writeNullable[String] and
(JsPath \ "deviceType").writeNullable[String] and
(JsPath \ "deviceName").writeNullable[String] and
(JsPath \ "cmdClass").writeNullable[Int] and
(JsPath \ "instanceId").writeNullable[Int] and
(JsPath \ "meterType").writeNullable[Int] and
(JsPath \ "sensorType").writeNullable[String] and
(JsPath \ "scaleUnit").writeNullable[String] and
(JsPath \ "probeType").writeNullable[String]
) ( unlift(ZWayEvent.unapply) )
private def epochToDate(epoch:Option[Int]): Option[DateTime] = {
epoch match {
case Some(e) => {
val millis = e.toLong * 1000
Some(new DateTime(millis))
}
case None => None
}
}
}
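// Hedged usage sketch (not part of the original file): the Reads/Writes above
// treat updateTime as epoch seconds, so a numeric JSON field round-trips through
// a Joda DateTime. The device id and timestamp below are illustrative.
object ZWayEventJsonDemo {
  import play.api.libs.json.Json

  def main(args: Array[String]): Unit = {
    val json = Json.parse("""{"foreignDeviceId":"dev-1","updateTime":1500000000}""")
    val event = json.as[ZWayEvent]
    println(event.updateTime)                  // Some(2017-07-14T...), zone-dependent
    println(Json.toJson(event) \ "updateTime") // JsDefined(1500000000)
  }
}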
|
LabCo/rpi-zway-play
|
app/models/ZWayEvent.scala
|
Scala
|
mit
| 2,519
|
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package databases.sqlite
import java.io.File
import java.net.URI
import akka.testkit.{ TestActorRef, TestFSMRef }
import com.wegtam.scalatest.tags.{ DbTest, DbTestSqlite }
import com.wegtam.tensei.adt._
import com.wegtam.tensei.agent.{ ActorSpec, DummyActor, TenseiAgent }
import org.scalatest.BeforeAndAfterEach
import scala.concurrent.duration._
class ForeignKeysTest extends ActorSpec with BeforeAndAfterEach {
val SQLITE_FILE = File.createTempFile("tensei-agent", "testSqlite.db")
override protected def beforeEach(): Unit = {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
createSourceData()
super.beforeEach()
}
override protected def afterEach(): Unit = {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
val connection = java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
val s = connection.createStatement()
val tables =
s.executeQuery("SELECT name AS TABLE_NAME FROM sqlite_master WHERE type = 'table'")
while (tables.next()) {
s.execute(s"DROP TABLE ${tables.getString("TABLE_NAME")}")
}
connection.close()
SQLITE_FILE.delete()
super.afterEach()
}
private def createSourceData(): Unit = {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
val c = java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
val statement = c.createStatement()
val r = statement.executeQuery("PRAGMA foreign_keys")
// If the query did not return an Integer, the SQLite version does not support foreign keys
if (r != null) {
val status = r.getInt(1)
// Activate foreign keys for SQLite
if (status == 0) {
statement.execute("PRAGMA foreign_keys = ON;")
}
}
val s = c.createStatement()
s.execute("""
|CREATE TABLE employees (
| id INTEGER PRIMARY KEY,
| firstname VARCHAR(254),
| lastname VARCHAR(254),
| birthday TEXT
|)
""".stripMargin)
s.execute("""
|CREATE TABLE salary (
| employee_id INTEGER,
| amount DECIMAL(10,2),
| FOREIGN KEY (employee_id) REFERENCES employees(id)
|)
""".stripMargin)
s.execute(
"""INSERT INTO employees (id, firstname, lastname, birthday) VALUES(123, 'Albert', 'Einstein', '1879-03-14')"""
)
s.execute("""INSERT INTO salary (employee_id, amount) VALUES(123, 3.14)""")
s.execute(
"""INSERT INTO employees (id, firstname, lastname, birthday) VALUES(456, 'Bernhard', 'Riemann', '1826-09-17')"""
)
s.execute("""INSERT INTO salary (employee_id, amount) VALUES(456, 6.28)""")
s.execute(
"""INSERT INTO employees (id, firstname, lastname, birthday) VALUES(789, 'Johann Carl Friedrich', 'Gauß', '1777-04-30')"""
)
s.execute("""INSERT INTO salary (employee_id, amount) VALUES(789, 12.56)""")
s.execute(
"""INSERT INTO employees (id, firstname, lastname, birthday) VALUES(5, 'Johann Benedict', 'Listing', '1808-07-25')"""
)
s.execute("""INSERT INTO salary (employee_id, amount) VALUES(5, 25.12)""")
s.execute(
"""INSERT INTO employees (id, firstname, lastname, birthday) VALUES(8, 'Gottfried Wilhelm', 'Leibnitz', '1646-07-01')"""
)
s.execute("""INSERT INTO salary (employee_id, amount) VALUES(8, 50.24)""")
()
}
describe("Foreign keys") {
describe("using sqlite") {
describe("using one to one mappings") {
describe("with single mappings") {
it("should replace changed auto-increment values", DbTest, DbTestSqlite) {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
val connection = java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
val sourceDfasdl = new DFASDL(
id = "SRC",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/source-dfasdl.xml")
)
.mkString
)
val targetDfasdl = new DFASDL(
id = "DST",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/target-dfasdl.xml")
)
.mkString
)
val cookbook: Cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = List(
Recipe(
id = "CopyEmployees",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id, elementId = "employees_row_id")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id, elementId = "employees_row_id")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_firstname")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_firstname")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_lastname")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_lastname")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_birthday")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_birthday")
)
)
)
),
Recipe(
id = "CopySalaries",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "salary_row_employee_id")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "salary_row_employee_id")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "salary_row_amount")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "salary_row_amount")
)
)
)
)
)
)
val source = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = sourceDfasdl.id))
)
val target = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = targetDfasdl.id))
)
val dummy = TestActorRef(DummyActor.props())
val client = system.actorSelection(dummy.path)
val agent = TestFSMRef(new TenseiAgent("TEST-AGENT", client))
val msg = AgentStartTransformationMessage(
sources = List(source),
target = target,
cookbook = cookbook,
uniqueIdentifier = Option("FOREIGN-KEY-TEST-OneToOne")
)
agent ! msg
expectMsgType[GlobalMessages.TransformationStarted](FiniteDuration(5, SECONDS))
expectMsgType[GlobalMessages.TransformationCompleted](FiniteDuration(7, SECONDS))
val s = connection.createStatement()
withClue("Written data should be correct!") {
val expectedData = Map(
"Einstein" -> new java.math.BigDecimal("3.14"),
"Riemann" -> new java.math.BigDecimal("6.28"),
"Gauß" -> new java.math.BigDecimal("12.56"),
"Listing" -> new java.math.BigDecimal("25.12"),
"Leibnitz" -> new java.math.BigDecimal("50.24")
)
val r = s.executeQuery(
"SELECT t_employees.id AS id, t_employees.lastname AS name, t_salary.amount AS amount FROM t_employees JOIN t_salary ON t_employees.id = t_salary.employee_id"
)
if (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
while (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
}
} else
fail("No results found in database!")
}
}
}
describe("with bulk mappings") {
it("should replace changed auto-increment values", DbTest, DbTestSqlite) {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
val connection = java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
val sourceDfasdl = new DFASDL(
id = "SRC",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/source-dfasdl.xml")
)
.mkString
)
val targetDfasdl = new DFASDL(
id = "DST",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/target-dfasdl.xml")
)
.mkString
)
val cookbook: Cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = List(
Recipe(
id = "CopyEmployees",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_id"),
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_firstname"),
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_lastname"),
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_birthday")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_id"),
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_firstname"),
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_lastname"),
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_birthday")
)
)
)
),
Recipe(
id = "CopySalaries",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "salary_row_employee_id"),
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "salary_row_amount")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "salary_row_employee_id"),
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "salary_row_amount")
)
)
)
)
)
)
val source = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = sourceDfasdl.id))
)
val target = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = targetDfasdl.id))
)
val dummy = TestActorRef(DummyActor.props())
val client = system.actorSelection(dummy.path)
val agent = TestFSMRef(new TenseiAgent("TEST-AGENT", client))
val msg = AgentStartTransformationMessage(
sources = List(source),
target = target,
cookbook = cookbook,
uniqueIdentifier = Option("FOREIGN-KEY-TEST-OneToOne")
)
agent ! msg
expectMsgType[GlobalMessages.TransformationStarted](FiniteDuration(5, SECONDS))
expectMsgType[GlobalMessages.TransformationCompleted](FiniteDuration(7, SECONDS))
val s = connection.createStatement()
withClue("Written data should be correct!") {
val expectedData = Map(
"Einstein" -> new java.math.BigDecimal("3.14"),
"Riemann" -> new java.math.BigDecimal("6.28"),
"Gauß" -> new java.math.BigDecimal("12.56"),
"Listing" -> new java.math.BigDecimal("25.12"),
"Leibnitz" -> new java.math.BigDecimal("50.24")
)
val r = s.executeQuery(
"SELECT t_employees.id AS id, t_employees.lastname AS name, t_salary.amount AS amount FROM t_employees JOIN t_salary ON t_employees.id = t_salary.employee_id"
)
if (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
while (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
}
} else
fail("No results found in database!")
}
}
}
}
describe("using all to all mappings") {
it("should replace changed auto-increment values", DbTest, DbTestSqlite) {
val sqliteFile = SQLITE_FILE.getAbsolutePath.replace("\\", "/")
val connection = java.sql.DriverManager.getConnection(s"jdbc:sqlite:$sqliteFile")
val sourceDfasdl = new DFASDL(
id = "SRC",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/source-dfasdl.xml")
)
.mkString
)
val targetDfasdl = new DFASDL(
id = "DST",
content = scala.io.Source
.fromInputStream(
getClass.getResourceAsStream("/databases/generic/ForeignKeys/target-dfasdl.xml")
)
.mkString
)
val cookbook: Cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = List(
Recipe(
id = "CopyEmployees",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id, elementId = "employees_row_id")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id, elementId = "employees_row_id")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_firstname")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_firstname")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_lastname")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_lastname")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "employees_row_birthday")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "employees_row_birthday")
)
)
)
),
Recipe(
id = "CopySalaries",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id,
elementId = "salary_row_employee_id")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id,
elementId = "salary_row_employee_id")
)
),
MappingTransformation(
sources = List(
ElementReference(dfasdlId = sourceDfasdl.id, elementId = "salary_row_amount")
),
targets = List(
ElementReference(dfasdlId = targetDfasdl.id, elementId = "salary_row_amount")
)
)
)
)
)
)
val source = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = sourceDfasdl.id))
)
val target = ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef =
Option(DFASDLReference(cookbookId = cookbook.id, dfasdlId = targetDfasdl.id))
)
val dummy = TestActorRef(DummyActor.props())
val client = system.actorSelection(dummy.path)
val agent = TestFSMRef(new TenseiAgent("TEST-AGENT", client))
val msg = AgentStartTransformationMessage(
sources = List(source),
target = target,
cookbook = cookbook,
uniqueIdentifier = Option("FOREIGN-KEY-TEST-OneToOne")
)
agent ! msg
expectMsgType[GlobalMessages.TransformationStarted](FiniteDuration(5, SECONDS))
expectMsgType[GlobalMessages.TransformationCompleted](FiniteDuration(7, SECONDS))
val s = connection.createStatement()
withClue("Written data should be correct!") {
val expectedData = Map(
"Einstein" -> new java.math.BigDecimal("3.14"),
"Riemann" -> new java.math.BigDecimal("6.28"),
"Gauß" -> new java.math.BigDecimal("12.56"),
"Listing" -> new java.math.BigDecimal("25.12"),
"Leibnitz" -> new java.math.BigDecimal("50.24")
)
val r = s.executeQuery(
"SELECT t_employees.id AS id, t_employees.lastname AS name, t_salary.amount AS amount FROM t_employees JOIN t_salary ON t_employees.id = t_salary.employee_id"
)
if (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
while (r.next()) {
r.getBigDecimal("amount") should be(expectedData(r.getString("name")))
}
} else
fail("No results found in database!")
}
}
}
}
}
}
|
Tensei-Data/tensei-agent
|
src/it/scala/databases/sqlite/ForeignKeysTest.scala
|
Scala
|
agpl-3.0
| 22,872
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger, Input}
case class CP113(value: Int) extends CtBoxIdentifier(name = "Net profit on sale of fixed assets") with CtInteger with Input
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP113.scala
|
Scala
|
apache-2.0
| 829
|
import scala.quoted.*
import scala.quoted.staging.*
object Test {
sealed trait Var {
def get(using Quotes): Expr[String]
def update(x: Expr[String])(using Quotes): Expr[Unit]
}
object Var {
def apply(init: Expr[String])(body: Var => Expr[String])(using Quotes): Expr[String] = '{
var x = $init
${
body(
new Var {
def get(using Quotes): Expr[String] = 'x
def update(e: Expr[String])(using Quotes): Expr[Unit] = '{ x = $e }
}
)
}
}
}
def test1()(using Quotes): Expr[String] = Var('{"abc"}) { x =>
'{
${ x.update('{"xyz"}) }
${ x.get }
}
}
def main(args: Array[String]): Unit = {
given Compiler = Compiler.make(getClass.getClassLoader)
val res = run {
test1()
}
println(res)
}
}
|
dotty-staging/dotty
|
tests/run-staging/quote-var.scala
|
Scala
|
apache-2.0
| 837
|
package models.upload
import java.sql.Timestamp
import com.overviewdocs.test.DbSpecification
class OverviewUploadedFileSpec extends DbSpecification {
"OverviewUploadedFile" should {
trait UploadedFileContext extends DbScope {
val before = new Timestamp(System.currentTimeMillis)
val overviewUploadedFile = OverviewUploadedFile(123L, "attachment; filename=name", "content-type")
}
"set uploadedAt time on creation" in new UploadedFileContext {
overviewUploadedFile.uploadedAt.compareTo(before) must be greaterThanOrEqualTo(0)
}
"withSize results in updated size and time uploadedAt" in new UploadedFileContext {
val uploadedFileWithNewSize = overviewUploadedFile.withSize(100)
uploadedFileWithNewSize.uploadedAt.compareTo(overviewUploadedFile.uploadedAt) must be greaterThanOrEqualTo (0)
}
"withContentInfo sets contentDisposition and contentType" in new UploadedFileContext {
val newDisposition = "new disposition"
val newType = "new type"
val uploadedFileWithNewContentInfo = overviewUploadedFile.withContentInfo(newDisposition, newType)
uploadedFileWithNewContentInfo.contentDisposition must be equalTo newDisposition
uploadedFileWithNewContentInfo.contentType must be equalTo newType
}
"be saveable and findable by id" in new UploadedFileContext {
val savedUploadedFile = overviewUploadedFile.save
savedUploadedFile.id must not be equalTo(0)
val foundUploadedFile = OverviewUploadedFile.findById(savedUploadedFile.id)
foundUploadedFile must beSome
}
"be deleted" in new UploadedFileContext {
val savedUploadedFile = overviewUploadedFile.save
savedUploadedFile.delete
val foundUploadedFile = OverviewUploadedFile.findById(savedUploadedFile.id)
foundUploadedFile must beNone
}
}
"OverviewUploadedFile filename" should {
"return filename from content-disposition" in {
val name = "file.name"
val overviewUploadedFile = OverviewUploadedFile(0, "attachment; filename=" + name, "content-type")
overviewUploadedFile.filename must be equalTo (name)
}
"return Upload <date> if no content-disposition found" in {
val overviewUploadedFile = OverviewUploadedFile(0, "bad-content-disposition", "content-type")
val now = new Timestamp(System.currentTimeMillis())
val defaultFilename = "Upload " + now
// bad test - only checks that timestamp specifies today's date
// could fail if executed at midnight.
overviewUploadedFile.filename.take(17) must be equalTo(defaultFilename.take(17))
}
}
}
|
overview/overview-server
|
web/test/models/upload/OverviewUploadedFileSpec.scala
|
Scala
|
agpl-3.0
| 2,630
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.api
import kafka.server.KafkaConfig
import kafka.utils.{Logging, ShutdownableThread, TestUtils}
import org.apache.kafka.clients.consumer._
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.junit.Assert._
import org.junit.{Test, Before}
import scala.collection.JavaConversions._
/**
* Integration tests for the new consumer that cover basic usage as well as server failures
*/
class ConsumerBounceTest extends IntegrationTestHarness with Logging {
val producerCount = 1
val consumerCount = 2
val serverCount = 3
val topic = "topic"
val part = 0
val tp = new TopicPartition(topic, part)
// configure the servers and clients
this.serverConfig.setProperty(KafkaConfig.ControlledShutdownEnableProp, "false") // speed up shutdown
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "3") // don't want to lose offset
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
this.serverConfig.setProperty(KafkaConfig.ConsumerMinSessionTimeoutMsProp, "10") // set small enough session timeout
this.producerConfig.setProperty(ProducerConfig.ACKS_CONFIG, "all")
this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my-test")
this.consumerConfig.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
this.consumerConfig.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "100")
this.consumerConfig.setProperty(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "30")
this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
override def generateConfigs() = {
FixedPortTestUtils.createBrokerConfigs(serverCount, zkConnect,enableControlledShutdown = false)
.map(KafkaConfig.fromProps(_, serverConfig))
}
@Before
override def setUp() {
super.setUp()
// create the test topic with all the brokers as replicas
TestUtils.createTopic(this.zkClient, topic, 1, serverCount, this.servers)
}
@Test
def testConsumptionWithBrokerFailures() = consumeWithBrokerFailures(10)
/*
* 1. Produce a bunch of messages
* 2. Then consume the messages while killing and restarting brokers at random
*/
def consumeWithBrokerFailures(numIters: Int) {
val numRecords = 1000
sendRecords(numRecords)
this.producers.foreach(_.close)
var consumed = 0
val consumer = this.consumers(0)
consumer.subscribe(List(topic))
val scheduler = new BounceBrokerScheduler(numIters)
scheduler.start()
while (scheduler.isRunning.get()) {
for (record <- consumer.poll(100)) {
assertEquals(consumed.toLong, record.offset())
consumed += 1
}
consumer.commitSync()
assertEquals(consumer.position(tp), consumer.committed(tp))
if (consumer.position(tp) == numRecords) {
consumer.seekToBeginning()
consumed = 0
}
}
scheduler.shutdown()
}
@Test
def testSeekAndCommitWithBrokerFailures() = seekAndCommitWithBrokerFailures(5)
def seekAndCommitWithBrokerFailures(numIters: Int) {
val numRecords = 1000
sendRecords(numRecords)
this.producers.foreach(_.close)
val consumer = this.consumers(0)
consumer.assign(List(tp))
consumer.seek(tp, 0)
// wait until all the followers have synced the last HW with leader
TestUtils.waitUntilTrue(() => servers.forall(server =>
server.replicaManager.getReplica(tp.topic(), tp.partition()).get.highWatermark.messageOffset == numRecords
), "Failed to update high watermark for followers after timeout")
val scheduler = new BounceBrokerScheduler(numIters)
scheduler.start()
while(scheduler.isRunning.get()) {
val coin = TestUtils.random.nextInt(3)
if (coin == 0) {
info("Seeking to end of log")
consumer.seekToEnd()
assertEquals(numRecords.toLong, consumer.position(tp))
} else if (coin == 1) {
val pos = TestUtils.random.nextInt(numRecords).toLong
info("Seeking to " + pos)
consumer.seek(tp, pos)
assertEquals(pos, consumer.position(tp))
} else if (coin == 2) {
info("Committing offset.")
consumer.commitSync()
assertEquals(consumer.position(tp), consumer.committed(tp))
}
}
}
private class BounceBrokerScheduler(val numIters: Int) extends ShutdownableThread("daemon-bounce-broker", false)
{
var iter: Int = 0
override def doWork(): Unit = {
killRandomBroker()
Thread.sleep(500)
restartDeadBrokers()
iter += 1
if (iter == numIters)
initiateShutdown()
else
Thread.sleep(500)
}
}
private def sendRecords(numRecords: Int) {
val futures = (0 until numRecords).map { i =>
this.producers(0).send(new ProducerRecord(topic, part, i.toString.getBytes, i.toString.getBytes))
}
futures.map(_.get)
}
}
|
mbrukman/kafka
|
core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala
|
Scala
|
apache-2.0
| 5,724
|
package com.eclipsesource.schema
import com.eclipsesource.schema.drafts.Version4
import com.eclipsesource.schema.test.{Assets, JsonSpec}
import org.specs2.mutable.Specification
import org.specs2.specification.AfterAll
import org.specs2.specification.core.Fragments
import org.specs2.specification.dsl.Online
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.TestServer
class RemoteSpecs extends Specification with JsonSpec with Online with AfterAll {
import Version4._
implicit val validator: SchemaValidator = {
SchemaValidator(Some(Version4)).addSchema(
"http://localhost:1234/scope_foo.json",
JsonSource.schemaFromString(
"""{
| "definitions": {
| "bar": { "type": "string" }
| }
|}""".stripMargin
).get
)
}
def createApp: Application = new GuiceApplicationBuilder()
.routes(Assets.routes(getClass, "remotes/")).build()
lazy val server = TestServer(port = 1234, createApp)
def afterAll: Unit = {
server.stop
Thread.sleep(1000)
}
def validateAjv(testName: String): Fragments = validate(testName, "ajv_tests")
sequential
"Validation from remote resources is possible" >> {
{
server.start
Thread.sleep(1000)
} must not(throwAn[Exception]) continueWith {
validateMultiple(
"ajv_tests" -> Seq(
"5_adding_dependency_after",
"5_recursive_references",
"12_restoring_root_after_resolve",
"13_root_ref_in_ref_in_remote_ref",
"14_ref_in_remote_ref_with_id",
"62_resolution_scope_change"
),
"draft4" -> Seq("refRemote")
)
}
}
validateAjv("1_ids_in_refs")
}
|
edgarmueller/play-json-schema-validator
|
src/test/scala/com/eclipsesource/schema/RemoteSpecs.scala
|
Scala
|
apache-2.0
| 1,746
|
package org.jetbrains.plugins.scala.codeInspection
import com.intellij.codeInspection._
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions.VisitorWrapper
/**
* Pavel Fatin
*/
abstract class AbstractInspection(id: String, name: String) extends LocalInspectionTool {
def this() {
this(AbstractInspection.formatId(getClass), AbstractInspection.formatName(getClass))
}
def this(name: String) {
this(AbstractInspection.formatId(getClass), name)
}
override def getDisplayName: String = name
override final def buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean) = VisitorWrapper(actionFor(holder))
def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Any]
}
object AbstractInspection {
private val CapitalLetterPattern = "(?<!=.)\\p{Lu}".r
def formatId(aClass: Class[_]) = {
aClass.getSimpleName.stripSuffix("Inspection")
}
def formatName(aClass: Class[_]) = {
val id = formatId(aClass)
CapitalLetterPattern.replaceAllIn(id, it => s" ${it.group(0).toLowerCase}")
}
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/AbstractInspection.scala
|
Scala
|
apache-2.0
| 1,065
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.query
case class QueryResponse(traceIds: Seq[Long],
startTs: Long,
endTs: Long)
|
ajantis/zipkin
|
zipkin-common/src/main/scala/com/twitter/zipkin/query/QueryResponse.scala
|
Scala
|
apache-2.0
| 763
|
package com.github.probe.backend
package data
import org.scala_tools.time.Imports._
object LogEntry {
object Severity extends Enumeration {
val Assert = Value("Assert")
val Error = Value("Error")
val Warn = Value("Warn")
val Info = Value("Info")
val Debug = Value("Debug")
val Verbose = Value("Verbose")
}
def apply(application: Option[String], time: LocalDateTime, processId: Int, threadId: Int, severity: LogEntry.Severity.Value, tag: String, msg: String): LogEntry =
LogEntry(None, application, time, processId, threadId, severity, tag, msg)
}
case class LogEntry(id: Option[db.Log.idType], application: Option[String], time: LocalDateTime, processId: Int, threadId: Int, severity: LogEntry.Severity.Value, tag: String, msg: String)
|
khernyo/freezing-ninja
|
backend/src/main/scala/com/github/probe/backend/data/LogEntry.scala
|
Scala
|
apache-2.0
| 774
|
package org.embulk.input.dynamodb.deprecated
import com.fasterxml.jackson.annotation.JsonProperty
import com.google.common.base.Objects
class FilterConfig {
private var _name: String = _
private var _type: String = _
private var _condition: String = _
private var _value: String = _
private var _value2: String = _
def this(
@JsonProperty("name") _name: String,
@JsonProperty("type") _type: String,
@JsonProperty("condition") _condition: String,
@JsonProperty("value") _value: String,
@JsonProperty("value2") _value2: String
) {
this()
this._name = _name
this._type = _type
this._condition = _condition
this._value = _value
this._value2 = _value2
}
@JsonProperty("name")
def getName = _name
@JsonProperty("type")
def getType = _type
@JsonProperty("condition")
def getCondition = _condition
@JsonProperty("value")
def getValue = _value
@JsonProperty("value2")
def getValue2 = _value2
override def equals(obj: Any): Boolean = {
// Reference-equality shortcut; `eq` avoids recursing back into this method via `==`.
if (this eq obj.asInstanceOf[AnyRef]) return true
if (!obj.isInstanceOf[FilterConfig]) return false
val other: FilterConfig = obj.asInstanceOf[FilterConfig]
Objects.equal(this._name, other._name) &&
Objects.equal(this._type, other._type) &&
Objects.equal(this._condition, other._condition) &&
Objects.equal(this._value, other._value) &&
Objects.equal(this._value2, other._value2)
}
// hashCode must stay consistent with the overridden equals.
override def hashCode(): Int =
Objects.hashCode(_name, _type, _condition, _value, _value2)
}
|
lulichn/embulk-input-dynamodb
|
src/main/scala/org/embulk/input/dynamodb/deprecated/FilterConfig.scala
|
Scala
|
mit
| 1,426
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogdebugger.ui.fieldvisualizations.vector
import libcog._
import cogdebugger.ui.components.WrapPanel
import cogdebugger.ui.fieldvisualizations._
import cogdebugger.ui.fieldvisualizations.scalar.ScalarMemoryView
import scala.swing._
import cogdebugger.{ToolFactory, BooleanProperty, RestorableState, PropertyValueChanged}
import scala.xml.{Node, Elem}
import java.nio.FloatBuffer
/*
* Created with IntelliJ IDEA.
* User: gonztobi
* Date: 12/3/12
* Time: 11:34 AM
*/
/** A display for VectorFields that breaks the vectors apart into their
* individual components and displays each set of components as a grayscale
* image - essentially, slicing each component out of the field and displaying
* it with a [[ScalarMemoryView]].
*
* Just as a vector is a linear collection of scalars, a VectorField can be
* thought of as a linear collection of ScalarFields, one for each component.
* This visualization treats the target VectorField just so, and essentially
* serves to translate the vector field into a multi-layer (3D) ScalarField
* and view it as such.
*
* Each individual vector component is "sliced" out of the field to create
* a scalar field with the same dimensions as the original vector field. That
* scalar field is visually represented as a grayscale image, with a single
* pixel per field element. The default color mapping represents the minimum
* value for the vector component as black, the maximum as white, and
* intermediate values falling on a linear gradient between those two
* extremes. Thus, a vector field of n dimensions will be displayed as n
* grayscale images. Each image is annotated with a title indicating the vector
* component it represents, as well as a color key showing which colors
* correspond to the minimum and maximum values for that component, together
* with those minimum and maximum values themselves.
*
* As a rough example, a vector field with 50 rows and 100 columns containing
* two-dimensional vectors would look something like this:
* {{{
* Component 0 Component 1
* +---------------+ +---------------+
* | | | |
* | 100x50 image | | 100x50 image |
* | | | |
* +---------------+ +---------------+
* [X] = <min> [ ] = <max> [X] = <min> [ ] = <max>
* }}}
*
* @param fieldType A FieldType describing the shape of the field and its
* vectors.
*/
class VectorComponentsSubpanel(fieldType: FieldType)
extends BorderPanel
with Viewer
with ZoomProperty
with RestorableState
with ToolbarItems
{
private def fieldShape = fieldType.fieldShape
private def tensorShape = fieldType.tensorShape
// We need to cache the last seen data in case we have to redraw the view for
// some reason (zoom changes, floating max toggled on/off, etc.)
protected var _data: AbstractFieldMemory = null
protected def data: VectorFieldReader = _data.asInstanceOf[VectorFieldReader]
/** Default zoom increment. */
var zoomIncrement = 1f
val FloatingMaxProperty = new BooleanProperty("Floating Max", false)
properties += ZoomProperty
properties += FloatingMaxProperty
val vectorLength = tensorShape(0)
val (layers, rows, columns) = fieldShape.dimensions match {
case 0 => (1, 1, 1)
case 1 => (1, 1, fieldShape(0))
case 2 => (1, fieldShape(0), fieldShape(1))
case x => throw new RuntimeException("Only 0, 1, and 2D VectorFields are supported.")
}
val scalarPanels = Array.tabulate(vectorLength) { i => new ScalarMemoryView(fieldType) }
val componentDisplays = scalarPanels.zipWithIndex.map { case (panel, idx) =>
// Attach title and legend to each subpanel
val label = new Label("Component " + idx)
val legend = new BoxPanel(Orientation.Horizontal)
legend.contents ++= panel.toolbarComponents(panel.legendGroupIdx).components
new BorderPanel() {
add(label, BorderPanel.Position.North)
add(panel, BorderPanel.Position.Center)
add(legend, BorderPanel.Position.South)
}
}
listenTo(FloatingMaxProperty, ZoomProperty)
reactions += {
case PropertyValueChanged(ZoomProperty, oldValue, newValue: Float) =>
scalarPanels.foreach(_.zoomLevel = newValue)
case PropertyValueChanged(FloatingMaxProperty, oldValue, newValue: Boolean) =>
for (panel <- scalarPanels)
panel.FloatingMaxProperty.value = newValue
if (data != null)
update(null, data) // Force redraw of the data
}
val viewPanel = new WrapPanel(componentDisplays: _*)
add(viewPanel, BorderPanel.Position.Center)
def update(src: AnyRef, data: AbstractFieldMemory, time: Long) {
(data.elementType, data.tensorShape.dimensions) match {
case (Float32, 1) =>
_data = data
update(src, data.asInstanceOf[VectorFieldReader])
case _ =>
throw new RuntimeException("Viewer got unexpected data")
}
}
def update(src: AnyRef, data: VectorFieldReader) {
// for (i <- (0 until vectorLength)) {
for (i <- (0 until vectorLength).par) {
val sfr = new VectorComponentSliceAsScalarFieldReader(data.asInstanceOf[VectorFieldMemory], i)
scalarPanels(i).update(null, sfr)
}
}
def save: Elem =
<VectorComponentsSubPanel>
{ propertiesTag }
</VectorComponentsSubPanel>
def restore(tag: Node) {
(tag \ "VectorComponentsSubPanel" \ "properties").headOption.foreach(xmlToProperties)
}
def toolbarComponents: Seq[ComponentGroup] = {
val floatingMaxButton = ToolFactory.toggle(FloatingMaxProperty)
Seq(ComponentGroup(floatingMaxButton))
}
}
object VectorComponentsSubpanel {
val resetAbsMaxPixelTooltip =
"Controls how points in the field are mapped to luminance values. When " +
"enabled, luminance is based on maximum value in the field at " +
"the current sim tick. When disabled, luminance is based on " +
"maximum value in the field seen across entire sim history."
}
/** A wrapper for VectorFieldMemory that presents a single component slice of
* it as a ScalarFieldReader. Handy for re-using viewers that already know how
* to present scalar fields but don't have explicit support for vector fields.
*
* This is perhaps dodging the Cog API too much. The last checkin that didn't
* use this system was revision 7386.
*
* @param vectorField The target VectorField out of which a single vector
* component is to be sliced out as a ScalarField.
* @param componentIdx The index of the vector component in `vectorField` that
* is to be presented as a ScalarField.
*/
class VectorComponentSliceAsScalarFieldReader(vectorField: VectorFieldMemory,
componentIdx: Int)
extends ScalarFieldReader {
val layers: Int = vectorField.layers
val rows: Int = vectorField.rows
val columns: Int = vectorField.columns
/** Shape of the tensors in the field. */
val tensorShape: Shape = Shape()
/** Shape of the field. */
val fieldShape: Shape = vectorField.fieldShape
/** Type of the field. */
val fieldType: FieldType =
new FieldType(fieldShape, tensorShape, vectorField.fieldType.elementType)
private def paddedColumns = vectorField.paddedColumns
private def dimensions = fieldShape.dimensions
private val page = {
val startPage = componentIdx * vectorField.pageSize
vectorField.directBuffer.duplicate()
.position(startPage)
.limit(startPage + vectorField.pageSize)
.asInstanceOf[FloatBuffer].slice()
}
/** Compute the L-infinity norm on the difference of `this` and `that`.
*
* @param that AbstractFieldMemory to compare to `this`
* @return L-infinity error
*/
def compareLinf(that: FieldReader): Float = {
require(fieldType.equals(that.fieldType))
require(that.isInstanceOf[ScalarFieldReader])
iterator.zip(that.asInstanceOf[ScalarFieldReader].iterator)
.map(v => math.abs(v._1 - v._2))
.max
}
/** An iterator over all values in the field, scanning in row-major order. */
def iterator: Iterator[Float] = new Iterator[Float] {
private var index = 0
private var column = 0
private val actualColumns = fieldType.fieldShape.lastDimension
private val columnPadding = paddedColumns - actualColumns
private val lastIndex = layers * rows * paddedColumns - columnPadding
private val buffer = page.duplicate
buffer.rewind
def hasNext = (index == 0) || (index < lastIndex)
def next(): Float = {
val value = buffer.get(index)
column += 1
if (column == actualColumns) {
column = 0
index += columnPadding + 1
} else
index += 1
value
}
}
/** Read the single value in a 0D scalar field. */
def read(): Float = {
require(dimensions == 0)
page.get(0)
}
/** Read the value at (`col`) in a 1D scalar field. */
def read(col: Int): Float = {
require(dimensions == 1)
page.get(col)
}
/** Read the value at (`row`, `col`) in a 2D scalar field. */
def read(row: Int, col: Int): Float = {
require(dimensions == 2)
page.get(row * paddedColumns + col)
}
/** Read the value at (`layer`, `row`, `col`) in a 3D scalar field. */
def read(layer: Int, row: Int, col: Int): Float = {
require(dimensions == 3)
page.get(layer * rows * paddedColumns + row * paddedColumns + col)
}
/** Read the entire 0D or 1D field into the provided Array[Float]. */
def get(dst: Array[Float]): Unit = {
require(dimensions == 0 || dimensions == 1)
page.rewind
require(dst.size == columns,
s"Mismatched column array in VectorComponentsSubpanel.get(), expecting $columns, saw ${dst.size}.")
page.get(dst)
}
/** Read a portion of the values of the 0D or 1D scalar field into an
* Array[Float], starting at the source buffer's `srcIndex` position.
*/
def get(srcIndex: Int, dst: Array[Float]) {
require(dimensions == 0 || dimensions == 1)
page.position(srcIndex)
page.get(dst)
}
/** Read `length` values of the 0D or 1D scalar field into the `dst`
* Array[Float], starting at the source buffer's `srcIndex` position,
* and placing the values in the `dst` Array starting at position
* `dstIndex`.
*/
def get(srcIndex: Int, dst: Array[Float], dstIndex: Int, length: Int) {
require(dimensions == 0 || dimensions == 1)
page.position(srcIndex)
page.get(dst, dstIndex, length)
}
/** Read the entire 2D field into the provided Array[Array[Float]] */
def get(dst: Array[Array[Float]]): Unit = {
require(dimensions == 2)
for (r <- 0 until dst.size) {
page.position(r*paddedColumns)
require(dst(r).size == columns, s"Mismatched column array in VectorComponentsSubpanel.get(), expecting $columns, saw ${dst(r).size}.")
page.get(dst(r))
}
}
/** Read the entire 3D field into the provided Array[Array[Array[Float]]] */
def get(dst: Array[Array[Array[Float]]]): Unit = {
require(dimensions == 3)
for (l <- 0 until dst.size) {
for (r <- 0 until dst(l).size) {
page.position(l * rows * paddedColumns + r * paddedColumns) // layer-major offset, matching read(layer, row, col)
require(dst(l)(r).size == columns, s"Mismatched column array in VectorComponentsSubpanel.get(), expecting $columns, saw ${dst(l)(r).size}.")
page.get(dst(l)(r))
}
}
}
}
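// Hedged worked example (not part of the original file), assuming a 2D vector
// field of 3 rows x 4 columns holding 2-component vectors with no column padding
// (paddedColumns == 4, so each component page spans 3 * 4 = 12 floats):
//   - the slice for componentIdx = 1 starts at float offset 1 * pageSize = 12
//     within the field's direct buffer;
//   - inside that slice, read(2, 3) fetches page.get(2 * 4 + 3) = page.get(11),
//     i.e. the last element of the second component's page.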
|
hpe-cct/cct-core
|
src/main/scala/cogdebugger/ui/fieldvisualizations/vector/VectorComponentsSubpanel.scala
|
Scala
|
apache-2.0
| 12,140
|
package io.buoyant.k8s
import com.fasterxml.jackson.annotation.{JsonProperty, JsonSubTypes, JsonTypeInfo}
import com.fasterxml.jackson.core.`type`.TypeReference
import com.twitter.finagle.{http, Service => FService}
import io.buoyant.k8s.{KubeObject => BaseObject}
package object v1beta1 {
type Client = FService[http.Request, http.Response]
val group = "apis"
val version = "extensions/v1beta1"
trait Object extends BaseObject
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonSubTypes(Array(
new JsonSubTypes.Type(value = classOf[IngressAdded], name = "ADDED"),
new JsonSubTypes.Type(value = classOf[IngressModified], name = "MODIFIED"),
new JsonSubTypes.Type(value = classOf[IngressDeleted], name = "DELETED"),
new JsonSubTypes.Type(value = classOf[IngressError], name = "ERROR")
))
sealed trait IngressWatch extends Watch[Ingress]
case class IngressAdded(
`object`: Ingress
) extends IngressWatch with Watch.Added[Ingress]
case class IngressModified(
`object`: Ingress
) extends IngressWatch with Watch.Modified[Ingress]
case class IngressDeleted(
`object`: Ingress
) extends IngressWatch with Watch.Deleted[Ingress]
case class IngressError(
@JsonProperty(value = "object") status: Status
) extends IngressWatch with Watch.Error[Ingress]
implicit object IngressDescriptor extends ObjectDescriptor[Ingress, IngressWatch] {
def listName = "ingresses"
def toWatch(e: Ingress) = IngressModified(e)
}
implicit private val ingressListType = new TypeReference[IngressList] {}
implicit private val ingressType = new TypeReference[Ingress] {}
implicit private val ingressWatch = new TypeReference[IngressWatch] {}
case class Api(client: Client) extends Version[Object] {
def group = v1beta1.group
def version = v1beta1.version
override def withNamespace(ns: String) = new NsApi(client, ns)
def ingresses = listResource[Ingress, IngressWatch, IngressList]()
}
class NsApi(client: Client, ns: String)
extends NsVersion[Object](client, v1beta1.group, v1beta1.version, ns) {
def ingresses = listResource[Ingress, IngressWatch, IngressList]()
}
case class IngressList(
items: Seq[Ingress],
kind: Option[String] = None,
metadata: Option[ObjectMeta] = None,
apiVersion: Option[String] = None
) extends KubeList[Ingress]
case class Ingress(
spec: Option[IngressSpec] = None,
status: Option[IngressStatus] = None,
kind: Option[String] = None,
metadata: Option[ObjectMeta] = None,
apiVersion: Option[String] = None
) extends Object
case class IngressStatus(
loadBalancer: Option[LoadBalancerStatus] = None
)
case class IngressSpec(
backend: Option[IngressBackend] = None,
tls: Option[Seq[IngressTLS]],
rules: Option[Seq[IngressRule]]
)
case class IngressBackend(
serviceName: String,
servicePort: String
)
case class IngressTLS(
hosts: Option[Seq[String]],
secretName: Option[String]
)
case class IngressRule(
host: Option[String],
http: Option[HTTPIngressRuleValue]
)
case class HTTPIngressRuleValue(
paths: Seq[HTTPIngressPath]
)
case class HTTPIngressPath(
path: Option[String],
backend: IngressBackend
)
case class LoadBalancerStatus(
ingress: Option[Seq[LoadBalancerIngress]]
)
case class LoadBalancerIngress(
ip: String
)
}
|
hhtpcd/linkerd
|
k8s/src/main/scala/io/buoyant/k8s/v1beta1.scala
|
Scala
|
apache-2.0
| 3,446
|
/*
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.update.plugins
import org.eclipse.rdf4j.model.Model
import org.eclipse.rdf4j.repository.sail.SailRepository
import org.knora.webapi.messages.store.triplestoremessages.{SparqlSelectResponse, SparqlSelectResponseBody}
class UpdatePluginPR1322Spec extends UpdatePluginSpec {
"Update plugin PR1322" should {
"add UUIDs to values" in {
// Parse the input file.
val model: Model = trigFileToModel("src/test/resources/test-data/update/pr1322.trig")
// Use the plugin to transform the input.
val plugin = new UpdatePluginPR1322
plugin.transform(model)
// Make an in-memory repository containing the transformed model.
val repository: SailRepository = makeRepository(model)
val connection = repository.getConnection
// Check that UUIDs were added.
val query: String =
"""
|PREFIX knora-base: <http://www.knora.org/ontology/knora-base#>
|
|SELECT ?value WHERE {
| ?value knora-base:valueHasUUID ?valueHasUUID .
|} ORDER BY ?value
|""".stripMargin
val queryResult1: SparqlSelectResponse = doSelect(selectQuery = query, connection = connection)
val expectedResultBody: SparqlSelectResponseBody = expectedResult(
Seq(
Map(
"value" -> "http://rdfh.ch/0001/thing-with-history/values/1c"
),
Map(
"value" -> "http://rdfh.ch/0001/thing-with-history/values/2c"
)
)
)
assert(queryResult1.results == expectedResultBody)
connection.close()
repository.shutDown()
}
}
}
*/
|
musicEnfanthen/Knora
|
webapi/src/test/scala/org/knora/webapi/update/plugins/UpdatePluginPR1322Spec.scala
|
Scala
|
agpl-3.0
| 2,647
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.computations._
class TotalDeductionsCalculatorSpec extends WordSpec with Matchers {
"Total Deductions Calculator" should {
"calculate deductions with populated values" in new TotalDeductionsCalculator {
totalDeductionsCalculation(cp55 = CP55(Some(1)),
cp57 = CP57(Some(2)),
cp58 = CP58(3),
cp507 = CP507(4),
cp505 = CP505(Some(5)),
cp983 = CP983(10)) shouldBe CP59(25)
}
}
}
|
hmrc/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/computations/calculations/TotalDeductionsCalculatorSpec.scala
|
Scala
|
apache-2.0
| 1,279
|
package io.sipstack.transaction
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Props
import io.sipstack.config.TransactionLayerConfiguration
import io.sipstack.transport.FlowActor.IncomingRequest
import io.sipstack.transport.FlowActor.IncomingResponse
import akka.actor.Terminated
final object TransactionSupervisor {
def props(next:ActorRef, config:TransactionLayerConfiguration) : Props = Props(new TransactionSupervisor(next, config))
}
final class TransactionSupervisor(next:ActorRef, config:TransactionLayerConfiguration) extends Actor {
override def receive = {
case req:IncomingRequest => processIncomingRequest(req)
case resp:IncomingResponse => processIncomingResponse(resp)
case Terminated(_) => // ignore
case msg => println("Guess I should take care of it: " + msg)
}
private def processIncomingRequest(req:IncomingRequest) {
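// Route the request to the existing server transaction for this transaction id,
// or create (and death-watch) a new ServerTransaction child before forwarding.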
val child = context.child(req.transactionId.id)
if (child.isDefined) {
child.get forward req
} else {
context.watch(context.actorOf(ServerTransaction.props(sender, next, config, req), req.transactionId.id)) forward req
}
}
private def processIncomingResponse(resp:IncomingResponse) {
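// Not handled yet: incoming responses are currently ignored.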
}
}
|
jonbo372/sipstack
|
sipstack-transaction/src/main/scala/io/sipstack/transaction/TransactionSupervisor.scala
|
Scala
|
mit
| 1,227
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import java.util.concurrent.TimeUnit
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala.testutils.{CheckingIdentityRichAllWindowFunction, CheckingIdentityRichProcessAllWindowFunction, CheckingIdentityRichProcessWindowFunction, CheckingIdentityRichWindowFunction}
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.test.util.AbstractTestBase
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
/**
 * Tests for Reduce over windows. These also test whether OutputTypeConfigurable functions
 * work for windows.
*/
class WindowReduceITCase extends AbstractTestBase {
@Test
def testReduceWindow(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("b", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 6))
ctx.collect(("a", 7))
ctx.collect(("a", 8))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.keyBy(0)
.window(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce( (a, b) => (a._1 + b._1, a._2 + b._2) )
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Reduce Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(aaa,21)",
"(bbb,12)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
}
@Test
def testReduceWithWindowFunction(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
CheckingIdentityRichWindowFunction.reset()
val reduceFunc = new ReduceFunction[(String, Int)] {
override def reduce(a: (String, Int), b: (String, Int)): (String, Int) = {
(a._1 + b._1, a._2 + b._2)
}
}
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("b", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 6))
ctx.collect(("a", 7))
ctx.collect(("a", 8))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.keyBy(0)
.window(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce(
reduceFunc,
new CheckingIdentityRichWindowFunction[(String, Int), Tuple, TimeWindow]())
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Reduce Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(aaa,21)",
"(bbb,12)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
CheckingIdentityRichWindowFunction.checkRichMethodCalls()
}
@Test
def testReduceWithProcessWindowFunction(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
CheckingIdentityRichProcessWindowFunction.reset()
val reduceFunc = new ReduceFunction[(String, Int)] {
override def reduce(a: (String, Int), b: (String, Int)): (String, Int) = {
(a._1 + b._1, a._2 + b._2)
}
}
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("b", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 6))
ctx.collect(("a", 7))
ctx.collect(("a", 8))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.keyBy(0)
.window(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce(
reduceFunc,
new CheckingIdentityRichProcessWindowFunction[(String, Int), Tuple, TimeWindow]())
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Reduce Process Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(aaa,21)",
"(bbb,12)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
CheckingIdentityRichProcessWindowFunction.checkRichMethodCalls()
}
@Test
def testReduceAllWindow(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("a", 3))
ctx.collect(("b", 4))
ctx.collect(("a", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 5))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.windowAll(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce( (a, b) => (a._1 + b._1, a._2 + b._2) )
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Fold All-Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(bababa,24)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
}
@Test
def testReduceAllWithWindowFunction(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
CheckingIdentityRichAllWindowFunction.reset()
val reduceFunc = new ReduceFunction[(String, Int)] {
override def reduce(a: (String, Int), b: (String, Int)): (String, Int) = {
(a._1 + b._1, a._2 + b._2)
}
}
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("a", 3))
ctx.collect(("b", 4))
ctx.collect(("a", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 5))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.windowAll(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce(
reduceFunc,
new CheckingIdentityRichAllWindowFunction[(String, Int), TimeWindow]())
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Fold All-Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(bababa,24)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
CheckingIdentityRichAllWindowFunction.checkRichMethodCalls()
}
@Test
def testReduceAllWithProcessWindowFunction(): Unit = {
WindowReduceITCase.testResults = mutable.MutableList()
CheckingIdentityRichProcessAllWindowFunction.reset()
val reduceFunc = new ReduceFunction[(String, Int)] {
override def reduce(a: (String, Int), b: (String, Int)): (String, Int) = {
(a._1 + b._1, a._2 + b._2)
}
}
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
env.setParallelism(1)
val source1 = env.addSource(new SourceFunction[(String, Int)]() {
def run(ctx: SourceFunction.SourceContext[(String, Int)]) {
ctx.collect(("a", 0))
ctx.collect(("a", 1))
ctx.collect(("a", 2))
ctx.collect(("b", 3))
ctx.collect(("a", 3))
ctx.collect(("b", 4))
ctx.collect(("a", 4))
ctx.collect(("b", 5))
ctx.collect(("a", 5))
// source is finite, so it will have an implicit MAX watermark when it finishes
}
def cancel() {
}
}).assignTimestampsAndWatermarks(new WindowReduceITCase.Tuple2TimestampExtractor)
source1
.windowAll(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
.reduce(
reduceFunc,
new CheckingIdentityRichProcessAllWindowFunction[(String, Int), TimeWindow]())
.addSink(new SinkFunction[(String, Int)]() {
override def invoke(value: (String, Int)) {
WindowReduceITCase.testResults += value.toString
}
})
env.execute("Fold All-Window Test")
val expectedResult = mutable.MutableList(
"(aaa,3)",
"(bababa,24)")
assertEquals(expectedResult.sorted, WindowReduceITCase.testResults.sorted)
CheckingIdentityRichProcessAllWindowFunction.checkRichMethodCalls()
}
}
object WindowReduceITCase {
private var testResults: mutable.MutableList[String] = null
private class Tuple2TimestampExtractor extends AssignerWithPunctuatedWatermarks[(String, Int)] {
private var currentTimestamp = -1L
override def extractTimestamp(element: (String, Int), previousTimestamp: Long): Long = {
currentTimestamp = element._2
currentTimestamp
}
def checkAndGetNextWatermark(
lastElement: (String, Int),
extractedTimestamp: Long): Watermark = {
new Watermark(lastElement._2 - 1)
}
}
}
|
hequn8128/flink
|
flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/WindowReduceITCase.scala
|
Scala
|
apache-2.0
| 12,866
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.ast.conditions
import org.neo4j.cypher.internal.frontend.v2_3.ast._
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
class AggregationsAreIsolatedTest extends CypherFunSuite with AstConstructionTestSupport {
private val condition: (Any => Seq[String]) = aggregationsAreIsolated
test("happy when aggregation are top level in expressions") {
val ast: Expression = CountStar()_
condition(ast) shouldBe empty
}
test("unhappy when aggregation is sub-expression of the expressions") {
val ast: Expression = Equals(CountStar()_, UnsignedDecimalIntegerLiteral("42")_)_
condition(ast) should equal(Seq(s"Expression $ast contains child expressions which are aggregations"))
}
test("unhappy when aggregations are both top-level and sub-expression of the expression") {
val equals: Expression = Equals(CountStar()_, UnsignedDecimalIntegerLiteral("42")_)_
val ast: Expression = FunctionInvocation(FunctionName("count")_, equals)_
condition(ast) should equal(Seq(s"Expression $equals contains child expressions which are aggregations"))
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/ast/conditions/AggregationsAreIsolatedTest.scala
|
Scala
|
apache-2.0
| 1,948
|
package mesosphere.marathon
package metrics.current
sealed trait UnitOfMeasurement
object UnitOfMeasurement {
case object None extends UnitOfMeasurement
// Memory is measured in bytes. ".bytes" is appended to metric names.
case object Memory extends UnitOfMeasurement
// Time is measured in seconds. ".seconds" is appended to timer names.
case object Time extends UnitOfMeasurement
}
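// A minimal sketch of the naming convention described in the comments above; the
// suffixFor/metricName helpers are hypothetical and not part of Marathon's metrics API.
object UnitOfMeasurementExample {
  def suffixFor(unit: UnitOfMeasurement): String = unit match {
    case UnitOfMeasurement.None   => ""
    case UnitOfMeasurement.Memory => ".bytes"
    case UnitOfMeasurement.Time   => ".seconds"
  }
  // e.g. metricName("jvm.heap.used", UnitOfMeasurement.Memory) == "jvm.heap.used.bytes"
  def metricName(base: String, unit: UnitOfMeasurement): String = base + suffixFor(unit)
}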
|
mesosphere/marathon
|
src/main/scala/mesosphere/marathon/metrics/current/UnitOfMeasurement.scala
|
Scala
|
apache-2.0
| 397
|
package colossus.metrics
import colossus.metrics.collectors.{Counter, DefaultCounter}
import scala.concurrent.duration._
class CounterSpec extends MetricIntegrationSpec {
def counter = new DefaultCounter("/foo")
"Counter" must {
"increment" in {
val c = counter
c.get() must equal(0)
c.increment()
c.get() must equal(1)
}
"decrement" in {
val c = counter
c.increment()
c.get() must equal(1)
c.decrement()
c.get() must equal(0)
}
"set" in {
val c = counter
c.set(value = 3456)
c.get() must equal(3456)
}
"correctly handle tags" in {
val c = counter
c.set(tags = Map("a" -> "a"), 123)
c.increment(tags = Map("a" -> "b"))
c.increment(tags = Map("a" -> "b"))
c.get(Map("a" -> "a")) must equal(123)
c.get(Map("a" -> "b")) must equal(2)
}
"return no metrics when not used yet" in {
counter.tick(1.second) must equal(Map())
}
"have correct address" in {
implicit val ns = MetricContext("/foo", Collection.withReferenceConf(Seq(1.second))) / "bar"
val c = Counter("/baz")
c.address must equal(MetricAddress("/foo/bar/baz"))
}
}
}
|
tumblr/colossus
|
colossus-metrics/src/test/scala/colossus/metrics/CounterSpec.scala
|
Scala
|
apache-2.0
| 1,238
|
package mist.api.data
import scala.util._
trait Json4sConversion {
/** For running mist jobs directly from spark-submit **/
def parse(s: String): Try[JsData] = {
import org.json4s._
def translateAst(in: JValue): JsData = in match {
case JNothing => JsNull // json4s uses JNothing for "no value"; JsNull is the closest JsData counterpart
case JNull => JsNull
case JString(s) => JsString(s)
case JDouble(d) => JsNumber(d)
case JDecimal(d) => JsNumber(d)
case JInt(i) => JsNumber(i)
case JLong(l) => JsNumber(l)
case JBool(v) => JsBoolean(v)
case JObject(fields) => JsMap(fields.map({case (k, v) => k -> translateAst(v)}): _*)
case JArray(elems) => JsList(elems.map(translateAst))
case JSet(elems) => JsList(elems.toList.map(translateAst))
}
Try(org.json4s.jackson.JsonMethods.parse(s, useBigDecimalForDouble = true)).map(json4sJs => translateAst(json4sJs))
}
def parseRoot(s: String): Try[JsMap] = parse(s).flatMap {
case m:JsMap => Success(m)
case _ => Failure(new IllegalArgumentException(s"Couldn't parse js object from input: $s"))
}
def formattedString(js: JsData): String = {
import org.json4s._
def translateAst(in: JsData): JValue = in match {
case JsNull => JNull
case JsString(s) => JString(s)
case JsNumber(d) =>
d.toBigIntExact() match {
case Some(x) => JInt(x)
case None => JDecimal(d)
}
case JsTrue => JBool(true)
case JsFalse => JBool(false)
case JsMap(fields) => JObject(fields.map({ case (k, v) => k -> translateAst(v) }).toList)
case JsList(elems) => JArray(elems.map(translateAst).toList)
}
org.json4s.jackson.JsonMethods.pretty(translateAst(js))
}
}
object Json4sConversion extends Json4sConversion
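// A minimal usage sketch (the payload below is hypothetical); parseRoot accepts only a
// top-level JSON object, and formattedString pretty-prints the resulting JsData.
object Json4sConversionExample extends App {
  val parsed = Json4sConversion.parseRoot("""{"name": "job-1", "retries": 3}""")
  parsed.foreach(js => println(Json4sConversion.formattedString(js)))
}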
|
Hydrospheredata/mist
|
mist-lib/src/main/spark-2.4.0/mist/api/data/Json4sConversion.scala
|
Scala
|
apache-2.0
| 1,847
|
package chandu0101.scalajs.react.components.listviews
import chandu0101.scalajs.react.components.searchboxes.ReactSearchBox
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import scala.scalajs.js
import scalacss.Defaults._
import scalacss.ScalaCssReact._
object ReactListView {
class Style extends StyleSheet.Inline {
import dsl._
val listGroup = style(marginBottom(20.px),
paddingLeft.`0`,
&.firstChild.lastChild(borderBottomLeftRadius(4 px),
borderBottomRightRadius(4 px))
)
val listItem = styleF.bool(selected => styleS(position.relative,
display.block,
padding(v = 10.px, h = 15.px),
border :=! "1px solid #ecf0f1",
cursor.pointer,
mixinIfElse(selected)(color.white,
fontWeight._500,
backgroundColor :=! "#146699")(
backgroundColor.white,
&.hover(color :=! "#555555",
backgroundColor :=! "#ecf0f1"))
))
}
object DefaultStyle extends Style
case class State(filterText: String = "", selectedItem: String = "", hoverIndex: Int = -1)
class Backend(t: BackendScope[Props, State]) {
def onTextChange(text: String) = {
t.modState(_.copy(filterText = text))
}
def onItemSelect(value: String) = {
t.modState(_.copy(selectedItem = value, hoverIndex = -1))
if (t.props.onItemSelect != null) t.props.onItemSelect(value)
}
}
val component = ReactComponentB[Props]("ReactListView")
.initialState(State())
.backend(new Backend(_))
.render((P, S, B) => {
val fItems = P.items.filter(item => item.toString.toLowerCase.contains(S.filterText.toLowerCase))
<.div(
P.showSearchBox ?= ReactSearchBox(onTextChange = B.onTextChange),
<.ul(P.style.listGroup)(
fItems.map(item => {
val selected = item.toString == S.selectedItem
<.li(P.style.listItem(selected), ^.onClick --> B.onItemSelect(item.toString), item)
})
)
)
})
.build
case class Props(items: List[String], onItemSelect: String => Unit, showSearchBox: Boolean, style: Style)
def apply(items: List[String], onItemSelect: String => Unit = null, showSearchBox: Boolean = false, style: Style = DefaultStyle, ref: js.UndefOr[String] = "", key: js.Any = {}) = component.set(key, ref)(Props(items, onItemSelect, showSearchBox, style))
}
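// A minimal usage sketch (hypothetical items and handler; mounting the component into
// the DOM is not shown).
object ReactListViewExample {
  val fruitList = ReactListView(
    items = List("Apple", "Banana", "Cherry"),
    onItemSelect = value => println(s"selected $value"),
    showSearchBox = true
  )
}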
|
coreyauger/scalajs-react-components
|
core/src/main/scala/chandu0101/scalajs/react/components/listviews/ReactListView.scala
|
Scala
|
apache-2.0
| 2,372
|
package vaadin.scala
import vaadin.scala.mixins.ScaladinMixin
import vaadin.scala.mixins.ContainerMixin
import vaadin.scala.mixins.ContainerIndexedMixin
import vaadin.scala.mixins.ContainerHierarchicalMixin
import vaadin.scala.mixins.ContainerOrderedMixin
import vaadin.scala.mixins.ContainerViewerMixin
import vaadin.scala.mixins.ContainerSortableMixin
package mixins {
trait ContainerMixin extends ScaladinMixin
trait ContainerHierarchicalMixin extends ContainerMixin
trait ContainerOrderedMixin extends ContainerMixin
trait ContainerViewerMixin extends ScaladinMixin
trait ContainerSortableMixin extends ContainerOrderedMixin
trait ContainerIndexedMixin extends ContainerOrderedMixin
}
//Base Container trait is outside the companion object so extending classes can have nicer syntax
trait Container extends Wrapper {
import scala.collection.JavaConverters._
def p: com.vaadin.data.Container with ContainerMixin
def item(id: Any): Option[Item] = optionalWrapItem(p.getItem(id))
def itemIds: Iterable[Any] = p.getItemIds.asScala
def removeAllItems(): Boolean = p.removeAllItems
def addContainerProperty(propertyId: Any, propertyType: Class[_], defaultValue: Option[Any] = None): Boolean = p.addContainerProperty(propertyId, propertyType, defaultValue.orNull)
def removeContainerProperty(propertyId: Any): Boolean = p.removeContainerProperty(propertyId)
def removeItem(itemId: Any): Boolean = p.removeItem(itemId)
def addItem(): Option[Any] = Some(p.addItem())
def addItem(itemId: Any): Option[Item] = optionalWrapItem(p.addItem(itemId))
def containsId(itemId: Any): Boolean = p.containsId(itemId)
def size: Int = p.size()
def property(itemId: Any, propertyId: Any): Option[Property] = optionalWrapProperty(p.getContainerProperty(itemId, propertyId))
def propertyIds(): Iterable[Any] = p.getContainerPropertyIds().asScala
def getType(propertyId: Any): Class[_] = p.getType(propertyId)
protected def wrapItem(unwrapped: com.vaadin.data.Item): Item
//override if needed
protected def wrapProperty(unwrapped: com.vaadin.data.Property): Property = new BasicProperty(unwrapped)
protected def optionalWrapItem(item: com.vaadin.data.Item): Option[Item] = item match {
case i: com.vaadin.data.Item => Some(wrapItem(i))
case _ => None
}
protected def optionalWrapProperty(item: com.vaadin.data.Property): Option[Property] = item match {
case i: com.vaadin.data.Item => Some(wrapProperty(i))
case _ => None
}
}
object Container {
def apply(items: Tuple2[Any, Seq[Tuple2[Any, Any]]]*): Container = fill(new IndexedContainer, items: _*)
def filterable(items: Tuple2[Any, Seq[Tuple2[Any, Any]]]*): FilterableContainer = fill(new IndexedContainer with FilterableContainer, items: _*)
def fill[C <: Container](container: C, items: Tuple2[Any, Seq[Tuple2[Any, Any]]]*): C = {
for (item <- items) {
container.addItem(item._1) match {
case Some(containerItem: Item) => {
for (property <- item._2) {
container.addContainerProperty(property._1, property._2.getClass, None)
containerItem.property(property._1) match {
case Some(p: Property) => p.value = (property._2)
case None =>
}
}
}
case None =>
}
}
container
}
trait Hierarchical extends Container {
def p: com.vaadin.data.Container.Hierarchical with ContainerHierarchicalMixin
import scala.collection.JavaConverters._
def children(itemId: Any): Iterable[Any] = p.getChildren(itemId).asScala
def parent(itemId: Any): Any = p.getParent(itemId)
def parent_=(itemId: Any, newParentId: Any): Unit = p.setParent(itemId, newParentId)
def rootItemIds: Iterable[Any] = p.rootItemIds.asScala
def childrenAllowed(itemId: Any): Boolean = p.areChildrenAllowed(itemId)
def setChildrenAllowed(itemId: Any, areChildrenAllowed: Boolean): Unit = p.setChildrenAllowed(itemId, areChildrenAllowed)
def isRoot(itemId: Any): Boolean = p.isRoot(itemId)
def hasChildren(itemId: Any): Boolean = p.hasChildren(itemId)
}
trait Ordered extends Container {
def p: com.vaadin.data.Container.Ordered with ContainerOrderedMixin
def nextItemId(itemId: Any): Any = p.nextItemId(itemId)
def prevItemId(itemId: Any): Any = p.prevItemId(itemId)
def firstItemId: Any = p.firstItemId
def lastItemId: Any = p.lastItemId
def isFirstId(itemId: Any): Boolean = p.isFirstId(itemId)
def isLastId(itemId: Any): Boolean = p.isLastId(itemId)
def addItemAfter(previousItemId: Any): Any = p.addItemAfter(previousItemId)
def addItemAfter(previousItemId: Any, newItemId: Any): Item = wrapItem(p.addItemAfter(previousItemId, newItemId))
}
trait Viewer extends Wrapper {
def p: com.vaadin.data.Container.Viewer with ContainerViewerMixin
def container_=(container: Option[Container]): Unit = if (container.isDefined) p.setContainerDataSource(container.get.p) else p.setContainerDataSource(null)
def container_=(container: Container): Unit = if (container != null) p.setContainerDataSource(container.p) else p.setContainerDataSource(null)
def container: Option[Container] = wrapperFor[Container](p.getContainerDataSource)
}
trait Sortable extends Ordered {
import scala.collection.JavaConverters._
def p: com.vaadin.data.Container.Sortable with ContainerSortableMixin
def sort(propertyId: Array[AnyRef], ascending: Array[Boolean]): Unit = p.sort(propertyId, ascending)
def sortableContainerPropertyIds(): Iterable[Any] = p.getSortableContainerPropertyIds.asScala
}
trait Indexed extends Ordered {
def p: com.vaadin.data.Container.Indexed with ContainerIndexedMixin
def indexOfId(itemId: Any): Int = p.indexOfId(itemId)
def getIdByIndex(index: Int): Any = p.getIdByIndex(index)
def addItemAt(index: Int): Any = p.addItemAt(index)
def addItemAt(index: Int, newItemId: Any): Item = wrapItem(p.addItemAt(index, newItemId))
}
}
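// A minimal usage sketch of the companion's factory methods above; the item ids and
// property values are hypothetical.
object ContainerExample {
  // Builds an IndexedContainer holding two items, each with "name" and "age" properties.
  val people: Container = Container(
    "person-1" -> Seq("name" -> "Ada", "age" -> 36),
    "person-2" -> Seq("name" -> "Alan", "age" -> 41)
  )
}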
|
CloudInABox/scalavaadinutils
|
src/main/scala/vaadin/scala/Container.scala
|
Scala
|
mit
| 6,020
|
package kidstravel.client.components
import diode.Action
import diode.data.Pot
import diode.react.ModelProxy
import diode.react.ReactPot._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
trait SearchBox {
type T
def getAction: String => Action
def updateAction: Seq[T] => Action
def asString: T => String
case class Props(proxy: ModelProxy[Pot[Seq[T]]])
class Backend($: BackendScope[Props, Unit]) {
private def updateCandidates(e: ReactEventI): Callback = {
val fragment = e.target.value
if (fragment.length >= 3)
$.props >>= (_.proxy.dispatch(getAction(fragment)))
else
$.props >>= (_.proxy.dispatch(updateAction(Seq.empty)))
}
def render(props: Props) =
<.div(
<.input(
^.`type` := "text",
^.placeholder := "Enter at least 3 characters",
^.onKeyUp ==> updateCandidates
),
props.proxy().renderFailed(ex => "Error loading"),
props.proxy().renderPending(_ > 100, _ => <.p("Loading …")),
props.proxy().render(ts => <.ol(ts.map(t => <.li(asString(t)))))
)
}
private val component = ReactComponentB[Props]("SearchBox").
renderBackend[Backend].
build
def apply(proxy: ModelProxy[Pot[Seq[T]]]) = component(Props(proxy))
}
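// A minimal sketch of a concrete implementation; the action classes and the String
// candidate type are hypothetical and not part of the original file.
object CitySearchBox extends SearchBox {
  type T = String
  case class FetchCities(fragment: String) extends Action
  case class UpdateCities(cities: Seq[String]) extends Action
  override def getAction = FetchCities.apply _
  override def updateAction = UpdateCities.apply _
  override def asString = (city: String) => city
}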
|
devkat/kidstravel
|
client/src/main/scala/kidstravel/client/components/SearchBox.scala
|
Scala
|
apache-2.0
| 1,315
|