code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager
import java.util.concurrent.CompletableFuture
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import org.apache.flink.api.common.JobID
import org.apache.flink.runtime.akka.ListeningBehaviour
import org.apache.flink.runtime.checkpoint.{CheckpointCoordinator, CompletedCheckpoint}
import org.apache.flink.runtime.client.JobExecutionException
import org.apache.flink.runtime.io.network.partition.ResultPartitionType
import org.apache.flink.runtime.jobgraph.tasks.{ExternalizedCheckpointSettings, JobCheckpointingSettings}
import org.apache.flink.runtime.jobgraph.{DistributionPattern, JobGraph, JobVertex, ScheduleMode}
import org.apache.flink.runtime.jobmanager.Tasks._
import org.apache.flink.runtime.jobmanager.scheduler.{NoResourceAvailableException, SlotSharingGroup}
import org.apache.flink.runtime.messages.JobManagerMessages._
import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages._
import org.apache.flink.runtime.testingUtils.{ScalaTestingUtils, TestingUtils}
import org.apache.flink.runtime.testtasks._
import org.junit.runner.RunWith
import org.mockito.Mockito
import org.mockito.Mockito._
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.Random
import scala.util.control.NonFatal
// Integration tests exercising the JobManager actor end-to-end against a local
// testing cluster: job submission, scheduling, failure handling, session
// management and savepoint messages.
@RunWith(classOf[JUnitRunner])
class JobManagerITCase(_system: ActorSystem)
extends TestKit(_system)
with ImplicitSender
with WordSpecLike
with Matchers
with BeforeAndAfterAll
with ScalaTestingUtils {
// Default timeout used by all `ask` calls and Await.result invocations below.
implicit val duration = 1 minute
implicit val timeout = Timeout.durationToTimeout(duration)
// No-arg constructor used by the JUnit runner; creates a dedicated actor system.
def this() = this(ActorSystem("TestingActorSystem", TestingUtils.testConfig))
override def afterAll(): Unit = {
// Tear down the shared actor system once the whole suite has finished.
TestKit.shutdownActorSystem(system)
}
"The JobManager actor" must {
"handle jobs when not enough slots" in {
val vertex = new JobVertex("Test Vertex")
vertex.setParallelism(2)
vertex.setInvokableClass(classOf[BlockingNoOpInvokable])
val jobGraph = new JobGraph("Test Job", vertex)
val cluster = TestingUtils.startTestingCluster(1)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
val response = jmGateway.ask(RequestTotalNumberOfSlots, timeout.duration).mapTo[Int]
val availableSlots = Await.result(response, duration)
availableSlots should equal(1)
within(2 second) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID()))
}
within(2 second) {
val response = expectMsgType[JobResultFailure]
val exception = response.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
new NoResourceAvailableException(1,1,0) should equal(e.getCause())
case e => fail(s"Received wrong exception of type $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
}
finally {
cluster.stop()
}
}
"support immediate scheduling of a single vertex" in {
val num_tasks = 133
val vertex = new JobVertex("Test Vertex")
vertex.setParallelism(num_tasks)
vertex.setInvokableClass(classOf[NoOpInvokable])
val jobGraph = new JobGraph("Test Job", vertex)
val cluster = TestingUtils.startTestingCluster(num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
val response = jmGateway.ask(RequestTotalNumberOfSlots, timeout.duration).mapTo[Int]
val availableSlots = Await.result(response, duration)
availableSlots should equal(num_tasks)
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val result = expectMsgType[JobResultSuccess]
result.result.getJobId() should equal(jobGraph.getJobID)
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support queued scheduling of a single vertex" in {
val num_tasks = 111
val vertex = new JobVertex("Test Vertex")
vertex.setParallelism(num_tasks)
vertex.setInvokableClass(classOf[NoOpInvokable])
val jobGraph = new JobGraph("Test job", vertex)
jobGraph.setAllowQueuedScheduling(true)
val cluster = TestingUtils.startTestingCluster(10)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val result = expectMsgType[JobResultSuccess]
result.result.getJobId() should equal(jobGraph.getJobID)
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support forward jobs" in {
val num_tasks = 31
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[Sender])
receiver.setInvokableClass(classOf[Receiver])
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise Job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(2 * num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val result = expectMsgType[JobResultSuccess]
result.result.getJobId() should equal(jobGraph.getJobID)
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support bipartite job" in {
val num_tasks = 31
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[Sender])
receiver.setInvokableClass(classOf[AgnosticReceiver])
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Bipartite Job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(2 * num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support two input job failing edge mismatch" in {
val num_tasks = 1
val sender1 = new JobVertex("Sender1")
val sender2 = new JobVertex("Sender2")
val receiver = new JobVertex("Receiver")
sender1.setInvokableClass(classOf[Sender])
sender2.setInvokableClass(classOf[Sender])
receiver.setInvokableClass(classOf[AgnosticTertiaryReceiver])
sender1.setParallelism(num_tasks)
sender2.setParallelism(2 * num_tasks)
receiver.setParallelism(3 * num_tasks)
receiver.connectNewDataSetAsInput(sender1, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
receiver.connectNewDataSetAsInput(sender2, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Bipartite Job", sender1, receiver, sender2)
val cluster = TestingUtils.startTestingCluster(6 * num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support two input job" in {
val num_tasks = 11
val sender1 = new JobVertex("Sender1")
val sender2 = new JobVertex("Sender2")
val receiver = new JobVertex("Receiver")
sender1.setInvokableClass(classOf[Sender])
sender2.setInvokableClass(classOf[Sender])
receiver.setInvokableClass(classOf[AgnosticBinaryReceiver])
sender1.setParallelism(num_tasks)
sender2.setParallelism(2 * num_tasks)
receiver.setParallelism(3 * num_tasks)
receiver.connectNewDataSetAsInput(sender1, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
receiver.connectNewDataSetAsInput(sender2, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Bipartite Job", sender1, receiver, sender2)
val cluster = TestingUtils.startTestingCluster(6 * num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"support scheduling all at once" in {
val num_tasks = 16
val sender = new JobVertex("Sender")
val forwarder = new JobVertex("Forwarder")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[Sender])
forwarder.setInvokableClass(classOf[Forwarder])
receiver.setInvokableClass(classOf[AgnosticReceiver])
sender.setParallelism(num_tasks)
forwarder.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
val sharingGroup = new SlotSharingGroup(sender.getID, receiver.getID)
sender.setSlotSharingGroup(sharingGroup)
forwarder.setSlotSharingGroup(sharingGroup)
receiver.setSlotSharingGroup(sharingGroup)
forwarder.connectNewDataSetAsInput(sender, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED)
receiver.connectNewDataSetAsInput(forwarder, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Forwarding Job", sender, forwarder, receiver)
jobGraph.setScheduleMode(ScheduleMode.EAGER)
val cluster = TestingUtils.startTestingCluster(num_tasks, 1)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
}
} finally {
cluster.stop()
}
}
"handle job with a failing sender vertex" in {
val num_tasks = 100
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[ExceptionSender])
receiver.setInvokableClass(classOf[Receiver])
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise Job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(RequestTotalNumberOfSlots, self)
expectMsg(num_tasks)
}
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"handle job with an occasionally failing sender vertex" in {
val num_tasks = 100
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[SometimesExceptionSender])
receiver.setInvokableClass(classOf[Receiver])
// set failing senders
SometimesExceptionSender.failingSenders = Seq.fill(10)(Random.nextInt(num_tasks)).toSet
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise Job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(RequestTotalNumberOfSlots, self)
expectMsg(num_tasks)
}
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"handle job with a failing receiver vertex" in {
val num_tasks = 200
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[Sender])
receiver.setInvokableClass(classOf[ExceptionReceiver])
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(2 * num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"handle job with all vertices failing during instantiation" in {
val num_tasks = 200
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[InstantiationErrorSender])
receiver.setInvokableClass(classOf[Receiver])
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(RequestTotalNumberOfSlots, self)
expectMsg(num_tasks)
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"handle job with some vertices failing during instantiation" in {
val num_tasks = 200
val sender = new JobVertex("Sender")
val receiver = new JobVertex("Receiver")
sender.setInvokableClass(classOf[SometimesInstantiationErrorSender])
receiver.setInvokableClass(classOf[Receiver])
// set the failing sender tasks
SometimesInstantiationErrorSender.failingSenders =
Seq.fill(10)(Random.nextInt(num_tasks)).toSet
sender.setParallelism(num_tasks)
receiver.setParallelism(num_tasks)
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE,
ResultPartitionType.PIPELINED)
val jobGraph = new JobGraph("Pointwise job", sender, receiver)
val cluster = TestingUtils.startTestingCluster(num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
jmGateway.tell(RequestTotalNumberOfSlots, self)
expectMsg(num_tasks)
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
val failure = expectMsgType[JobResultFailure]
val exception = failure.cause.deserializeError(getClass.getClassLoader())
exception match {
case e: JobExecutionException =>
jobGraph.getJobID should equal(e.getJobID)
case e => fail(s"Received wrong exception $e.")
}
}
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally {
cluster.stop()
}
}
"check that all job vertices have completed the call to finalizeOnMaster before the job " +
"completes" in {
val num_tasks = 31
val source = new JobVertex("Source")
val sink = new WaitingOnFinalizeJobVertex("Sink", 500)
source.setInvokableClass(classOf[WaitingNoOpInvokable])
sink.setInvokableClass(classOf[NoOpInvokable])
source.setParallelism(num_tasks)
sink.setParallelism(num_tasks)
val jobGraph = new JobGraph("SubtaskInFinalStateRaceCondition", source, sink)
val cluster = TestingUtils.startTestingCluster(2*num_tasks)
val jmGateway = cluster.getLeaderGateway(1 seconds)
try{
within(TestingUtils.TESTING_DURATION){
jmGateway.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
}
sink.finished should equal(true)
jmGateway.tell(NotifyWhenJobRemoved(jobGraph.getJobID), self)
expectMsg(true)
} finally{
cluster.stop()
}
}
"remove execution graphs when the client ends the session explicitly" in {
val vertex = new JobVertex("Test Vertex")
vertex.setInvokableClass(classOf[NoOpInvokable])
val jobGraph1 = new JobGraph("Test Job", vertex)
val slowVertex = new WaitingOnFinalizeJobVertex("Long running Vertex", 2000)
slowVertex.setInvokableClass(classOf[NoOpInvokable])
val jobGraph2 = new JobGraph("Long running Job", slowVertex)
val cluster = TestingUtils.startTestingCluster(1)
val jm = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
/* jobgraph1 is removed after being terminated */
jobGraph1.setSessionTimeout(9999)
jm.tell(SubmitJob(jobGraph1, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph1.getJobID))
expectMsgType[JobResultSuccess]
// should not be archived yet
jm.tell(RequestExecutionGraph(jobGraph1.getJobID), self)
var cachedGraph = expectMsgType[ExecutionGraphFound].executionGraph
assert(!cachedGraph.isArchived)
jm.tell(RemoveCachedJob(jobGraph1.getJobID), self)
jm.tell(RequestExecutionGraph(jobGraph1.getJobID), self)
cachedGraph = expectMsgType[ExecutionGraphFound].executionGraph
assert(cachedGraph.isArchived)
/* jobgraph2 is removed while running */
jobGraph2.setSessionTimeout(9999)
jm.tell(SubmitJob(jobGraph2, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph2.getJobID))
// job still running
jm.tell(RemoveCachedJob(jobGraph2.getJobID), self)
expectMsgType[JobResultSuccess]
// should be archived!
jm.tell(RequestExecutionGraph(jobGraph2.getJobID), self)
cachedGraph = expectMsgType[ExecutionGraphFound].executionGraph
assert(cachedGraph.isArchived)
}
} finally {
cluster.stop()
}
}
"remove execution graphs when when the client's session times out" in {
val vertex = new JobVertex("Test Vertex")
vertex.setParallelism(1)
vertex.setInvokableClass(classOf[NoOpInvokable])
val jobGraph = new JobGraph("Test Job", vertex)
val cluster = TestingUtils.startTestingCluster(1)
val jm = cluster.getLeaderGateway(1 seconds)
try {
within(TestingUtils.TESTING_DURATION) {
// try multiple times in case of flaky environments
var testSucceeded = false
var numTries = 0
while(!testSucceeded && numTries < 10) {
try {
// should be removed immediately
jobGraph.setSessionTimeout(0)
jm.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
jm.tell(RequestExecutionGraph(jobGraph.getJobID), self)
val cachedGraph2 = expectMsgType[ExecutionGraphFound].executionGraph
assert(cachedGraph2.isArchived)
// removed after 2 seconds
jobGraph.setSessionTimeout(2)
jm.tell(SubmitJob(jobGraph, ListeningBehaviour.EXECUTION_RESULT), self)
expectMsg(JobSubmitSuccess(jobGraph.getJobID))
expectMsgType[JobResultSuccess]
// should not be archived yet
jm.tell(RequestExecutionGraph(jobGraph.getJobID), self)
val cachedGraph = expectMsgType[ExecutionGraphFound].executionGraph
assert(!cachedGraph.isArchived)
// wait until graph is archived
Thread.sleep(3000)
jm.tell(RequestExecutionGraph(jobGraph.getJobID), self)
val graph = expectMsgType[ExecutionGraphFound].executionGraph
assert(graph.isArchived)
testSucceeded = true
} catch {
case e: Throwable =>
numTries += 1
}
}
if(!testSucceeded) {
fail("Test case failed after " + numTries + " probes.")
}
}
} finally {
cluster.stop()
}
}
// ------------------------------------------------------------------------
// Savepoint messages
// ------------------------------------------------------------------------
// Triggering a savepoint for an unknown JobID must answer with a
// TriggerSavepointFailure carrying an IllegalArgumentException.
"handle trigger savepoint response for non-existing job" in {
val deadline = TestingUtils.TESTING_DURATION.fromNow
val flinkCluster = TestingUtils.startTestingCluster(0, 0)
try {
within(deadline.timeLeft) {
val jobManager = flinkCluster
.getLeaderGateway(deadline.timeLeft)
// we have to make sure that the job manager knows also that he is the leader
// in case of standalone leader retrieval this can happen after the getLeaderGateway call
val leaderFuture = jobManager.ask(NotifyWhenLeader, timeout.duration)
Await.ready(leaderFuture, timeout.duration)
val jobId = new JobID()
// Trigger savepoint for non-existing job
jobManager.tell(TriggerSavepoint(jobId, Option.apply("any")), testActor)
val response = expectMsgType[TriggerSavepointFailure](deadline.timeLeft)
// Verify the response
response.jobId should equal(jobId)
response.cause.getClass should equal(classOf[IllegalArgumentException])
}
}
finally {
flinkCluster.stop()
}
}
// Savepoints on a job without checkpointing configured must be rejected
// with an IllegalStateException.
"handle trigger savepoint response for job with disabled checkpointing" in {
val deadline = TestingUtils.TESTING_DURATION.fromNow
val flinkCluster = TestingUtils.startTestingCluster(1, 1)
try {
within(deadline.timeLeft) {
val jobManager = flinkCluster
.getLeaderGateway(deadline.timeLeft)
val jobVertex = new JobVertex("Blocking vertex")
jobVertex.setInvokableClass(classOf[BlockingNoOpInvokable])
val jobGraph = new JobGraph(jobVertex)
// Submit job w/o checkpointing configured
jobManager.tell(SubmitJob(jobGraph, ListeningBehaviour.DETACHED), testActor)
expectMsg(JobSubmitSuccess(jobGraph.getJobID()))
// Trigger savepoint for job with disabled checkpointing
jobManager.tell(TriggerSavepoint(jobGraph.getJobID(), Option.apply("any")), testActor)
val response = expectMsgType[TriggerSavepointFailure](deadline.timeLeft)
// Verify the response
response.jobId should equal(jobGraph.getJobID())
response.cause.getClass should equal(classOf[IllegalStateException])
response.cause.getMessage should (include("disabled") or include("configured"))
}
}
finally {
flinkCluster.stop()
}
}
"handle trigger savepoint response after trigger savepoint failure" in {
val deadline = TestingUtils.TESTING_DURATION.fromNow
val flinkCluster = TestingUtils.startTestingCluster(1, 1)
try {
within(deadline.timeLeft) {
val jobManager = flinkCluster
.getLeaderGateway(deadline.timeLeft)
val jobVertex = new JobVertex("Blocking vertex")
jobVertex.setInvokableClass(classOf[BlockingNoOpInvokable])
val jobGraph = new JobGraph(jobVertex)
jobGraph.setSnapshotSettings(new JobCheckpointingSettings(
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
60000,
60000,
60000,
1,
ExternalizedCheckpointSettings.none,
null,
true))
// Submit job...
jobManager.tell(SubmitJob(jobGraph, ListeningBehaviour.DETACHED), testActor)
expectMsg(JobSubmitSuccess(jobGraph.getJobID()))
// Request the execution graph and set a checkpoint coordinator mock
jobManager.tell(RequestExecutionGraph(jobGraph.getJobID), testActor)
val executionGraph = expectMsgType[ExecutionGraphFound](
deadline.timeLeft).executionGraph
// Mock the checkpoint coordinator
val checkpointCoordinator = mock(classOf[CheckpointCoordinator])
doThrow(new Exception("Expected Test Exception"))
.when(checkpointCoordinator)
.triggerSavepoint(org.mockito.Matchers.anyLong(), org.mockito.Matchers.anyString())
// Update the savepoint coordinator field
val field = executionGraph.getClass.getDeclaredField("checkpointCoordinator")
field.setAccessible(true)
field.set(executionGraph, checkpointCoordinator)
// Trigger savepoint for job
jobManager.tell(TriggerSavepoint(jobGraph.getJobID(), Option.apply("any")), testActor)
val response = expectMsgType[TriggerSavepointFailure](deadline.timeLeft)
// Verify the response
response.jobId should equal(jobGraph.getJobID())
response.cause.getCause.getClass should equal(classOf[Exception])
response.cause.getCause.getMessage should equal("Expected Test Exception")
}
}
finally {
flinkCluster.stop()
}
}
"handle failed savepoint triggering" in {
val deadline = TestingUtils.TESTING_DURATION.fromNow
val flinkCluster = TestingUtils.startTestingCluster(1, 1)
try {
within(deadline.timeLeft) {
val jobManager = flinkCluster
.getLeaderGateway(deadline.timeLeft)
val jobVertex = new JobVertex("Blocking vertex")
jobVertex.setInvokableClass(classOf[BlockingNoOpInvokable])
val jobGraph = new JobGraph(jobVertex)
jobGraph.setSnapshotSettings(new JobCheckpointingSettings(
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
60000,
60000,
60000,
1,
ExternalizedCheckpointSettings.none,
null,
true))
// Submit job...
jobManager.tell(SubmitJob(jobGraph, ListeningBehaviour.DETACHED), testActor)
expectMsg(JobSubmitSuccess(jobGraph.getJobID()))
// Mock the checkpoint coordinator
val checkpointCoordinator = mock(classOf[CheckpointCoordinator])
doThrow(new Exception("Expected Test Exception"))
.when(checkpointCoordinator)
.triggerSavepoint(org.mockito.Matchers.anyLong(), org.mockito.Matchers.anyString())
val savepointPathPromise = new CompletableFuture[CompletedCheckpoint]()
doReturn(savepointPathPromise)
.when(checkpointCoordinator)
.triggerSavepoint(org.mockito.Matchers.anyLong(), org.mockito.Matchers.anyString())
// Request the execution graph and set a checkpoint coordinator mock
jobManager.tell(RequestExecutionGraph(jobGraph.getJobID), testActor)
val executionGraph = expectMsgType[ExecutionGraphFound](
deadline.timeLeft).executionGraph
// Update the savepoint coordinator field
val field = executionGraph.getClass.getDeclaredField("checkpointCoordinator")
field.setAccessible(true)
field.set(executionGraph, checkpointCoordinator)
// Trigger savepoint for job
jobManager.tell(TriggerSavepoint(jobGraph.getJobID(), Option.apply("any")), testActor)
// Fail the promise
savepointPathPromise.completeExceptionally(new Exception("Expected Test Exception"))
val response = expectMsgType[TriggerSavepointFailure](deadline.timeLeft)
// Verify the response
response.jobId should equal(jobGraph.getJobID())
response.cause.getCause.getClass should equal(classOf[Exception])
response.cause.getCause.getMessage should equal("Expected Test Exception")
}
}
finally {
flinkCluster.stop()
}
}
"handle trigger savepoint response after succeeded savepoint future" in {
val deadline = TestingUtils.TESTING_DURATION.fromNow
val flinkCluster = TestingUtils.startTestingCluster(1, 1)
try {
within(deadline.timeLeft) {
val jobManager = flinkCluster
.getLeaderGateway(deadline.timeLeft)
val jobVertex = new JobVertex("Blocking vertex")
jobVertex.setInvokableClass(classOf[BlockingNoOpInvokable])
val jobGraph = new JobGraph(jobVertex)
jobGraph.setSnapshotSettings(new JobCheckpointingSettings(
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
java.util.Collections.emptyList(),
60000,
60000,
60000,
1,
ExternalizedCheckpointSettings.none,
null,
true))
// Submit job...
jobManager.tell(SubmitJob(jobGraph, ListeningBehaviour.DETACHED), testActor)
expectMsg(JobSubmitSuccess(jobGraph.getJobID()))
// Mock the checkpoint coordinator
val checkpointCoordinator = mock(classOf[CheckpointCoordinator])
doThrow(new Exception("Expected Test Exception"))
.when(checkpointCoordinator)
.triggerSavepoint(org.mockito.Matchers.anyLong(), org.mockito.Matchers.anyString())
val savepointPromise = new CompletableFuture[CompletedCheckpoint]()
doReturn(savepointPromise)
.when(checkpointCoordinator)
.triggerSavepoint(org.mockito.Matchers.anyLong(), org.mockito.Matchers.anyString())
// Request the execution graph and set a checkpoint coordinator mock
jobManager.tell(RequestExecutionGraph(jobGraph.getJobID), testActor)
val executionGraph = expectMsgType[ExecutionGraphFound](
deadline.timeLeft).executionGraph
// Update the savepoint coordinator field
val field = executionGraph.getClass.getDeclaredField("checkpointCoordinator")
field.setAccessible(true)
field.set(executionGraph, checkpointCoordinator)
// Trigger savepoint for job
jobManager.tell(TriggerSavepoint(jobGraph.getJobID(), Option.apply("any")), testActor)
val checkpoint = Mockito.mock(classOf[CompletedCheckpoint])
when(checkpoint.getExternalPointer).thenReturn("Expected test savepoint path")
// Succeed the promise
savepointPromise.complete(checkpoint)
val response = expectMsgType[TriggerSavepointSuccess](deadline.timeLeft)
// Verify the response
response.jobId should equal(jobGraph.getJobID())
response.savepointPath should equal("Expected test savepoint path")
}
}
finally {
flinkCluster.stop()
}
}
}
// Test vertex whose finalizeOnMaster sleeps for `waitingTime` ms, used to
// detect races between task completion and master-side finalization.
class WaitingOnFinalizeJobVertex(name: String, val waitingTime: Long) extends JobVertex(name){
// Set to true once finalizeOnMaster has completed; asserted by the tests.
var finished = false
override def finalizeOnMaster(loader: ClassLoader): Unit = {
Thread.sleep(waitingTime)
finished = true
}
}
}
| zohar-mizrahi/flink | flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala | Scala | apache-2.0 | 37,133 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.benchmarks
import java.util.concurrent.TimeUnit
import monix.eval.Coeval
import org.openjdk.jmh.annotations._
/** To do comparative benchmarks between versions:
*
* benchmarks/run-benchmark CoevalMapStreamBenchmark
*
* This will generate results in `benchmarks/results`.
*
* Or to run the benchmark from within SBT:
*
* jmh:run -i 10 -wi 10 -f 2 -t 1 monix.benchmarks.CoevalMapStreamBenchmark
*
* Which means "10 iterations", "10 warm-up iterations", "2 forks", "1 thread".
* Please note that benchmarks should be usually executed at least in
* 10 iterations (as a rule of thumb), but more is better.
*/
@State(Scope.Thread)
@BenchmarkMode(Array(Mode.Throughput))
@OutputTimeUnit(TimeUnit.SECONDS)
class CoevalMapStreamBenchmark {
  import CoevalMapStreamBenchmark.streamTest

  // The three cases vary the trade-off between stream length and the number
  // of stacked `mapStream` layers applied on top of it.

  // A long stream with a single map stage.
  @Benchmark
  def one(): Long = streamTest(12000, 1)

  // A shorter stream with 30 stacked map stages.
  @Benchmark
  def batch30(): Long = streamTest(1000, 30)

  // A short stream with 120 stacked map stages.
  @Benchmark
  def batch120(): Long = streamTest(100, 120)
}
object CoevalMapStreamBenchmark {

  /**
   * Builds a lazy stream of `times` elements, layers `batchSize` map stages
   * on top of it, and then forces the sum of all elements.
   */
  def streamTest(times: Int, batchSize: Int): Long = {
    val mapped = (1 to batchSize).foldLeft(range(0, times)) { (stream, _) =>
      mapStream(addOne)(stream)
    }
    sum(0)(mapped).value
  }

  /** A cons cell: the head value plus a lazily evaluated tail. */
  final case class Stream(value: Int, next: Coeval[Option[Stream]])

  val addOne: Int => Int = _ + 1

  /** Lazily produces the integers in `[from, until)` as a [[Stream]]. */
  def range(from: Int, until: Int): Option[Stream] =
    if (from >= until) None
    else Some(Stream(from, Coeval(range(from + 1, until))))

  /** Applies `f` to every element, keeping the tail lazy. */
  def mapStream(f: Int => Int)(box: Option[Stream]): Option[Stream] =
    box.map(s => Stream(f(s.value), s.next.map(mapStream(f))))

  /** Accumulates the sum of all elements on top of `acc`. */
  def sum(acc: Long)(box: Option[Stream]): Coeval[Long] =
    box.fold(Coeval.pure(acc))(s => s.next.flatMap(sum(acc + s.value)))
}
| Wogan/monix | benchmarks/shared/src/main/scala/monix/benchmarks/CoevalMapStreamBenchmark.scala | Scala | apache-2.0 | 2,625 |
package es.weso.shex.jast
import com.typesafe.config._
import java.io.File
import argonaut._, Argonaut._
import java.nio.ByteBuffer
import java.nio.channels.ReadableByteChannel
import scala.util.{ Try, Success => TrySuccess, Failure => TryFailure }
import scala.io._
import org.scalatest.FunSpec
import org.scalatest._
import es.weso.shex.jast.AST._
import es.weso.shex._
class ShExParserAll extends Driver {

  describe("Test Shex parser:JSON -> AST -> Schema = SheXC -> Schema") {
    // One positive test per schema file found in the schemas folder.
    for ((file, _) <- getParsedSchemas(schemasFolder)) {
      it(s"Should handle ${file.getName}") {
        testComparingSchemas(file)
      }
    }
  }

  describe("Negative JSON tests (parser should complain...)") {
    // One negative test per file: parsing must fail for each of these inputs.
    for (file <- getFilesFromFolder(negativeSyntaxFolder)) {
      it(s"Should fail to parse ${file.getName}") {
        Try(parseShExSchema(file)) match {
          case TrySuccess(result) => fail(s"Parsed ok. Result: ${result}")
          case TryFailure(_)      => () // failed to parse, as expected
        }
      }
    }
  }
}
| labra/ShExcala | src/compat/scala/es/weso/shex/jast/ShExParserAll.scala | Scala | mit | 1,119 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence
import scala.collection.immutable
import akka.event.LoggingAdapter
import scala.util.control.NoStackTrace
import scala.annotation.tailrec
import akka.event.Logging
import akka.actor.ActorRef
import scala.reflect.ClassTag
object PersistentEntity {

  /**
   * Commands to a `PersistentEntity` must implement this interface
   * to define the reply type.
   *
   * `akka.Done` can optionally be used as a "standard" acknowledgment message.
   *
   * @tparam R the type of the reply message
   */
  trait ReplyType[R] {
    // Type member mirroring the type parameter, so the reply type can be
    // referred to through a command value.
    type ReplyType = R
  }

  /**
   * Standard exception when rejecting invalid commands.
   * Extends `NoStackTrace`: this is an expected validation failure, so no
   * stack trace is captured.
   */
  final case class InvalidCommandException(message: String) extends IllegalArgumentException(message) with NoStackTrace

  /**
   * Exception that is used when command is not handled.
   * Extends `NoStackTrace`, so no stack trace is captured.
   */
  final case class UnhandledCommandException(message: String) extends IllegalArgumentException(message) with NoStackTrace

  /**
   * Exception that is used when persist fails.
   * Extends `NoStackTrace`, so no stack trace is captured.
   */
  final case class PersistException(message: String) extends IllegalArgumentException(message) with NoStackTrace
}
/**
* A `PersistentEntity` has a stable entity identifier, with which
* it can be accessed from anywhere in the cluster. It is run by an actor
* and the state is persistent using event sourcing.
*
* `initialState` and `behavior` are abstract methods that your concrete subclass must implement.
* The behavior is defined as a set of actions given a state. The actions are functions to process
* incoming commands and persisted events.
*
* The `PersistentEntity` receives commands of type `Command` that can be validated before
* persisting state changes as events of type `Event`. The functions that process incoming
* commands are registered in the `Actions` using `onCommand` of the
* `Actions`.
*
* A command may also be read-only and only perform some side-effect, such as replying
* to the request. Such command handlers are registered using `onReadOnlyCommand`
* of the `Actions`. Replies are sent with the `reply` method of the context that
* is passed to the command handler function.
*
* A command handler returns a `Persist` directive that defines what event or events,
* if any, to persist. Use the `thenPersist`, `thenPersistAll` or `done` methods of the
* context that is passed to the command handler function to create the `Persist` directive.
*
* When an event has been persisted successfully the state of type `State` is updated by
* applying the event to the current state. The functions for updating the state are
* registered with the `onEvent` method of the `Actions`.
* The event handler returns the new state. The state must be immutable, so you return
* a new instance of the state. Current state is passed as parameter to the event handler.
* The same event handlers are also used when the entity is started up to recover its
* state from the stored events.
*
* After persisting an event, external side effects can be performed in the `afterPersist`
* function that can be defined when creating the `Persist` directive.
* A typical side effect is to reply to the request to confirm that it was performed
* successfully. Replies are sent with the `reply` method of the context that is passed
* to the command handler function.
*
* The event handlers are typically only updating the state, but they may also change
* the behavior of the entity in the sense that new functions for processing commands
* and events may be defined for a given state. This is useful when implementing
* finite state machine (FSM) like entities.
*
* When the entity is started the state is recovered by replaying stored events.
* To reduce this recovery time the entity may start the recovery from a snapshot
* of the state and then only replaying the events that were stored after the snapshot.
* Such snapshots are automatically saved after a configured number of persisted events.
*
* @tparam Command the super type of all commands, must implement [[PersistentEntity.ReplyType]]
* to define the reply type of each command type
* @tparam Event the super type of all events
* @tparam State the type of the state
*/
abstract class PersistentEntity {

  type Command
  type Event
  type State

  // A behavior selects the set of handlers (`Actions`) to use for a given state.
  type Behavior = State => Actions
  // An event handler receives the persisted event together with the current
  // state and returns the new state.
  type EventHandler = PartialFunction[(Event, State), State]
  private[lagom] type CommandHandler = PartialFunction[(Command, CommandContext[Any], State), Persist]
  private[lagom] type ReadOnlyCommandHandler = PartialFunction[(Command, ReadOnlyCommandContext[Any], State), Unit]

  // Assigned once via `internalSetEntityId`; exposed read-only through `entityId`.
  private var _entityId: String = _

  final protected def entityId: String = _entityId

  /**
   * INTERNAL API
   */
  private[lagom] def internalSetEntityId(id: String) = _entityId = id

  /**
   * The name of this entity type. It should be unique among the entity
   * types of the service. By default it is using the short class name
   * of the concrete `PersistentEntity` class. Subclass may override
   * to define other type names. It is needed to override and retain
   * the original name when the class name is changed because this name
   * is part of the key of the store data (it is part of the `persistenceId`
   * of the underlying `PersistentActor`).
   */
  def entityTypeName: String = Logging.simpleName(getClass)

  /**
   * Abstract method that must be implemented by concrete subclass to define
   * the initial state of the entity.
   */
  def initialState: State

  /**
   * Abstract method that must be implemented by concrete subclass to define
   * the behavior of the entity.
   */
  def behavior: Behavior

  /**
   * This method is called to notify the entity that the recovery process
   * is finished.
   */
  def recoveryCompleted(state: State): State = state

  object Actions {
    // Single shared instance with no event handlers and no command handlers.
    val empty = new Actions(PartialFunction.empty, Map.empty)

    def apply(): Actions = empty
  }

  /**
   * Actions consists of functions to process incoming commands
   * and persisted events. `Actions` is an immutable class.
   */
  class Actions(
    val eventHandler: EventHandler,
    val commandHandlers: Map[Class[_], CommandHandler]
  ) extends (State => Actions) {

    /**
     * Extends `State => Actions` so that it can be used directly in
     * [[PersistentEntity#behavior]] when there is only one set of actions
     * independent of state.
     */
    def apply(state: State): Actions = this

    /**
     * Add a command handler. For each command class the handler is a
     * `PartialFunction`. Adding a handler for a command class that was
     * previously defined will replace the previous handler for that class.
     * It is possible to combine handlers from two different `Actions` with
     * [[#orElse]] method.
     */
    def onCommand[C <: Command with PersistentEntity.ReplyType[Reply]: ClassTag, Reply](
      handler: PartialFunction[(Command, CommandContext[Reply], State), Persist]
    ): Actions = {
      val commandClass = implicitly[ClassTag[C]].runtimeClass.asInstanceOf[Class[C]]
      // The handler is stored keyed by the command class, with the `Reply`
      // type erased to `Any`.
      new Actions(eventHandler, commandHandlers.updated(commandClass, handler.asInstanceOf[CommandHandler]))
    }

    /**
     * Add a command handler that will not persist any events. This is a convenience
     * method to [[#onCommand]]. For each command class the handler is a
     * `PartialFunction`. Adding a handler for a command class that was
     * previously defined will replace the previous handler for that class.
     * It is possible to combine handlers from two different `Actions` with
     * [[#orElse]] method.
     */
    def onReadOnlyCommand[C <: Command with PersistentEntity.ReplyType[Reply]: ClassTag, Reply](
      handler: PartialFunction[(Command, ReadOnlyCommandContext[Reply], State), Unit]
    ): Actions = {
      // Adapt the read-only handler into a regular command handler that runs
      // the side effect and then persists nothing (`ctx.done`).
      val delegate: PartialFunction[(Command, CommandContext[Reply], State), Persist] = {
        case params @ (_, ctx, _) if handler.isDefinedAt(params) =>
          handler(params)
          ctx.done
      }
      onCommand[C, Reply](delegate)
    }

    /**
     * Add an event handler. Each handler is a `PartialFunction` and they
     * will be tried in the order they were added, i.e. they are combined
     * with `orElse`.
     */
    def onEvent(handler: EventHandler): Actions = {
      new Actions(eventHandler.orElse(handler), commandHandlers)
    }

    /**
     * Append `eventHandler` and `commandHandlers` from `b` to the handlers
     * of this `Actions`.
     *
     * Event handlers are combined with `orElse` of the partial functions.
     *
     * Command handlers for a specific command class that are defined in
     * both `b` and this `Actions` will be combined with `orElse` of the
     * partial functions.
     */
    def orElse(b: Actions): Actions = {
      // For command classes present in both maps, the entries added last
      // (the orElse-combined handlers) win over the plain entries.
      val commandsInBoth = commandHandlers.keySet intersect b.commandHandlers.keySet
      val newCommandHandlers = commandHandlers ++ b.commandHandlers ++
        commandsInBoth.map(c => c -> commandHandlers(c).orElse(b.commandHandlers(c)))
      new Actions(eventHandler.orElse(b.eventHandler), newCommandHandlers)
    }
  }

  /**
   * The context that is used by read-only command handlers.
   * Replies are sent with the context.
   *
   * @tparam R the reply type of the command
   */
  abstract class ReadOnlyCommandContext[R] {

    /**
     * Send reply to a command. The type `R` must be the reply type defined by
     * the command.
     */
    def reply(msg: R): Unit

    /**
     * Reply with a negative acknowledgment.
     */
    def commandFailed(cause: Throwable): Unit

    /**
     * Reply with a negative acknowledgment using the standard
     * `InvalidCommandException`.
     */
    def invalidCommand(message: String): Unit =
      commandFailed(new PersistentEntity.InvalidCommandException(message))
  }

  /**
   * The context that is used by command handler function.
   * Events are persisted with the context and replies are sent with the context.
   *
   * @tparam R the reply type of the command
   */
  abstract class CommandContext[R] extends ReadOnlyCommandContext[R] {

    /**
     * A command handler may return this `Persist` directive to define
     * that one event is to be persisted. External side effects can be
     * performed after successful persist in the `afterPersist` function.
     */
    def thenPersist[B <: Event](event: B)(afterPersist: B => Unit = (_: B) => ()): Persist =
      PersistOne(event, afterPersist)

    /**
     * A command handler may return this `Persist` directive to define
     * that several events are to be persisted. External side effects can be
     * performed after successful persist in the `afterPersist` function.
     * `afterPersist` is invoked once when all events have been persisted
     * successfully.
     */
    def thenPersistAll(events: Event*)(afterPersist: () => Unit = () => ()): Persist =
      PersistAll(events.to[immutable.Seq], afterPersist)

    /**
     * A command handler may return this `Persist` directive to define
     * that no events are to be persisted.
     */
    def done[B <: Event]: Persist = PersistNone
  }

  /**
   * A command handler returns a `Persist` directive that defines what event or events,
   * if any, to persist. Use the `thenPersist`, `thenPersistAll` or `done` methods of the context
   * that is passed to the command handler function to create the `Persist` directive.
   */
  trait Persist

  /**
   * INTERNAL API
   */
  private[lagom] case class PersistOne[B <: Event](event: B, afterPersist: B => Unit) extends Persist

  /**
   * INTERNAL API
   */
  private[lagom] case class PersistAll(events: immutable.Seq[Event], afterPersist: () => Unit) extends Persist

  /**
   * INTERNAL API
   */
  private[lagom] case object PersistNone extends Persist
}
| edouardKaiser/lagom | persistence/scaladsl/src/main/scala/com/lightbend/lagom/scaladsl/persistence/PersistentEntity.scala | Scala | apache-2.0 | 11,728 |
package bad.robot.radiate
/**
 * The activity state of the application:
 *
 * <ul>
 * <li>Busy - Indicates a busy state, this could be used to overlay a 'spinner' to indicate the user should wait. Use when waiting for IO etc. Set to Idle or Error when done.</li>
 * <li>Progressing - Indicates that work is progressing, this could be used to overlay a 'progress bar' to indicate progress. Use when executing a fixed period task. Set to Idle or Error when done.</li>
 * <li>Idle - Indicates an idle state, this could be used to dismiss any busy or progress indicators. Use having completed work.</li>
 * <li>Error - Indicates an error.</li>
 * </ul>
 */
sealed trait Activity

package activity {
  // The four possible activity states; see the Scaladoc on `Activity` above.
  case object Busy extends Activity
  case object Progressing extends Activity
  case object Idle extends Activity
  case object Error extends Activity
}
package ch.epfl.bluebrain.nexus.iam.permissions
import java.time.Instant
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import cats.Monad
import cats.effect.{Clock, Effect, Timer}
import cats.implicits._
import ch.epfl.bluebrain.nexus.iam.acls.Acls
import ch.epfl.bluebrain.nexus.iam.config.AppConfig.{HttpConfig, PermissionsConfig}
import ch.epfl.bluebrain.nexus.iam.permissions.Permissions._
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsCommand._
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent._
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsRejection._
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsState.{Current, Initial}
import ch.epfl.bluebrain.nexus.iam.types.IamError.{AccessDenied, UnexpectedInitialState}
import ch.epfl.bluebrain.nexus.iam.types._
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.sourcing.akka.AkkaAggregate
import ch.epfl.bluebrain.nexus.sourcing.retry.Retry
/**
* Permissions API.
*
* @param agg the permissions aggregate
* @param acls a lazy acls api
* @param http the application http configuration
* @tparam F the effect type
*/
class Permissions[F[_]: MonadThrowable](
  agg: Agg[F],
  acls: F[Acls[F]]
)(implicit http: HttpConfig, pc: PermissionsConfig) {

  private val F = implicitly[MonadThrowable[F]]
  // All permissions are stored under this single fixed persistence id.
  private val pid = "permissions"

  /**
   * @return the minimum set of permissions
   */
  def minimum: Set[Permission] =
    pc.minimum

  /**
   * @return the current permissions as a resource
   */
  def fetch(implicit caller: Caller): F[Resource] =
    check(read) >> fetchUnsafe

  /**
   * @param rev the permissions revision
   * @return the permissions as a resource at the specified revision
   */
  def fetchAt(rev: Long)(implicit caller: Caller): F[OptResource] =
    // Rebuilds the state by folding the event log, applying only events whose
    // revision is <= the requested one; yields None when the requested
    // revision does not correspond to a materialized state.
    check(read) >> agg
      .foldLeft[State](pid, Initial) {
        case (state, event) if event.rev <= rev => next(pc)(state, event)
        case (state, _)                         => state
      }
      .map {
        case Initial if rev != 0L       => None
        case c: Current if rev != c.rev => None
        case other                      => Some(other.resource)
      }

  /**
   * @return the current permissions collection
   */
  def effectivePermissions(implicit caller: Caller): F[Set[Permission]] =
    fetch.map(_.value)

  /**
   * @return the current permissions as a resource without checking permissions
   */
  def fetchUnsafe: F[Resource] =
    agg.currentState(pid).map(_.resource)

  /**
   * @return the current permissions collection without checking permissions
   */
  def effectivePermissionsUnsafe: F[Set[Permission]] =
    fetchUnsafe.map(_.value)

  /**
   * Replaces the current collection of permissions with the provided collection.
   *
   * @param permissions the permissions to set
   * @param rev the last known revision of the resource
   * @return the new resource metadata or a description of why the change was rejected
   */
  def replace(permissions: Set[Permission], rev: Long = 0L)(implicit caller: Caller): F[MetaOrRejection] =
    check(write) >> eval(ReplacePermissions(rev, permissions, caller.subject))

  /**
   * Appends the provided permissions to the current collection of permissions.
   *
   * @param permissions the permissions to append
   * @param rev the last known revision of the resource
   * @return the new resource metadata or a description of why the change was rejected
   */
  def append(permissions: Set[Permission], rev: Long = 0L)(implicit caller: Caller): F[MetaOrRejection] =
    check(write) >> eval(AppendPermissions(rev, permissions, caller.subject))

  /**
   * Subtracts the provided permissions to the current collection of permissions.
   *
   * @param permissions the permissions to subtract
   * @param rev the last known revision of the resource
   * @return the new resource metadata or a description of why the change was rejected
   */
  def subtract(permissions: Set[Permission], rev: Long)(implicit caller: Caller): F[MetaOrRejection] =
    check(write) >> eval(SubtractPermissions(rev, permissions, caller.subject))

  /**
   * Removes all but the minimum permissions from the collection of permissions.
   *
   * @param rev the last known revision of the resource
   * @return the new resource metadata or a description of why the change was rejected
   */
  def delete(rev: Long)(implicit caller: Caller): F[MetaOrRejection] =
    check(write) >> eval(DeletePermissions(rev, caller.subject))

  // Evaluates the command against the aggregate and maps the result to either
  // the rejection or the metadata of the new state. A successful evaluation
  // can never leave the aggregate in the Initial state, hence the raiseError.
  private def eval(cmd: Command): F[MetaOrRejection] =
    agg
      .evaluateS(pid, cmd)
      .flatMap {
        case Left(rej) => F.pure(Left(rej))
        // $COVERAGE-OFF$
        case Right(Initial) => F.raiseError(UnexpectedInitialState(id))
        // $COVERAGE-ON$
        case Right(c: Current) => F.pure(Right(c.resourceMetadata))
      }

  // Verifies that the caller holds `permission` on the root path (without
  // ancestor resolution), raising AccessDenied otherwise.
  private def check(permission: Permission)(implicit caller: Caller): F[Unit] =
    acls
      .flatMap(_.hasPermission(Path./, permission, ancestors = false))
      .ifM(F.unit, F.raiseError(AccessDenied(id, permission)))
}
object Permissions {

  /**
   * Constructs a new permissions aggregate.
   */
  def aggregate[F[_]: Effect: Timer](
    implicit as: ActorSystem,
    mt: ActorMaterializer,
    pc: PermissionsConfig
  ): F[Agg[F]] =
    AkkaAggregate.sharded[F](
      "permissions",
      PermissionsState.Initial,
      next(pc),
      evaluate[F](pc),
      pc.sourcing.passivationStrategy(),
      Retry(pc.sourcing.retry.retryStrategy),
      pc.sourcing.akkaSourcingConfig,
      pc.sourcing.shards
    )

  /**
   * Creates a new permissions api using the provided aggregate and a lazy reference to the ACL api.
   *
   * @param agg  the permissions aggregate
   * @param acls a lazy reference to the ACL api
   */
  def apply[F[_]: MonadThrowable](agg: Agg[F], acls: F[Acls[F]])(
    implicit
    http: HttpConfig,
    pc: PermissionsConfig
  ): Permissions[F] =
    new Permissions(agg, acls)

  /**
   * Creates a new permissions api using the default aggregate and a lazy reference to the ACL api.
   *
   * @param acls a lazy reference to the ACL api
   */
  def apply[F[_]: Effect: Timer](acls: F[Acls[F]])(
    implicit
    as: ActorSystem,
    mt: ActorMaterializer,
    http: HttpConfig,
    pc: PermissionsConfig
  ): F[Permissions[F]] =
    delay(aggregate, acls)

  /**
   * Creates a new permissions api using the provided aggregate and a lazy reference to the ACL api.
   *
   * @param agg  a lazy reference to the permissions aggregate
   * @param acls a lazy reference to the ACL api
   */
  def delay[F[_]: MonadThrowable](agg: F[Agg[F]], acls: F[Acls[F]])(
    implicit
    http: HttpConfig,
    pc: PermissionsConfig
  ): F[Permissions[F]] =
    agg.map(apply(_, acls))

  /**
   * State transition function: applies a permissions event to the current
   * state. Events whose revision does not follow the current one leave the
   * state unchanged.
   */
  private[permissions] def next(pc: PermissionsConfig)(state: State, event: Event): State = {
    implicit val p: PermissionsConfig = pc

    def appended(e: PermissionsAppended): State = state match {
      case s: Initial if e.rev == 1L        => s.withPermissions(e.permissions, e.instant, e.subject)
      case s: Current if s.rev + 1 == e.rev => s.withPermissions(s.permissions ++ e.permissions, e.instant, e.subject)
      case other                            => other
    }

    def replaced(e: PermissionsReplaced): State = state match {
      case s if s.rev + 1 == e.rev => s.withPermissions(e.permissions, e.instant, e.subject)
      case other                   => other
    }

    def subtracted(e: PermissionsSubtracted): State = state match {
      case s: Current if s.rev + 1 == e.rev => s.withPermissions(s.permissions -- e.permissions, e.instant, e.subject)
      case other                            => other
    }

    def deleted(e: PermissionsDeleted): State = state match {
      case s: Current if s.rev + 1 == e.rev => s.withPermissions(Set.empty, e.instant, e.subject)
      case other                            => other
    }

    event match {
      case e: PermissionsAppended   => appended(e)
      case e: PermissionsReplaced   => replaced(e)
      case e: PermissionsSubtracted => subtracted(e)
      case e: PermissionsDeleted    => deleted(e)
    }
  }

  /**
   * Command evaluation function: validates a command against the current state
   * and produces either the corresponding event or a rejection.
   */
  private def evaluate[F[_]: Monad: Clock](pc: PermissionsConfig)(state: State, cmd: Command): F[EventOrRejection] = {
    val F = implicitly[Monad[F]]
    val C = implicitly[Clock[F]]

    // Produces the event stamped with the current wall-clock instant.
    def accept(f: Instant => PermissionsEvent): F[EventOrRejection] =
      C.realTime(TimeUnit.MILLISECONDS).map(rtl => Right(f(Instant.ofEpochMilli(rtl))))

    def reject(rejection: PermissionsRejection): F[EventOrRejection] =
      F.pure(Left(rejection))

    def replace(c: ReplacePermissions): F[EventOrRejection] =
      if (c.rev != state.rev) reject(IncorrectRev(c.rev, state.rev))
      else if (c.permissions.isEmpty) reject(CannotReplaceWithEmptyCollection)
      // Replacing with a subset of the minimum permissions is also treated as
      // replacing with an empty collection. Parenthesized explicitly: the
      // previous `... -- pc.minimum isEmpty` relied on the postfixOps
      // language feature and triggered a compiler feature warning.
      else if ((c.permissions -- pc.minimum).isEmpty) reject(CannotReplaceWithEmptyCollection)
      else accept(PermissionsReplaced(c.rev + 1, c.permissions, _, c.subject))

    def append(c: AppendPermissions): F[EventOrRejection] = state match {
      case _ if state.rev != c.rev    => reject(IncorrectRev(c.rev, state.rev))
      case _ if c.permissions.isEmpty => reject(CannotAppendEmptyCollection)
      case Initial                    => accept(PermissionsAppended(1L, c.permissions, _, c.subject))
      case s: Current =>
        // Reject appends that would not add any new permission.
        val appended = c.permissions -- s.permissions
        if (appended.isEmpty) reject(CannotAppendEmptyCollection)
        else accept(PermissionsAppended(c.rev + 1, c.permissions, _, c.subject))
    }

    def subtract(c: SubtractPermissions): F[EventOrRejection] = state match {
      case _ if state.rev != c.rev    => reject(IncorrectRev(c.rev, state.rev))
      case _ if c.permissions.isEmpty => reject(CannotSubtractEmptyCollection)
      case Initial                    => reject(CannotSubtractFromMinimumCollection(pc.minimum))
      case s: Current =>
        // intendedDelta: requested permissions that are not currently defined.
        // delta: requested permissions that are currently defined.
        // subtracted: the part of delta that is not protected by the minimum.
        val intendedDelta = c.permissions -- s.permissions
        val delta         = c.permissions & s.permissions
        val subtracted    = delta -- pc.minimum
        if (intendedDelta.nonEmpty) reject(CannotSubtractUndefinedPermissions(intendedDelta))
        else if (subtracted.isEmpty) reject(CannotSubtractFromMinimumCollection(pc.minimum))
        else accept(PermissionsSubtracted(c.rev + 1, delta, _, c.subject))
    }

    def delete(c: DeletePermissions): F[EventOrRejection] = state match {
      case _ if state.rev != c.rev                   => reject(IncorrectRev(c.rev, state.rev))
      case Initial                                   => reject(CannotDeleteMinimumCollection)
      case s: Current if s.permissions == pc.minimum => reject(CannotDeleteMinimumCollection)
      case _: Current                                => accept(PermissionsDeleted(c.rev + 1, _, c.subject))
    }

    cmd match {
      case c: ReplacePermissions  => replace(c)
      case c: AppendPermissions   => append(c)
      case c: SubtractPermissions => subtract(c)
      case c: DeletePermissions   => delete(c)
    }
  }
}
| hygt/nexus-iam | src/main/scala/ch/epfl/bluebrain/nexus/iam/permissions/Permissions.scala | Scala | apache-2.0 | 11,193 |
package scala.collection.immutable
import java.{util => ju}
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.Test
import scala.collection.immutable.ChampMapSmokeTest.mkTuple
object ChampSetSmokeTest {

  /** A fresh empty immutable set of [[CustomHashInt]]. */
  private def emptySet: Set[CustomHashInt] =
    Set.empty[CustomHashInt]

  /** Builds a set containing exactly the given element. */
  private def setOf(item: CustomHashInt): Set[CustomHashInt] =
    emptySet + item

  /** Builds a set containing exactly the two given elements, inserted in order. */
  private def setOf(item0: CustomHashInt, item1: CustomHashInt): Set[CustomHashInt] =
    List(item0, item1).foldLeft(emptySet)(_ + _)

  /** Builds a set containing exactly the three given elements, inserted in order. */
  private def setOf(item0: CustomHashInt, item1: CustomHashInt, item2: CustomHashInt): Set[CustomHashInt] =
    List(item0, item1, item2).foldLeft(emptySet)(_ + _)

  /** Creates a value whose hash code equals the value itself. */
  def mkValue(value: Int): CustomHashInt = new CustomHashInt(value, value)

  /** Creates a value with an explicitly chosen hash code. */
  def mkValue(value: Int, hash: Int): CustomHashInt = new CustomHashInt(value, hash)
}
class ChampSetSmokeTest {
import ChampSetSmokeTest._
@Test def testNodeValNode(): Unit = {
val input = new ju.LinkedHashMap[Integer, Integer]
input.put(1, 1)
input.put(2, 33)
input.put(3, 3)
input.put(4, 4)
input.put(5, 4)
input.put(6, 6)
input.put(7, 7)
input.put(8, 7)
var set: Set[CustomHashInt] = emptySet
input.forEach((key, value) => set = set + mkValue(key, value))
input.forEach((key, value) => assertTrue(set.contains(mkValue(key, value))))
}
@Test def testValNodeVal(): Unit = {
val input = new ju.LinkedHashMap[Integer, Integer]
input.put(1, 1)
input.put(2, 2)
input.put(3, 2)
input.put(4, 4)
input.put(5, 5)
input.put(6, 5)
input.put(7, 7)
var set: Set[CustomHashInt] = emptySet
input.forEach((key, value) => set = set + mkValue(key, value))
input.forEach((key, value) => assertTrue(set.contains(mkValue(key, value))))
}
@Test def testIteration(): Unit = {
val input = new ju.LinkedHashMap[Integer, Integer]
input.put(1, 1)
input.put(2, 2)
input.put(3, 2)
input.put(4, 4)
input.put(5, 5)
input.put(6, 5)
input.put(7, 7)
var set: Set[CustomHashInt] = emptySet
input.forEach((key, value) => set = set + mkValue(key, value))
set.foreach(item => input.remove(item.value))
assertTrue(input.isEmpty)
}
@Test def IterateWithLastBitsDifferent(): Unit = {
val hash_n2147483648_obj1 = mkValue(1, -2147483648)
val hash_p1073741824_obj2 = mkValue(2, 1073741824)
val todo: ju.Set[CustomHashInt] = new ju.HashSet[CustomHashInt]
todo.add(hash_n2147483648_obj1)
todo.add(hash_p1073741824_obj2)
val xs: Set[CustomHashInt] = setOf(hash_n2147483648_obj1, hash_p1073741824_obj2)
xs.forall(todo.remove)
assertEquals(ju.Collections.EMPTY_SET, todo)
}
@Test def TwoCollisionsEquals(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
val ys: Set[CustomHashInt] = setOf(hash98304_obj2, hash98304_obj1)
assertEquals(xs, ys)
}
@Test def ThreeCollisionsEquals(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash98304_obj3 = mkValue(3, 98304)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2, hash98304_obj3)
val ys: Set[CustomHashInt] = setOf(hash98304_obj3, hash98304_obj2, hash98304_obj1)
assertEquals(xs, ys)
}
@Test def RemovalFromCollisionNodeEqualsSingleton(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1)
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2) - hash98304_obj2
assertEquals(xs, ys)
}
@Test def CollisionIterate(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val todo: ju.Set[CustomHashInt] = new ju.HashSet[CustomHashInt]
todo.add(hash98304_obj1)
todo.add(hash98304_obj2)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
xs.forall(todo.remove)
assertEquals(ju.Collections.EMPTY_SET, todo)
}
@Test def CollisionWithMergeInlineAbove1(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2, hash268435456_obj3) - hash268435456_obj3
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineAbove1_2(): Unit = {
val hash8_obj1 = mkValue(1, 8)
val hash8_obj2 = mkValue(2, 8)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash8_obj1, hash8_obj2, hash268435456_obj3) - hash268435456_obj3
val ys: Set[CustomHashInt] = setOf(hash8_obj1, hash8_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineAbove2(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash268435456_obj3, hash98304_obj2) - hash268435456_obj3
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineAbove2_2(): Unit = {
val hash8_obj1 = mkValue(1, 8)
val hash8_obj2 = mkValue(2, 8)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash8_obj1, hash268435456_obj3, hash8_obj2) - hash268435456_obj3
val ys: Set[CustomHashInt] = setOf(hash8_obj1, hash8_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineAbove1RemoveOneCollisonNode(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2, hash268435456_obj3) - hash98304_obj2
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash268435456_obj3)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineAbove2RemoveOneCollisonNode(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash268435456_obj3 = mkValue(3, 268435456)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash268435456_obj3, hash98304_obj2) - hash98304_obj2
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash268435456_obj3)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineBelow1(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash8_obj3 = mkValue(3, 8)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2, hash8_obj3) - hash8_obj3
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineBelow2(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash8_obj3 = mkValue(3, 8)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash8_obj3, hash98304_obj2) - hash8_obj3
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineBelowRemoveOneCollisonNode1(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash8_obj3 = mkValue(3, 8)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash98304_obj2, hash8_obj3) - hash98304_obj2
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash8_obj3)
assertEquals(xs, ys)
}
@Test def CollisionWithMergeInlineBelowRemoveOneCollisonNode2(): Unit = {
val hash98304_obj1 = mkValue(1, 98304)
val hash98304_obj2 = mkValue(2, 98304)
val hash8_obj3 = mkValue(3, 8)
val xs: Set[CustomHashInt] = setOf(hash98304_obj1, hash8_obj3, hash98304_obj2) - hash98304_obj2
val ys: Set[CustomHashInt] = setOf(hash98304_obj1, hash8_obj3)
assertEquals(xs, ys)
}
// Fixture values with deliberately degenerate hash codes, used to exercise
// collision handling and cached size/hash bookkeeping below.
object O1 { override def hashCode = 1 ; override def toString = "O1"}
// NOTE: C overrides hashCode (i % 4 => at most 4 distinct buckets) but not equals,
// so distinct instances collide heavily while remaining unequal — intentional.
class C(val i: Int) { override def hashCode = i % 4 ; override def toString = s"C($i)" }
// 4096 distinct instances crammed into 4 hash buckets.
val cs = Array.tabulate(4096)(new C(_))
// Asserts that `actual` is internally consistent and agrees with `expected` on
// both size and hash code (values that the CHAMP implementation caches).
private def assertSameEqHash(expected: HashSet[Any], actual: HashSet[Any]) = {
  // Counting elements by iteration must match the reported size — guards
  // against a stale cached size.
  assertEquals(List.from(actual).size, actual.size)
  assertEquals(expected.size, actual.size)
  assertEquals(expected.hashCode(), actual.hashCode())
}
@Test def testCachedSizeAndHashCode(): Unit = {
  val emptySet = HashSet.empty[Any]
  var set: HashSet[Any] = emptySet + O1
  assertEquals(1, set.size)
  // Re-adding an element already present must not disturb the cached size/hash.
  set = set + O1
  assertSameEqHash(emptySet + O1, set)
}
@Test def testCachedSizeAndHashCodeCollision(): Unit = {
  val emptySet = HashSet.empty[Any]
  var set: HashSet[Any] = emptySet
  // Build a set full of colliding C instances (only 4 hash buckets for 4096 values).
  for (c <- cs)
    set = set + c
  var set1 = set
  // Re-adding every element must keep the internal cached key-set hash stable.
  for (c <- cs) {
    set1 = set1 + c
    assertEquals(set.rootNode.cachedJavaKeySetHashCode, set1.rootNode.cachedJavaKeySetHashCode)
    // Full structural equality is expensive, so spot-check it only occasionally.
    if (c.i % 41 == 0)
      assertEquals(set, set1)
  }
  assertEquals(set, set1)
  assertSameEqHash(set1, set)
  // mkValue-style tuple element (presumably a key/value pair — see mkTuple helper):
  // adding the same element twice must behave like adding it once.
  var set2 = set + mkTuple(O1, "O1_V2")
  set2 = set2 + mkTuple(O1, "O1_V2")
  assertSameEqHash(set1 + mkTuple(O1, "O1_V2"), set2)
}
@Test def hashCodeCheck(): Unit = {
  // Pins the concrete hashCode of HashSet(1) to detect unintended changes
  // to the set hashing scheme.
  assertEquals(-1075495872, collection.immutable.HashSet(1).hashCode())
}
}
| lrytz/scala | test/junit/scala/collection/immutable/ChampSetSmokeTest.scala | Scala | apache-2.0 | 9,371 |
/*
* Copyright 2011-2018 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.nta.ntarule.rules_2_02
import java.net.URI
import scala.collection.immutable
import eu.cdevreeze.nta.common.taxonomy.Taxonomy
import eu.cdevreeze.nta.common.validator.Result
import eu.cdevreeze.nta.common.validator.TaxonomyDocumentValidator
import eu.cdevreeze.nta.common.validator.TaxonomyValidatorFactory
import eu.cdevreeze.nta.common.validator.ValidationScope
import eu.cdevreeze.nta.ntarule.NtaRuleConfigWrapper
import eu.cdevreeze.nta.ntarule.NtaRules
import eu.cdevreeze.tqa.ENames
import eu.cdevreeze.tqa.base.dom.TaxonomyDocument
import eu.cdevreeze.tqa.base.dom.XsdSchema
/**
 * Validator of rule 2.02.00.09. The rule says that the schema document must have an @attributeFormDefault attribute
 * with value unqualified and an @elementFormDefault attribute with value qualified.
*
* @author Chris de Vreeze
*/
final class Validator_2_02_00_09(val excludedDocumentUris: Set[URI]) extends TaxonomyDocumentValidator {

  def ruleName: String = NtaRules.extractRuleName(getClass)

  /**
   * Validates the schema document against rule 2.02.00.09: the document element must
   * carry elementFormDefault="qualified" and attributeFormDefault="unqualified".
   * Returns one error Result per missing or wrongly-valued attribute (at most two).
   */
  def validateDocument(
    doc: TaxonomyDocument,
    taxonomy: Taxonomy,
    validationScope: ValidationScope): immutable.IndexedSeq[Result] = {

    require(isTypeOfDocumentToValidate(doc, taxonomy), s"Document ${doc.uri} should not be validated")

    val elementFormDefaultOption = doc.documentElement.attributeOption(ENames.ElementFormDefaultEName)
    val attributeFormDefaultOption = doc.documentElement.attributeOption(ENames.AttributeFormDefaultEName)

    // Both attributes follow the same missing/correct/wrong pattern, so the
    // previously duplicated branches are factored into one private helper.
    val elementFormErrors = validateFormDefault(
      elementFormDefaultOption,
      "qualified",
      "missing-element-form-default",
      s"Missing elementFormDefault in '${doc.uri}'",
      "wrong-element-form-default",
      s"Wrong elementFormDefault in '${doc.uri}'")

    val attributeFormErrors = validateFormDefault(
      attributeFormDefaultOption,
      "unqualified",
      "missing-attribute-form-default",
      s"Missing attributeFormDefault in '${doc.uri}'",
      "wrong-attribute-form-default",
      s"Wrong attributeFormDefault in '${doc.uri}'")

    elementFormErrors ++ attributeFormErrors
  }

  /** Only documents whose root element is an XsdSchema are validated by this rule. */
  def isTypeOfDocumentToValidate(doc: TaxonomyDocument, taxonomy: Taxonomy): Boolean = {
    doc.documentElement.isInstanceOf[XsdSchema]
  }

  /**
   * Checks one "form default" attribute value against its required value.
   * Returns an empty sequence when the value is present and correct, or a single
   * error Result with the given code/message when the attribute is missing or wrong.
   */
  private def validateFormDefault(
    attributeValueOption: Option[String],
    requiredValue: String,
    missingErrorCode: String,
    missingErrorMessage: String,
    wrongErrorCode: String,
    wrongErrorMessage: String): immutable.IndexedSeq[Result] = {

    if (attributeValueOption.isEmpty) {
      immutable.IndexedSeq(Result.makeErrorResult(ruleName, missingErrorCode, missingErrorMessage))
    } else if (attributeValueOption.contains(requiredValue)) {
      immutable.IndexedSeq()
    } else {
      immutable.IndexedSeq(Result.makeErrorResult(ruleName, wrongErrorCode, wrongErrorMessage))
    }
  }
}
/** Factory for [[Validator_2_02_00_09]] instances, wired from NTA rule configuration. */
object Validator_2_02_00_09 extends TaxonomyValidatorFactory {

  type Validator = Validator_2_02_00_09
  type CfgWrapper = NtaRuleConfigWrapper

  /** The canonical rule name, derived from the validator class name. */
  def ruleName: String = NtaRules.extractRuleName(classOf[Validator_2_02_00_09])

  /** Builds a validator, excluding the document URIs configured for this rule. */
  def create(configWrapper: NtaRuleConfigWrapper): Validator_2_02_00_09 = {
    val excludedUris = configWrapper.excludedDocumentUrisForRule(ruleName)
    new Validator_2_02_00_09(excludedUris)
  }
}
| dvreeze/nta | src/main/scala/eu/cdevreeze/nta/ntarule/rules_2_02/Validator_2_02_00_09.scala | Scala | apache-2.0 | 3,776 |
package scalation
/** The `state` package contains classes, traits and objects for
 *  state-oriented simulation models (for example, Markov Chains).
 */
package object state { }
| mvnural/scalation | src/main/scala/scalation/state/package.scala | Scala | mit | 181 |
package uk.co.morleydev.zander.client.test.util
import com.github.kristofa.test.http.{MockHttpServer, AbstractHttpResponseProvider}
import uk.co.morleydev.zander.client.test.gen.GenNative
import java.net.BindException
/** Pairs a started [[MockHttpServer]] with the port it is bound to;
 *  stops the server when closed. */
class MockServerAndPort(val server : MockHttpServer, val port : Int) extends AutoCloseable {
  override def close(): Unit = {
    server.stop()
  }
}
/**
 * Starts a [[MockHttpServer]] on a randomly chosen port in [8000, 60000),
 * retrying with a fresh random port whenever the chosen one is already bound,
 * and returns the started server together with its port.
 */
object CreateMockHttpServer extends (AbstractHttpResponseProvider => MockServerAndPort) {
  override def apply(provider: AbstractHttpResponseProvider): MockServerAndPort = synchronized {
    // Recursive retry replaces the original iterator/dropWhile loop: keep
    // rolling ports until construction + start succeeds without a BindException.
    def tryRandomPort(): MockServerAndPort = {
      val candidatePort = GenNative.genInt(8000, 60000)
      val startedServer =
        try {
          val candidateServer = new MockHttpServer(candidatePort, provider)
          candidateServer.start()
          Some(candidateServer)
        } catch {
          // Port already in use — any other exception propagates, as before.
          case _: BindException => None
        }
      startedServer match {
        case Some(server) => new MockServerAndPort(server, candidatePort)
        case None => tryRandomPort()
      }
    }
    tryRandomPort()
  }
}
| MorleyDev/zander.client | src/test/scala/uk/co/morleydev/zander/client/test/util/CreateMockHttpServer.scala | Scala | mit | 970 |
package com.twitter.finagle.client
import com.twitter.finagle._
import com.twitter.finagle.factory.{
BindingFactory, NamerTracingFilter, RefcountedFactory, StatsFactoryWrapper, TimeoutFactory}
import com.twitter.finagle.filter.{DtabStatsFilter, ExceptionSourceFilter, MonitorFilter}
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.param._
import com.twitter.finagle.service._
import com.twitter.finagle.stack.Endpoint
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle.stats.ClientStatsReceiver
import com.twitter.finagle.tracing._
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util.Showable
import com.twitter.util.{Future, Var}
object StackClient {
  /**
   * Canonical Roles for each Client-related Stack modules.
   */
  object Role extends Stack.Role("StackClient"){
    val pool = Stack.Role("Pool")
    val requestDraining = Stack.Role("RequestDraining")
    val prepFactory = Stack.Role("PrepFactory")
    /** PrepConn is special in that it's the first role before the `Endpoint` role */
    val prepConn = Stack.Role("PrepConn")
    val protoTracing = Stack.Role("protoTracing")
  }
  /**
   * A [[com.twitter.finagle.Stack]] representing an endpoint.
   * Note that this is terminated by a [[com.twitter.finagle.service.FailingFactory]]:
   * users are expected to terminate it with a concrete service factory.
   *
   * @see [[com.twitter.finagle.service.ExpiringService]]
   * @see [[com.twitter.finagle.service.FailFastFactory]]
   * @see [[com.twitter.finagle.client.DefaultPool]]
   * @see [[com.twitter.finagle.service.TimeoutFilter]]
   * @see [[com.twitter.finagle.service.FailureAccrualFactory]]
   * @see [[com.twitter.finagle.service.StatsServiceFactory]]
   * @see [[com.twitter.finagle.service.StatsFilter]]
   * @see [[com.twitter.finagle.filter.DtabStatsFilter]]
   * @see [[com.twitter.finagle.tracing.ClientDestTracingFilter]]
   * @see [[com.twitter.finagle.filter.MonitorFilter]]
   * @see [[com.twitter.finagle.filter.ExceptionSourceFilter]]
   */
  def endpointStack[Req, Rep]: Stack[ServiceFactory[Req, Rep]] = {
    // Ensure that we have performed global initialization.
    com.twitter.finagle.Init()
    val stk = new StackBuilder[ServiceFactory[Req, Rep]](nilStack[Req, Rep])
    // prepConn is an identity placeholder; presumably replaced by protocol
    // implementations needing connection preparation — confirm per protocol.
    stk.push(Role.prepConn, identity[ServiceFactory[Req, Rep]](_))
    // NOTE(review): push order is significant here; do not reorder without
    // checking StackBuilder semantics.
    stk.push(ExpiringService.module)
    stk.push(FailFastFactory.module)
    stk.push(DefaultPool.module)
    stk.push(TimeoutFilter.module)
    stk.push(FailureAccrualFactory.module)
    stk.push(StatsServiceFactory.module)
    stk.push(StatsFilter.module)
    stk.push(DtabStatsFilter.module)
    stk.push(ClientDestTracingFilter.module)
    stk.push(MonitorFilter.module)
    stk.push(ExceptionSourceFilter.module)
    stk.result
  }
  /**
   * Creates a default finagle client [[com.twitter.finagle.Stack]].
   * The stack can be configured via [[com.twitter.finagle.Stack.Param]]'s
   * in the finagle package object ([[com.twitter.finagle.param]]) and specific
   * params defined in the companion objects of the respective modules.
   *
   * @see [[com.twitter.finagle.client.StackClient#endpointStack]]
   * @see [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]]
   * @see [[com.twitter.finagle.factory.BindingFactory]]
   * @see [[com.twitter.finagle.factory.RefcountedFactory]]
   * @see [[com.twitter.finagle.factory.TimeoutFactory]]
   * @see [[com.twitter.finagle.factory.StatsFactoryWrapper]]
   * @see [[com.twitter.finagle.FactoryToService]]
   * @see [[com.twitter.finagle.tracing.ClientTracingFilter]]
   * @see [[com.twitter.finagle.tracing.TraceInitializerFilter]]
   */
  def newStack[Req, Rep]: Stack[ServiceFactory[Req, Rep]] = {
    // Builds on top of the per-endpoint stack defined above.
    val stk = new StackBuilder(endpointStack[Req, Rep])
    stk.push(LoadBalancerFactory.module)
    stk.push(NamerTracingFilter.module)
    stk.push(BindingFactory.module)
    stk.push(Role.requestDraining, (fac: ServiceFactory[Req, Rep]) =>
      new RefcountedFactory(fac))
    stk.push(TimeoutFactory.module)
    stk.push(StatsFactoryWrapper.module)
    // prepFactory/protoTracing are identity placeholders for protocol
    // implementations to override.
    stk.push(Role.prepFactory, identity[ServiceFactory[Req, Rep]](_))
    stk.push(FactoryToService.module)
    stk.push(Role.protoTracing, identity[ServiceFactory[Req, Rep]](_))
    stk.push(ClientTracingFilter.module)
    // The TraceInitializerFilter must be pushed after most other modules so that
    // any Tracing produced by those modules is enclosed in the appropriate
    // span.
    stk.push(TraceInitializerFilter.clientModule)
    stk.result
  }
  /**
   * The default params used for client stacks.
   */
  val defaultParams: Stack.Params = Stack.Params.empty + Stats(ClientStatsReceiver)
}
/**
 * A [[com.twitter.finagle.Client Client]] that composes a
 * [[com.twitter.finagle.Stack Stack]].
 */
trait StackClient[Req, Rep]
  extends Client[Req, Rep]
  with Stack.Parameterized[StackClient[Req, Rep]] {
  /** The current stack. */
  def stack: Stack[ServiceFactory[Req, Rep]]
  /** The current parameter map. */
  def params: Stack.Params
  /** A new StackClient with the provided stack. */
  def withStack(stack: Stack[ServiceFactory[Req, Rep]]): StackClient[Req, Rep]
}
/**
 * The standard template implementation for
 * [[com.twitter.finagle.client.StackClient]].
 *
 */
trait StdStackClient[Req, Rep, This <: StdStackClient[Req, Rep, This]]
  extends StackClient[Req, Rep] { self =>
  // Transport-level input/output types; presumably the wire-level message
  // types of the protocol — confirm in concrete implementations.
  protected type In
  protected type Out
  /**
   * Defines a typed [[com.twitter.finagle.client.Transporter]] for this client.
   * Concrete StackClient implementations are expected to specify this.
   */
  protected def newTransporter(): Transporter[In, Out]
  /**
   * Defines a dispatcher, a function which reconciles the stream based
   * `Transport` with a Request/Response oriented `Service`.
   * Together with a `Transporter`, it forms the foundation of a
   * finagle client. Concrete implementations are expected to specify this.
   *
   * @see [[com.twitter.finagle.dispatch.GenSerialServerDispatcher]]
   */
  protected def newDispatcher(transport: Transport[In, Out]): Service[Req, Rep]
  /** Returns a copy of this client with the given stack. */
  def withStack(stack: Stack[ServiceFactory[Req, Rep]]): This =
    copy1(stack = stack)
  /**
   * Creates a new StackClient with `f` applied to `stack`.
   *
   * For expert users only.
   */
  def transformed(f: Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]): This =
    copy1(stack = f(stack))
  /**
   * Creates a new StackClient with parameter `p`.
   */
  override def configured[P: Stack.Param](p: P): This =
    withParams(params+p)
  /**
   * Creates a new StackClient with `params` used to configure this StackClient's `stack`.
   */
  def withParams(params: Stack.Params): This =
    copy1(params = params)
  /**
   * A copy constructor in lieu of defining StackClient as a
   * case class.
   */
  protected def copy1(
    stack: Stack[ServiceFactory[Req, Rep]] = this.stack,
    params: Stack.Params = this.params): This { type In = self.In; type Out = self.Out }
  /**
   * A stackable module that creates new `Transports` (via transporter)
   * when applied.
   */
  protected def endpointer: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module[ServiceFactory[Req, Rep]] {
      val role = Endpoint
      val description = "Send requests over the wire"
      val parameters = Seq(implicitly[Stack.Param[Transporter.EndpointAddr]])
      def make(prms: Stack.Params, next: Stack[ServiceFactory[Req, Rep]]) = {
        val Transporter.EndpointAddr(addr) = prms[Transporter.EndpointAddr]
        // Copy this client with the endpoint's params so the transporter
        // it builds is configured for this specific endpoint address.
        val endpointClient = copy1(params=prms)
        val transporter = endpointClient.newTransporter()
        // Each factory application opens a new transport and wraps it in a dispatcher.
        Stack.Leaf(this, ServiceFactory(() => transporter(addr).map(endpointClient.newDispatcher)))
      }
    }
  /** Materializes the full client stack for `dest`, registering it in the ClientRegistry. */
  def newClient(dest: Name, label0: String): ServiceFactory[Req, Rep] = {
    val Stats(stats) = params[Stats]
    val Label(label1) = params[Label]
    // For historical reasons, we have two sources for identifying
    // a client. The most recently set `label0` takes precedence.
    val clientLabel = (label0, label1) match {
      case ("", "") => Showable.show(dest)
      case ("", l1) => l1
      case (l0, l1) => l0
    }
    // Terminate the configured stack with the endpointer module.
    val clientStack = stack ++ (endpointer +: nilStack)
    val clientParams = (params +
      Label(clientLabel) +
      Stats(stats.scope(clientLabel)) +
      BindingFactory.Dest(dest))
    // for the benefit of ClientRegistry.expAllRegisteredClientsResolved
    // which waits for these to become non-Pending
    val va =
      dest match {
        case Name.Bound(va) => va
        case Name.Path(path) => Namer.resolve(path)
      }
    ClientRegistry.register(clientLabel, Showable.show(dest), clientStack,
      clientParams + LoadBalancerFactory.Dest(va))
    clientStack.make(clientParams)
  }
  /** Like `newClient`, but collapses the factory into a single `Service`. */
  override def newService(dest: Name, label: String): Service[Req, Rep] = {
    val client = copy1(
      params = params + FactoryToService.Enabled(true)
    ).newClient(dest, label)
    new FactoryToService[Req, Rep](client)
  }
}
| kristofa/finagle | finagle-core/src/main/scala/com/twitter/finagle/client/StackClient.scala | Scala | apache-2.0 | 9,032 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.action.async.ws
import io.gatling.http.action.async.{ AsyncEvent, AsyncTx, UserAction }
import io.gatling.http.check.async.AsyncCheck
import io.gatling.core.session.Session
import akka.actor.ActorRef
import io.gatling.core.action.Action
import org.asynchttpclient.ws.WebSocket
/** Marker for all WebSocket-related async events handled by the ws actor. */
sealed trait WsEvent extends AsyncEvent
/** Socket opened: carries the transaction, the open socket and the event time. */
case class OnOpen(tx: AsyncTx, webSocket: WebSocket, time: Long) extends WsEvent
// Inbound frames; `time` is the receive timestamp (epoch millis, presumably — confirm).
case class OnTextMessage(message: String, time: Long) extends WsEvent
case class OnByteMessage(message: Array[Byte], time: Long) extends WsEvent
/** Socket closed with the given close status code and reason. */
case class OnClose(status: Int, reason: String, time: Long) extends WsEvent
/** User-initiated actions on the socket (as opposed to socket-initiated events). */
sealed trait WsUserAction extends UserAction with WsEvent
/** Send a message over the socket, optionally installing a check, then continue with `next`. */
case class Send(requestName: String, message: WsMessage, check: Option[AsyncCheck], next: Action, session: Session) extends WsUserAction
/** The two supported outbound message encodings. */
sealed trait WsMessage
case class BinaryMessage(message: Array[Byte]) extends WsMessage
case class TextMessage(message: String) extends WsMessage
| MykolaB/gatling | gatling-http/src/main/scala/io/gatling/http/action/async/ws/WsEvents.scala | Scala | apache-2.0 | 1,606 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import org.apache.commons.math3.linear._
/**
 * Alternating least squares matrix factorization.
 *
 * This is an example implementation for learning how to use Spark. For more conventional use,
 * please refer to org.apache.spark.ml.recommendation.ALS.
 */
object LocalALS {

  // Parameters set through command line arguments
  var M = 0 // Number of movies
  var U = 0 // Number of users
  var F = 0 // Number of features
  var ITERATIONS = 0
  val LAMBDA = 0.01 // Regularization coefficient

  /** Builds a synthetic M x U ratings matrix R = mh * uh^T from two random factor matrices. */
  def generateR(): RealMatrix = {
    val mh = randomMatrix(M, F)
    val uh = randomMatrix(U, F)
    mh.multiply(uh.transpose())
  }

  /** Root-mean-square error between the reconstruction ms * us^T and `targetR`. */
  def rmse(targetR: RealMatrix, ms: Array[RealVector], us: Array[RealVector]): Double = {
    val r = new Array2DRowRealMatrix(M, U)
    for (i <- 0 until M; j <- 0 until U) {
      r.setEntry(i, j, ms(i).dotProduct(us(j)))
    }
    val diffs = r.subtract(targetR)
    var sumSqs = 0.0
    for (i <- 0 until M; j <- 0 until U) {
      val diff = diffs.getEntry(i, j)
      sumSqs += diff * diff
    }
    math.sqrt(sumSqs / (M.toDouble * U.toDouble))
  }

  /**
   * Solves the regularized least-squares problem for movie `i`'s feature vector,
   * holding all user vectors fixed. (`m` is unused; kept for signature stability.)
   */
  def updateMovie(i: Int, m: RealVector, us: Array[RealVector], R: RealMatrix) : RealVector = {
    var XtX: RealMatrix = new Array2DRowRealMatrix(F, F)
    var Xty: RealVector = new ArrayRealVector(F)
    // For each user that rated the movie
    for (j <- 0 until U) {
      val u = us(j)
      // Add u * u^t to XtX
      XtX = XtX.add(u.outerProduct(u))
      // Add u * rating to Xty
      Xty = Xty.add(u.mapMultiply(R.getEntry(i, j)))
    }
    // Add regularization coefficients to diagonal terms
    for (d <- 0 until F) {
      XtX.addToEntry(d, d, LAMBDA * U)
    }
    // Solve it with Cholesky
    new CholeskyDecomposition(XtX).getSolver.solve(Xty)
  }

  /**
   * Solves the regularized least-squares problem for user `j`'s feature vector,
   * holding all movie vectors fixed. (`u` is unused; kept for signature stability.)
   */
  def updateUser(j: Int, u: RealVector, ms: Array[RealVector], R: RealMatrix) : RealVector = {
    var XtX: RealMatrix = new Array2DRowRealMatrix(F, F)
    var Xty: RealVector = new ArrayRealVector(F)
    // For each movie that the user rated
    for (i <- 0 until M) {
      val m = ms(i)
      // Add m * m^t to XtX
      XtX = XtX.add(m.outerProduct(m))
      // Add m * rating to Xty
      Xty = Xty.add(m.mapMultiply(R.getEntry(i, j)))
    }
    // Add regularization coefficients to diagonal terms
    for (d <- 0 until F) {
      XtX.addToEntry(d, d, LAMBDA * M)
    }
    // Solve it with Cholesky
    new CholeskyDecomposition(XtX).getSolver.solve(Xty)
  }

  // Explicit result type + `= {` replaces deprecated procedure syntax.
  def showWarning(): Unit = {
    System.err.println(
      """WARN: This is a naive implementation of ALS and is given as an example!
        |Please use org.apache.spark.ml.recommendation.ALS
        |for more conventional use.
      """.stripMargin)
  }

  // Explicit result type + `= {` replaces deprecated procedure syntax.
  def main(args: Array[String]): Unit = {
    args match {
      case Array(m, u, f, iters) =>
        M = m.toInt
        U = u.toInt
        F = f.toInt
        ITERATIONS = iters.toInt
      case _ =>
        System.err.println("Usage: LocalALS <M> <U> <F> <iters>")
        System.exit(1)
    }
    showWarning()
    println(s"Running with M=$M, U=$U, F=$F, iters=$ITERATIONS")
    val R = generateR()
    // Initialize m and u randomly
    var ms = Array.fill(M)(randomVector(F))
    var us = Array.fill(U)(randomVector(F))
    // Iteratively update movies then users
    for (iter <- 1 to ITERATIONS) {
      println(s"Iteration $iter:")
      ms = (0 until M).map(i => updateMovie(i, ms(i), us, R)).toArray
      us = (0 until U).map(j => updateUser(j, us(j), ms, R)).toArray
      println(s"RMSE = ${rmse(R, ms, us)}")
    }
  }

  private def randomVector(n: Int): RealVector =
    new ArrayRealVector(Array.fill(n)(math.random))

  private def randomMatrix(rows: Int, cols: Int): RealMatrix =
    new Array2DRowRealMatrix(Array.fill(rows, cols)(math.random))
}
// scalastyle:on println
| lhfei/spark-in-action | spark-2.x/src/main/scala/org/apache/spark/examples/LocalALS.scala | Scala | apache-2.0 | 4,775 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert.network.client.loadbalancer
import com.linkedin.norbert.cluster.{InvalidClusterException, Node}
/**
 * A <code>LoadBalancer</code> handles calculating the next <code>Node</code> a message should be routed to.
 */
trait LoadBalancer {
  /**
   * Returns the next <code>Node</code> a message should be routed to.
   *
   * @return the <code>Some(node)</code> to route the next message to or <code>None</code> if there are no <code>Node</code>s
   * available
   */
  def nextNode: Option[Node]
}
/**
 * A factory which can generate <code>LoadBalancer</code>s.
 */
trait LoadBalancerFactory {
  /**
   * Create a new load balancer instance based on the currently available <code>Node</code>s.
   *
   * @param nodes the currently available <code>Node</code>s in the cluster
   *
   * @return a new <code>LoadBalancer</code> instance
   * @throws InvalidClusterException thrown to indicate that the current cluster topology is invalid in some way and
   * it is impossible to create a <code>LoadBalancer</code>
   */
  @throws(classOf[InvalidClusterException])
  def newLoadBalancer(nodes: Set[Node]): LoadBalancer
}
/**
 * A component which provides a <code>LoadBalancerFactory</code>.
 * Intended for cake-pattern style mixing into components that need balancing.
 */
trait LoadBalancerFactoryComponent {
  val loadBalancerFactory: LoadBalancerFactory
}
| rhavyn/norbert | network/src/main/scala/com/linkedin/norbert/network/client/loadbalancer/LoadBalancerFactory.scala | Scala | apache-2.0 | 1,905 |
/*
* # Trove
*
* This file is part of Trove - A FREE desktop budgeting application that
* helps you track your finances, FREES you from complex budgeting, and
* enables you to build your TROVE of savings!
*
* Copyright © 2016-2021 Eric John Fredericks.
*
* Trove is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Trove is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Trove. If not, see <http://www.gnu.org/licenses/>.
*/
package trove.core.infrastructure.persist
import org.mockito.MockitoSugar
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should
import slick.dbio.{DBIOAction, NoStream}
import slick.jdbc.DriverDataSource
import slick.util.ClassLoaderUtil
import trove.constants.ProjectsHomeDir
import trove.core.infrastructure.persist.lock.ProjectLock
import trove.core.infrastructure.persist.schema.Tables
import trove.exceptional.{PersistenceError, PersistenceException, SystemError}
import java.io.File
import java.sql.SQLException
import javax.sql.DataSource
import scala.concurrent.Future
import scala.reflect.runtime.universe._
import scala.util.{Failure, Success, Try}
class ProjectPersistenceServiceSpec extends AnyFlatSpec with should.Matchers with MockitoSugar {
import ProjectPersistenceService._
// Shared fixture: mocks the project lock, db file and Slick database so no real
// I/O happens; records every DBIO action run and every data source created so
// tests can assert on them.
trait ProjectDirFixture {
  import slick.jdbc.SQLiteProfile.backend._
  val mockDbFile: File = mock[File]
  when(mockDbFile.exists()).thenReturn(true)
  when(mockDbFile.getAbsolutePath).thenReturn("/foo/bar")
  // Accumulators inspected by the tests.
  var runDbIOActions: Seq[DBIOAction[_, NoStream, Nothing]] = Seq.empty
  var forDataSourceArgs: Seq[(DataSource, Int)] = Seq.empty
  val mockLock: ProjectLock = mock[ProjectLock]
  when(mockLock.lock()).thenReturn(Success((): Unit))
  val mockDb = mock[DatabaseDef]
  // When true, forDataSource throws SqlException to simulate a failed db open.
  var mockDbError: Boolean = false
  val SqlException = new SQLException("Mock DB Error")
  var dbActionCount = 0
  // 1-based index of the DB action that should fail; -1 disables failure injection.
  var dbActionFailOn: Int = -1
  var dbVersionQueryResult: Seq[DBVersion] = Seq(Tables.CurrentDbVersion)
  // Stubs the persistence layer so the service under test never touches disk or SQLite.
  trait MockPersistence extends PersistenceOps {
    override def newProjectLock(projectsHomeDir: File, projectName: String): ProjectLock =
      mockLock
    override def createDbFile(directory: File, filename: String): File =
      mockDbFile
    override def forDataSource(ds: DataSource, numWorkers: Int): DatabaseDef = {
      forDataSourceArgs :+= (ds, numWorkers)
      if(mockDbError) throw SqlException
      mockDb
    }
    // Records the action, then dispatches a canned result based on the action's
    // result type (via TypeTag): Unit for setup actions, the stubbed version rows
    // for the version query, failure for anything unexpected.
    override def runDBIOAction[R: TypeTag](a: DBIOAction[R,NoStream,Nothing])(db: DatabaseDef) : Future[R] = {
      runDbIOActions :+= a
      assume(db == mockDb)
      dbActionCount += 1
      if(dbActionFailOn == dbActionCount)
        Future.failed[R](new Exception(s"failed as expected: dbActionFailOn: $dbActionFailOn"))
      else {
        typeOf[R] match {
          case r if r =:= typeOf[Unit] =>
            Future.successful({}).asInstanceOf[Future[R]]
          case r if r =:= typeOf[Seq[Tables.Version#TableElementType]] =>
            Future.successful(dbVersionQueryResult).asInstanceOf[Future[R]]
          case _ =>
            Future.failed[R](new RuntimeException(s"Unknown type: ${typeOf[R]}"))
        }
      }
    }
  }
  // A mocked, empty projects home directory.
  val tempDir: File = mock[File]
  when(tempDir.isDirectory).thenReturn(true)
  when(tempDir.listFiles()).thenReturn(Array.empty[File])
  val projectService: ProjectPersistenceServiceImpl = new ProjectPersistenceServiceImpl(tempDir) with MockPersistence with HasShutdownHook
  // Builds a mock File that reports the given name and file/directory flag.
  def mockFile(name: String, directory: Boolean = false): File = {
    require(name != null)
    val file = mock[File]
    when(file.isFile).thenReturn(!directory)
    when(file.getName).thenReturn(name)
    file
  }
}
// Projects home dir containing only entries that listProjects must ignore:
// subdirectories, lock files and dot-files.
trait IgnoredContentsFixture extends ProjectDirFixture {
  val subdir: File = mockFile("subdir", directory = true)
  val lockFile: File = mockFile(s"junk.${ProjectLock.LockfileSuffix}")
  val dotFile: File = mockFile(".dotfile")
  when(tempDir.listFiles()).thenReturn(Array(subdir, lockFile, dotFile))
}
// Projects home dir containing three well-formed project database files.
trait NormalProjectsFixture extends ProjectDirFixture {
  import ProjectPersistenceService._
  val abc: File = mockFile(s"abc$DbFilenameSuffix")
  val `def`: File = mockFile(s"def$DbFilenameSuffix")
  val ghi: File = mockFile(s"ghi$DbFilenameSuffix")
  when(tempDir.listFiles()).thenReturn(Array(abc, `def`, ghi))
}
// Service wiring and listProjects behavior.
"Trove project persistence service" should "utilize project home dir" in {
  ProjectPersistenceService().asInstanceOf[ProjectPersistenceServiceImpl].projectsHomeDir shouldBe ProjectsHomeDir
}
it should "add shutdown hook" in {
  // removeShutdownHook returns true only if the hook had been registered.
  val shutdownHook = ProjectPersistenceService().asInstanceOf[HasShutdownHook].shutdownHook
  Runtime.getRuntime.removeShutdownHook(shutdownHook) shouldBe true
}
"listProjects" should "return nothing if the project directory is empty" in new ProjectDirFixture {
  projectService.listProjects shouldBe Success(Seq.empty)
}
it should "return nothing if the project directory contains only ignored files" in new IgnoredContentsFixture {
  projectService.listProjects shouldBe Success(Seq.empty)
}
it should "strip the filename extension from valid project files" in new NormalProjectsFixture {
  val result: Try[Seq[String]] = projectService.listProjects
  result.isFailure shouldBe false
  val projectNames: Seq[String] = result.get
  projectNames should not be empty
  projectNames.foreach(_.endsWith(DbFilenameSuffix) shouldBe false)
}
it should "return a sorted list of project names with filename extensions stripped" in new NormalProjectsFixture {
  val result: Try[Seq[String]] = projectService.listProjects
  result.isFailure shouldBe false
  result.get shouldBe Seq("abc", "def", "ghi")
}
it should "return a failure if an exception is thrown while listing files" in new ProjectDirFixture {
  // A directory whose listFiles throws: the failure must surface as Failure(ex).
  val file: File = mock[File]
  when(file.isDirectory).thenReturn(true)
  val ex = new RuntimeException("doom")
  when(file.listFiles).thenThrow(ex)
  override val projectService = new ProjectPersistenceServiceImpl(file) with MockPersistence
  projectService.listProjects match {
    case Failure(e) =>
      e shouldBe ex
    case somethingElse =>
      fail(s"Unexpected result: $somethingElse")
  }
}
"initializeProject" should "open an existing project and lock it" in new NormalProjectsFixture {
val projectNames: Try[Seq[String]] = projectService.listProjects
projectNames.isSuccess shouldBe true
val projectName: String = projectNames.get.head
projectService.initializeProject(projectName) match {
case result@Success(project) =>
project.name shouldBe projectName
project.db should not be null
project.lock shouldBe mockLock
verify(project.lock, times(1)).lock()
verify(project.lock, never).release()
runDbIOActions should contain theSameElementsAs List(Tables.versionQuery)
forDataSourceArgs should have size 1
val (ds, numWorkers) = forDataSourceArgs.head
ds shouldBe a [DriverDataSource]
val dds = ds.asInstanceOf[DriverDataSource]
dds.url shouldBe "jdbc:sqlite:/foo/bar"
dds.user shouldBe null
dds.password shouldBe null
dds.properties shouldBe null
dds.driverClassName shouldBe "org.sqlite.JDBC"
dds.classLoader shouldBe ClassLoaderUtil.defaultClassLoader
numWorkers shouldBe 1
projectService.currentProject shouldBe result.toOption
verifyNoMoreInteractions(mockDb, mockLock)
case somethingElse =>
fail(s"Wrong result when opening project: $somethingElse")
}
}
it should "create a new project with initial tables, create the version table, and lock the project" in new NormalProjectsFixture {
val projectNames: Try[Seq[String]] = projectService.listProjects
projectNames.isSuccess shouldBe true
val allProjectNames: Seq[String] = projectNames.get
val newProjectName = "foo"
assume(!allProjectNames.contains(newProjectName)) // sanity check
when(mockDbFile.exists()).thenReturn(false)
projectService.initializeProject(newProjectName) match {
case result@Success(project) =>
project.name shouldBe newProjectName
project.db should not be null
project.lock shouldBe mockLock
verify(project.lock, times(1)).lock()
verify(project.lock, never).release()
runDbIOActions should contain theSameElementsInOrderAs List(Tables.setupAction, Tables.versionQuery)
forDataSourceArgs should have size 1
val (ds, numWorkers) = forDataSourceArgs.head
ds shouldBe a [DriverDataSource]
val dds = ds.asInstanceOf[DriverDataSource]
dds.url shouldBe "jdbc:sqlite:/foo/bar"
dds.user shouldBe null
dds.password shouldBe null
dds.properties shouldBe null
dds.driverClassName shouldBe "org.sqlite.JDBC"
dds.classLoader shouldBe ClassLoaderUtil.defaultClassLoader
numWorkers shouldBe 1
projectService.currentProject shouldBe result.toOption
verifyNoMoreInteractions(mockDb, mockLock)
case somethingElse =>
fail(s"Wrong result when opening project: $somethingElse")
}
}
it should "fail with a SystemError if unable to obtain a project lock" in new NormalProjectsFixture {
val projectNames: Try[Seq[String]] = projectService.listProjects
projectNames.isSuccess shouldBe true
val projectName: String = projectNames.get.head
val ise = new IllegalStateException("lock error")
when(mockLock.lock()).thenReturn(SystemError("doom", ise))
projectService.initializeProject(projectName) match {
case SystemError(_, cause) =>
cause shouldBe Some(ise)
verify(mockLock, times(1)).lock()
verify(mockLock, times(1)).release()
verifyNoMoreInteractions(mockDb, mockLock)
runDbIOActions shouldBe empty
forDataSourceArgs shouldBe empty
projectService.currentProject shouldBe None
case somethingElse =>
fail(s"Wrong result when opening project: $somethingElse")
}
}
// Only one project may be open at a time: a second initializeProject must fail
// while leaving the first project's lock held.
// NOTE(review): the description says "PersistenceError" but the assertion matches
// Failure(_: PersistenceException) — confirm which is the intended contract.
it should "return a PersistenceError if a project is already open" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val projectName: String = projectNames.get.head
  projectService.initializeProject(projectName) match {
    case Success(_) =>
      // First open succeeded: lock acquired and never released.
      verify(mockLock, times(1)).lock()
      verify(mockLock, never).release()
      val anotherProj = "foobarbaz"
      assume(!projectNames.get.contains(anotherProj)) // sanity: second name is genuinely new
      when(mockDbFile.exists()).thenReturn(false)
      projectService.initializeProject(anotherProj) match {
        case Failure(_: PersistenceException) =>
        // ok
        case somethingUnexpected =>
          fail(s"Wrong result when opening second project: $somethingUnexpected")
      }
      verifyNoMoreInteractions(mockDb, mockLock)
    case somethingElse =>
      fail(s"Wrong result when opening first project: $somethingElse")
  }
}
// If the DB cannot be opened after the lock was acquired, the lock must be
// released and the SQL cause propagated inside the PersistenceException.
it should "fail with a PersistenceError and clean up project lock if unable to open database" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val projectName: String = projectNames.get.head
  mockDbError = true // turn this on to throw exception when opening db
  projectService.initializeProject(projectName) match {
    case Failure(e: PersistenceException) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      // The fixture's simulated SQL failure must be the recorded cause.
      e.cause shouldBe Some(SqlException)
      verifyNoMoreInteractions(mockLock, mockDb)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// Existing project: the first (and only) DB action is the version query.
// Failing it must release the lock and report the failing action index.
it should "fail with a PersistenceError and clean up the project lock if there is a problem performing the DB version query for an existing project" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  val projectName: String = projectNames.get.head
  dbActionFailOn = 1 // fail the 1st DB action (the version query)
  projectService.initializeProject(projectName) match {
    case PersistenceError(_, cause) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      cause should not be empty
      // The fixture encodes the failing action index in the exception message.
      cause.get.getMessage should endWith (s": $dbActionFailOn")
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      numWorkers shouldBe 1
      // Only the version query was attempted; no table setup for an existing project.
      runDbIOActions should have size 1
      runDbIOActions should contain theSameElementsAs List(Tables.versionQuery)
      verifyNoMoreInteractions(mockLock, mockDb)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// New project: action 1 is table setup, action 2 is the version query.
// Failing action 2 must still release the lock and leave both actions recorded.
it should "fail with a PersistenceError and clean up the project lock if there is a problem performing the DB version query for a new project" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  val anotherProj = "foobarbaz"
  assume(!projectNames.get.contains(anotherProj)) // sanity: name is not an existing project
  when(mockDbFile.exists()).thenReturn(false) // no DB file => treated as a new project
  dbActionFailOn = 2 // fail the 2nd DB action (the version query)
  projectService.initializeProject(anotherProj) match {
    case PersistenceError(_, cause) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      cause should not be empty
      cause.get.getMessage should endWith (s": $dbActionFailOn")
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      numWorkers shouldBe 1
      // Both setup and version query were attempted before the failure.
      runDbIOActions should have size 2
      runDbIOActions should contain theSameElementsAs List(Tables.setupAction, Tables.versionQuery)
      verifyNoMoreInteractions(mockLock, mockDb)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// New project: failing the very first DB action (table setup) must release the
// lock and stop before the version query is ever issued.
it should "fail with a PersistenceError and clean up the project lock if there is a problem setting up the tables for a new project" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  val anotherProj = "foobarbaz"
  assume(!projectNames.get.contains(anotherProj))
  when(mockDbFile.exists()).thenReturn(false) // no DB file => new project path
  dbActionFailOn = 1 // fail the 1st DB action (table setup)
  projectService.initializeProject(anotherProj) match {
    case PersistenceError(_, cause) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      cause should not be empty
      cause.get.getMessage should endWith (s": $dbActionFailOn")
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      numWorkers shouldBe 1
      // Only table setup ran; the version query was never reached.
      runDbIOActions should have size 1
      runDbIOActions should contain theSameElementsAs List(Tables.setupAction)
      verifyNoMoreInteractions(mockLock, mockDb)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// A DB whose schema version differs from the expected one must be rejected,
// with the offending version reported in the error message.
it should "fail with a PersistenceError and clean up the project lock if the wrong database version exists" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  val projectName: String = projectNames.get.head
  // Arrange: make the stored version one less than the expected version.
  val newDbVersion: Long = dbVersionQueryResult.head.id -1
  dbVersionQueryResult = Seq(dbVersionQueryResult.head.copy(id = newDbVersion))
  projectService.initializeProject(projectName) match {
    case PersistenceError(message, _) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      numWorkers shouldBe 1
      runDbIOActions should have size 1
      runDbIOActions should contain theSameElementsAs List(Tables.versionQuery)
      verifyNoMoreInteractions(mockLock, mockDb)
      // The message must name the unexpected version found in the DB.
      message should endWith (newDbVersion.toString)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// The version table must contain exactly one row; multiple rows are a
// corruption signal and must be rejected with the row count in the message.
it should "fail with a PersistenceError and clean up the project lock if there are too many rows in the database version table" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  val projectName: String = projectNames.get.head
  val newDbVersion: Long = dbVersionQueryResult.head.id -1
  // Arrange: two rows in the version table (one valid, one stale).
  dbVersionQueryResult = Seq(dbVersionQueryResult.head, dbVersionQueryResult.head.copy(id = newDbVersion))
  projectService.initializeProject(projectName) match {
    case PersistenceError(message, cause) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, times(1)).release()
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      numWorkers shouldBe 1
      runDbIOActions should have size 1
      runDbIOActions should contain theSameElementsAs List(Tables.versionQuery)
      verifyNoMoreInteractions(mockLock, mockDb)
      message should endWith (s"found ${dbVersionQueryResult.size} rows")
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// Happy path for the public API: opening an existing project acquires the lock,
// runs only the version query, builds a SQLite data source, and registers the
// opened project as current.
"Public open method" should "open an existing project" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val projectName: String = projectNames.get.head
  projectService.open(projectName) match {
    case result@Success(prj) =>
      prj.name shouldBe projectName
      prj shouldBe a [ProjectImpl]
      val project = prj.asInstanceOf[ProjectImpl]
      project.db should not be null
      project.lock shouldBe mockLock
      // Lock held for the lifetime of the open project.
      verify(project.lock, times(1)).lock()
      verify(project.lock, never).release()
      // Existing project: only the version query runs (no table setup).
      runDbIOActions should contain theSameElementsAs List(Tables.versionQuery)
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      val dds = ds.asInstanceOf[DriverDataSource]
      // Data source wired for SQLite with no credentials or extra properties.
      dds.url shouldBe "jdbc:sqlite:/foo/bar"
      dds.user shouldBe null
      dds.password shouldBe null
      dds.properties shouldBe null
      dds.driverClassName shouldBe "org.sqlite.JDBC"
      dds.classLoader shouldBe ClassLoaderUtil.defaultClassLoader
      numWorkers shouldBe 1
      projectService.currentProject shouldBe result.toOption
      verifyNoMoreInteractions(mockDb, mockLock)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// Opening a name with no DB file creates a new project: table setup must run
// before the version query (order is asserted), then the project becomes current.
it should "create a new project performing the appropriate operations" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val allProjectNames: Seq[String] = projectNames.get
  val newProjectName = "foo"
  assume(!allProjectNames.contains(newProjectName)) // sanity check
  when(mockDbFile.exists()).thenReturn(false) // no DB file => new project path
  projectService.open(newProjectName) match {
    case result@Success(prj) =>
      prj shouldBe a [ProjectImpl]
      val project = prj.asInstanceOf[ProjectImpl]
      prj.name shouldBe newProjectName
      project.db should not be null
      project.lock shouldBe mockLock
      verify(project.lock, times(1)).lock()
      verify(project.lock, never).release()
      // Order matters: setup first, then the version query.
      runDbIOActions should contain theSameElementsInOrderAs List(Tables.setupAction, Tables.versionQuery)
      forDataSourceArgs should have size 1
      val (ds, numWorkers) = forDataSourceArgs.head
      ds shouldBe a [DriverDataSource]
      val dds = ds.asInstanceOf[DriverDataSource]
      dds.url shouldBe "jdbc:sqlite:/foo/bar"
      dds.user shouldBe null
      dds.password shouldBe null
      dds.properties shouldBe null
      dds.driverClassName shouldBe "org.sqlite.JDBC"
      dds.classLoader shouldBe ClassLoaderUtil.defaultClassLoader
      numWorkers shouldBe 1
      projectService.currentProject shouldBe result.toOption
      verifyNoMoreInteractions(mockDb, mockLock)
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// Closing the current project must release the lock, close the DB, and clear
// the current-project reference.
"closeCurrentProject" should "cleanup resources when closing the project" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val allProjectNames: Seq[String] = projectNames.get
  val projectName: String = allProjectNames.head
  projectService.open(projectName) match {
    case Success(_) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, never).release()
      projectService.closeCurrentProject() match {
        case Success(_) =>
          projectService.currentProject shouldBe empty
          verify(mockLock, times(1)).release()
          verify(mockDb, times(1)).close()
          verifyNoMoreInteractions(mockLock, mockDb)
        case somethingElse =>
          fail(s"Wrong result when closing project: $somethingElse")
      }
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// Closing when nothing is open must be a no-op that still reports success.
it should "return success if there is no open project" in new NormalProjectsFixture {
  // Precondition: no project is currently open.
  assume(projectService.currentProject.isEmpty)
  projectService.closeCurrentProject() match {
    case Success(_) =>
    // ok: closing with nothing open is a no-op
    case somethingElse =>
      // Interpolate the unexpected result for diagnostics, matching the sibling tests.
      fail(s"Wrong result when closing current project when there is no project. This should be a no-op: $somethingElse")
  }
}
// If Database.close throws, closing must fail with a PersistenceError carrying
// the cause, keep the project open, and NOT release the lock (the project is
// still considered held).
it should "fail with a PersistenceError if the database cannot be closed" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val allProjectNames: Seq[String] = projectNames.get
  val projectName: String = allProjectNames.head
  projectService.open(projectName) match {
    case Success(_) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, never).release()
      val dbCloseException = new RuntimeException("db doom")
      // Arrange: closing the DB blows up.
      doThrow(dbCloseException).when(mockDb).close()
      projectService.closeCurrentProject() match {
        case PersistenceError(_, cause) =>
          cause shouldBe Some(dbCloseException)
          // Project remains current; lock is not released after a failed close.
          projectService.currentProject should not be empty
          verify(mockLock, never).release()
          verify(mockDb, times(1)).close()
          verifyNoMoreInteractions(mockLock, mockDb)
        case somethingElse =>
          fail(s"Wrong result when closing project: $somethingElse")
      }
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// If releasing the lock throws (after the DB closed fine), the close must
// fail with a PersistenceError and the project must remain current.
it should "fail with a PersistenceError if it cannot release the project lock" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val allProjectNames: Seq[String] = projectNames.get
  val projectName: String = allProjectNames.head
  projectService.open(projectName) match {
    case Success(_) =>
      verify(mockLock, times(1)).lock()
      verify(mockLock, never).release()
      // NOTE(review): message "db doom" looks copy-pasted from the DB-close test;
      // harmless, but "lock doom" would be clearer.
      val lockReleaseException = new RuntimeException("db doom")
      doThrow(lockReleaseException).when(mockLock).release()
      projectService.closeCurrentProject() match {
        case PersistenceError(_, cause) =>
          cause shouldBe Some(lockReleaseException)
          projectService.currentProject should not be empty
          // DB close happened first, then the release attempt that failed.
          verify(mockLock, times(1)).release()
          verify(mockDb, times(1)).close()
          verifyNoMoreInteractions(mockLock, mockDb)
        case somethingElse =>
          fail(s"Wrong result when closing project: $somethingElse")
      }
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
// The JVM shutdown hook must close the project and must stay registered with
// the runtime until it fires (removeShutdownHook succeeds exactly once).
"shutdown hook" should "close the database and release the project lock if invoked" in new NormalProjectsFixture {
  val projectNames: Try[Seq[String]] = projectService.listProjects
  projectNames.isSuccess shouldBe true
  val allProjectNames: Seq[String] = projectNames.get
  val projectName: String = allProjectNames.head
  projectService.open(projectName) match {
    case Success(_) =>
      val shutdownHook = projectService.asInstanceOf[HasShutdownHook].shutdownHook
      // Simulate JVM shutdown by running the hook directly.
      shutdownHook.run()
      projectService.currentProject shouldBe empty
      Runtime.getRuntime.removeShutdownHook(shutdownHook) shouldBe true // This proves it wasn't removed and ...
      Runtime.getRuntime.removeShutdownHook(shutdownHook) shouldBe false // it can only be removed once
    case somethingElse =>
      fail(s"Wrong result when opening project: $somethingElse")
  }
}
}
| emanchgo/trove | src/test/scala/trove/core/infrastructure/persist/ProjectPersistenceServiceSpec.scala | Scala | gpl-3.0 | 25,708 |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package com.microsoft.ml.spark
import com.microsoft.ml.spark.TrainRegressorTestUtilities._
import com.microsoft.ml.spark.TrainClassifierTestUtilities._
import com.microsoft.ml.spark.schema.{CategoricalUtilities, SchemaConstants, SparkSchema}
import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.evaluation.BinaryClassificationEvaluator
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DoubleType, StructField, StructType}
/** Tests to validate the functionality of Evaluate Model module. */
class VerifyComputeModelStatistics extends TestBase {

  // Regression evaluation where prediction equals label on every row:
  // all error metrics are 0 and the goodness-of-fit metric is 1.
  test("Smoke test for evaluating a dataset") {
    val labelColumn = "label"
    val predictionColumn = SchemaConstants.SparkPredictionColumn
    val dataset = session.createDataFrame(Seq(
      (0.0, 2, 0.50, 0.60, 0.0),
      (1.0, 3, 0.40, 0.50, 1.0),
      (2.0, 4, 0.78, 0.99, 2.0),
      (3.0, 5, 0.12, 0.34, 3.0),
      (0.0, 1, 0.50, 0.60, 0.0),
      (1.0, 3, 0.40, 0.50, 1.0),
      (2.0, 3, 0.78, 0.99, 2.0),
      (3.0, 4, 0.12, 0.34, 3.0),
      (0.0, 0, 0.50, 0.60, 0.0),
      (1.0, 2, 0.40, 0.50, 1.0),
      (2.0, 3, 0.78, 0.99, 2.0),
      (3.0, 4, 0.12, 0.34, 3.0)))
      .toDF(labelColumn, "col1", "col2", "col3", predictionColumn)

    val scoreModelName = SchemaConstants.ScoreModelPrefix + "_test model"
    // Tag label/scores columns in the schema so ComputeModelStatistics can locate them.
    val datasetWithLabel =
      SparkSchema.setLabelColumnName(dataset, scoreModelName, labelColumn, SchemaConstants.RegressionKind)
    val datasetWithScores =
      SparkSchema.setScoresColumnName(datasetWithLabel, scoreModelName, predictionColumn,
        SchemaConstants.RegressionKind)
    val evaluatedSchema = new ComputeModelStatistics().transformSchema(datasetWithScores.schema)
    val evaluatedData = new ComputeModelStatistics().transform(datasetWithScores)
    val firstRow = evaluatedData.first()
    // Perfect predictions: zero-error columns, perfect-fit column equals 1.
    assert(firstRow.get(0) == 0.0)
    assert(firstRow.get(1) == 0.0)
    assert(firstRow.get(2) == 1.0)
    assert(firstRow.get(3) == 0.0)
    // Output schema must be exactly the regression metric columns, all doubles.
    assert(evaluatedSchema == StructType(ComputeModelStatistics.regressionColumns.map(StructField(_, DoubleType))))
  }

  // Rows containing nulls in label or prediction must be ignored; the remaining
  // (perfectly matching) rows still yield perfect regression metrics.
  test("Evaluate a dataset with missing values") {
    val labelColumn = "label"
    val predictionColumn = SchemaConstants.SparkPredictionColumn
    val dataset = session.createDataFrame(sc.parallelize(Seq(
      (0.0, 0.0),
      (0.0, null),
      (1.0, 1.0),
      (2.0, 2.0),
      (null, null),
      (0.0, 0.0),
      (null, 3.0))).map(values => Row(values._1, values._2)),
      StructType(Array(StructField(labelColumn, DoubleType, true),
        StructField(predictionColumn, DoubleType, true))))
      .toDF(labelColumn, predictionColumn)
    val scoreModelName = SchemaConstants.ScoreModelPrefix + "_test model"
    val datasetWithLabel =
      SparkSchema.setLabelColumnName(dataset, scoreModelName, labelColumn, SchemaConstants.RegressionKind)
    val datasetWithScores =
      SparkSchema.setScoresColumnName(datasetWithLabel, scoreModelName, predictionColumn,
        SchemaConstants.RegressionKind)
    val evaluatedData = new ComputeModelStatistics().transform(datasetWithScores)
    val firstRow = evaluatedData.first()
    assert(firstRow.get(0) == 0.0)
    assert(firstRow.get(1) == 0.0)
    assert(firstRow.get(2) == 1.0)
    assert(firstRow.get(3) == 0.0)
  }

  // End-to-end regression: train (TrainRegressor), score, then evaluate.
  // The label is a constant 0.0 column, so errors are 0 and R^2 is NaN
  // (zero variance in the label makes the fit metric undefined).
  test("Smoke test to train regressor, score and evaluate on a dataset using all three modules") {
    val dataset = session.createDataFrame(Seq(
      (0, 2, 0.50, 0.60, 0),
      (1, 3, 0.40, 0.50, 1),
      (2, 4, 0.78, 0.99, 2),
      (3, 5, 0.12, 0.34, 3),
      (0, 1, 0.50, 0.60, 0),
      (1, 3, 0.40, 0.50, 1),
      (2, 3, 0.78, 0.99, 2),
      (3, 4, 0.12, 0.34, 3),
      (0, 0, 0.50, 0.60, 0),
      (1, 2, 0.40, 0.50, 1),
      (2, 3, 0.78, 0.99, 2),
      (3, 4, 0.12, 0.34, 3)
    )).toDF("labelColumn", "col1", "col2", "col3", "col4")

    val labelColumn = "someOtherColumn"
    // Constant-zero label column: trivially learnable, but variance 0 => NaN R^2.
    val datasetWithAddedColumn = dataset.withColumn(labelColumn, org.apache.spark.sql.functions.lit(0.0))
    val linearRegressor = createLinearRegressor(labelColumn)
    val scoredDataset =
      TrainRegressorTestUtilities.trainScoreDataset(labelColumn, datasetWithAddedColumn, linearRegressor)

    val evaluatedData = new ComputeModelStatistics().transform(scoredDataset)
    val firstRow = evaluatedData.first()
    assert(firstRow.get(0) == 0.0)
    assert(firstRow.get(1) == 0.0)
    assert(firstRow.get(2).asInstanceOf[Double].isNaN)
    assert(firstRow.get(3) == 0.0)
  }

  // End-to-end classification: train (TrainClassifier), score, then check only
  // that the evaluated schema matches the classification metric columns.
  test("Smoke test to train classifier, score and evaluate on a dataset using all three modules") {
    val labelColumn = "Label"
    val dataset = session.createDataFrame(Seq(
      (0, 2, 0.50, 0.60, 0),
      (1, 3, 0.40, 0.50, 1),
      (0, 4, 0.78, 0.99, 2),
      (1, 5, 0.12, 0.34, 3),
      (0, 1, 0.50, 0.60, 0),
      (1, 3, 0.40, 0.50, 1),
      (0, 3, 0.78, 0.99, 2),
      (1, 4, 0.12, 0.34, 3),
      (0, 0, 0.50, 0.60, 0),
      (1, 2, 0.40, 0.50, 1),
      (0, 3, 0.78, 0.99, 2),
      (1, 4, 0.12, 0.34, 3)
    )).toDF(labelColumn, "col1", "col2", "col3", "col4")
    val logisticRegressor = createLogisticRegressor(labelColumn)
    val scoredDataset = TrainClassifierTestUtilities.trainScoreDataset(labelColumn, dataset, logisticRegressor)
    // transform is also invoked to verify evaluation does not throw.
    val evaluatedData = new ComputeModelStatistics().transform(scoredDataset)
    val evaluatedSchema = new ComputeModelStatistics().transformSchema(scoredDataset.schema)
    assert(evaluatedSchema == StructType(ComputeModelStatistics.classificationColumns.map(StructField(_, DoubleType))))
  }

  // Hand-computed multiclass confusion-matrix metrics over a tiny 3-class
  // label/prediction set; verifies micro metrics (== overall accuracy),
  // average accuracy, and macro-averaged precision/recall.
  test("Verify results of multiclass metrics") {
    val labelColumn = "label"
    val predictionColumn = SchemaConstants.SparkPredictionColumn
    val labelsAndPrediction = session.createDataFrame(
      Seq(
        (0.0, 0.0),
        (0.0, 0.0),
        (0.0, 1.0),
        (0.0, 2.0),
        (1.0, 0.0),
        (1.0, 1.0),
        (1.0, 1.0),
        (1.0, 1.0),
        (2.0, 2.0))).toDF(labelColumn, predictionColumn)

    val scoreModelName = SchemaConstants.ScoreModelPrefix + "_test model"
    val datasetWithLabel =
      SparkSchema.setLabelColumnName(labelsAndPrediction, scoreModelName, labelColumn,
        SchemaConstants.ClassificationKind)
    val datasetWithScoredLabels =
      SparkSchema.setScoredLabelsColumnName(datasetWithLabel, scoreModelName, predictionColumn,
        SchemaConstants.ClassificationKind)

    val evaluatedData = new ComputeModelStatistics().transform(datasetWithScoredLabels)

    // True positives / true negatives per class, read off the data above.
    val tp0 = 2.0
    val tp1 = 3.0
    val tp2 = 1.0
    val tn0 = 4.0
    val tn1 = 4.0
    val tn2 = 7.0
    val numLabels = 3.0
    val total = labelsAndPrediction.count()

    // Per-class precision = tp / (tp + fp); recall = tp / (tp + fn).
    val precision0 = 2.0 / (2 + 1)
    val precision1 = 3.0 / (3 + 1)
    val precision2 = 1.0 / (1 + 1)

    val recall0 = 2.0 / (2 + 2)
    val recall1 = 3.0 / (3 + 1)
    val recall2 = 1.0 / (1 + 0)

    val overallAccuracy = (tp0 + tp1 + tp2) / total
    val evalRow = evaluatedData.first()
    // Micro-averaged precision and recall both collapse to overall accuracy
    // in single-label multiclass classification.
    assert(evalRow.getAs[Double](ComputeModelStatistics.AccuracyColumnName) == overallAccuracy)
    assert(evalRow.getAs[Double](ComputeModelStatistics.PrecisionColumnName) == overallAccuracy)
    assert(evalRow.getAs[Double](ComputeModelStatistics.RecallColumnName) == overallAccuracy)
    val avgAccuracy = ((tp0 + tn0) / total + (tp1 + tn1) / total + (tp2 + tn2) / total) / numLabels
    val macroPrecision = (precision0 + precision1 + precision2) / numLabels
    val macroRecall = (recall0 + recall1 + recall2) / numLabels
    assert(evalRow.getAs[Double](ComputeModelStatistics.AverageAccuracy) == avgAccuracy)
    assert(evalRow.getAs[Double](ComputeModelStatistics.MacroAveragedPrecision) == macroPrecision)
    assert(evalRow.getAs[Double](ComputeModelStatistics.MacroAveragedRecall) == macroRecall)
  }

  // Cross-check: the AUC ComputeModelStatistics reports must equal the AUC the
  // stock Spark BinaryClassificationEvaluator computes on the same scores.
  // NOTE(review): randomSplit has no fixed seed here, so the train/test split
  // differs between runs — consider seeding for reproducibility.
  test("validate AUC from compute model statistic and binary classification evaluator gives the same result") {
    val fileLocation = ClassifierTestUtils.classificationTrainFile("transfusion.csv").toString
    val label = "Donated"
    val dataset: DataFrame =
      session.read.format("com.databricks.spark.csv")
        .option("header", "true").option("inferSchema", "true")
        .option("treatEmptyValuesAsNulls", "false")
        .option("delimiter", ",")
        .load(fileLocation)
    val split = dataset.randomSplit(Array(0.75,0.25))
    val train = split(0)
    val test = split(1)
    val trainClassifier = new TrainClassifier()
    val model = trainClassifier.setModel(new LogisticRegression())
      .set(trainClassifier.labelCol, label)
      .set(trainClassifier.numFeatures, 1 << 18)
      .fit(train)
    val scored = model.transform(test)
    val eval = new ComputeModelStatistics().transform(scored)
    val cmsAUC = eval.first().getAs[Double]("AUC")

    val binaryEvaluator = new BinaryClassificationEvaluator()
      .setMetricName("areaUnderROC")
      .setLabelCol(label)
      .setRawPredictionCol(SchemaConstants.ScoresColumn)

    // Map categorical label levels back to numeric indices for the evaluator.
    val levels = CategoricalUtilities.getLevels(scored.schema, label)
    val levelsToIndexMap: Map[Any, Double] = levels.get.zipWithIndex.map(t => t._1 -> t._2.toDouble).toMap

    // Calculate confusion matrix and output it as DataFrame
    val predictionAndLabels = session
      .createDataFrame(scored.select(col(SchemaConstants.ScoresColumn), col(label)).rdd.map {
        // prediction(1) is the positive-class score used for ROC ranking.
        case Row(prediction: Vector, label) => (prediction(1), levelsToIndexMap(label))
      }).toDF(SchemaConstants.ScoresColumn, label)
    val auc = binaryEvaluator.evaluate(predictionAndLabels)
    assert(auc === cmsAUC)
  }
}
| rastala/mmlspark | src/compute-model-statistics/src/test/scala/VerifyComputeModelStatistics.scala | Scala | mit | 9,751 |
package filodb.query.exec
import com.typesafe.config.ConfigFactory
import monix.reactive.Observable
import org.scalatest.concurrent.ScalaFutures
import filodb.core.{MachineMetricsData => MMD}
import filodb.core.metadata.Column.ColumnType
import filodb.core.query._
import filodb.memory.format.{ZeroCopyUTF8String => ZCUTF8}
import filodb.memory.format.vectors.HistogramWithBuckets
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class HistToPromSeriesMapperSpec extends AnyFunSpec with Matchers with ScalaFutures {
  val config = ConfigFactory.load("application_test.conf").getConfig("filodb")
  val queryConfig = new QueryConfig(config.getConfig("query"))
  val querySession = QuerySession(QueryContext(), queryConfig)

  import monix.execution.Scheduler.Implicits.global

  // 20 samples of an 8-bucket histogram series plus a range-vector key that
  // carries the partition tags and the metric name.
  val eightBucketData = MMD.linearHistSeries().take(20)
  val rvKey = CustomRangeVectorKey(eightBucketData.head(5).asInstanceOf[Map[ZCUTF8, ZCUTF8]] +
    (ZCUTF8("metric") -> ZCUTF8(eightBucketData.head(4).asInstanceOf[String])))

  // Column 0 is the timestamp, column 3 the histogram value.
  val eightBTimes = eightBucketData.map(_(0).asInstanceOf[Long])
  val eightBHists = eightBucketData.map(_(3).asInstanceOf[HistogramWithBuckets])
  val rows = eightBTimes.zip(eightBHists).map { case (t, h) => new TransientHistRow(t, h) }

  val sourceSchema = new ResultSchema(Seq(ColumnInfo("timestamp", ColumnType.TimestampColumn),
    ColumnInfo("value", ColumnType.HistogramColumn)), 1)

  // One histogram series with a single bucket layout must fan out into one
  // Prometheus-style _bucket time series per bucket, each tagged with "le".
  it("should convert single schema histogram to appropriate Prom bucket time series") {
    import NoCloseCursor._
    val rv = IteratorBackedRangeVector(rvKey, rows.toIterator)
    val mapper = HistToPromSeriesMapper(MMD.histDataset.schema.partition)

    val sourceObs = Observable.now(rv)
    // Histogram column becomes a plain double column in the output schema.
    mapper.schema(sourceSchema).columns shouldEqual Seq(ColumnInfo("timestamp", ColumnType.TimestampColumn),
      ColumnInfo("value", ColumnType.DoubleColumn))
    val destObs = mapper.apply(sourceObs, querySession, 1000, sourceSchema, Nil)
    val destRvs = destObs.toListL.runAsync.futureValue

    // Should be 8 time series since there are 8 buckets
    destRvs.length shouldEqual 8
    // RVs should have same timestamps as source data and expected bucket values. Also check keys
    destRvs.foreach { rv =>
      val kvMap = rv.key.labelValues.map { case (k, v) => k.toString -> v.toString }
      kvMap.contains("le") shouldEqual true
      kvMap("metric").endsWith("_bucket") shouldEqual true
      val le = kvMap("le").toDouble
      rv.rows.map(_.getLong(0)).toSeq shouldEqual eightBTimes

      // Figure out bucket number and extract bucket values for comparison
      var bucketNo = 0
      while (eightBHists.head.bucketTop(bucketNo) < le) bucketNo += 1
      rv.rows.map(_.getDouble(1)).toSeq shouldEqual eightBHists.map(_.bucketValue(bucketNo))
    }
  }

  // A second stretch of the same series with a wider (10-bucket) layout.
  val tenBucketData = MMD.linearHistSeries(startTs = 150000L, numBuckets = 10).take(10)
  val tenBTimes = tenBucketData.map(_(0).asInstanceOf[Long])
  val tenBHists = tenBucketData.map(_(3).asInstanceOf[HistogramWithBuckets])
  val tenRows = tenBTimes.zip(tenBHists).map { case (t, h) => new TransientHistRow(t, h) }

  // When the bucket layout changes mid-series, the union of buckets is emitted;
  // buckets absent from the earlier layout carry NaN for those timestamps.
  it("should convert multiple schema histograms to Prom bucket time series") {
    import filodb.core.query.NoCloseCursor._
    val rv = IteratorBackedRangeVector(rvKey, (rows ++ tenRows).toIterator)
    val mapper = HistToPromSeriesMapper(MMD.histDataset.schema.partition)

    val sourceObs = Observable.now(rv)
    mapper.schema(sourceSchema).columns shouldEqual Seq(ColumnInfo("timestamp", ColumnType.TimestampColumn),
      ColumnInfo("value", ColumnType.DoubleColumn))
    val destObs = mapper.apply(sourceObs, querySession, 1000, sourceSchema, Nil)
    val destRvs = destObs.toListL.runAsync.futureValue

    // Should be 10 time series since there are up to 10 buckets
    destRvs.length shouldEqual 10
    // Buckets that should have data for all timestamps: 2,4,8,16,32,64,128,256
    // Buckets that should have NaN for first 20 timestamps: 512,1024
    destRvs.foreach { rv =>
      val kvMap = rv.key.labelValues.map { case (k, v) => k.toString -> v.toString }
      kvMap.contains("le") shouldEqual true
      kvMap("metric").endsWith("_bucket") shouldEqual true
      val le = kvMap("le").toDouble
      rv.rows.map(_.getLong(0)).toSeq shouldEqual (eightBTimes ++ tenBTimes)

      // Figure out bucket number and extract bucket values for comparison
      var bucketNo = 0
      while (tenBHists.head.bucketTop(bucketNo) < le) bucketNo += 1
      val bucketValues = rv.rows.map(_.getDouble(1)).toSeq
      if (bucketNo < 8) {
        // Bucket existed in both layouts: values present throughout.
        bucketValues take 20 shouldEqual eightBHists.map(_.bucketValue(bucketNo))
      } else {
        // Bucket only exists in the 10-bucket layout: NaN before the switch.
        (bucketValues take 20).forall(java.lang.Double.isNaN) shouldEqual true
      }
      bucketValues drop 20 shouldEqual tenBHists.map(_.bucketValue(bucketNo))
    }
  }
} | tuplejump/FiloDB | query/src/test/scala/filodb/query/exec/HistToPromSeriesMapperSpec.scala | Scala | apache-2.0 | 5,012 |
package de.tototec.sbuild.ant.tasks
import de.tototec.sbuild.Project
import org.apache.tools.ant.taskdefs.Copy
import de.tototec.sbuild.ant.AntProject
import java.io.File
import org.apache.tools.ant.types.FileSet
/**
* Wrapper for the [[http://ant.apache.org/manual/Tasks/copy.html Ant Copy task]].
*/
/**
 * Wrapper for the [[http://ant.apache.org/manual/Tasks/copy.html Ant Copy task]].
 */
object AntCopy {

  /**
   * Creates, configures and executes an Ant Copy task.
   *
   * Parameters left at their `null` default are not set on the task, so
   * Ant's own defaults apply.
   *
   * For parameter documentation see the constructor of [[AntCopy]].
   */
  def apply(
    file: File = null,
    toFile: File = null,
    toDir: File = null,
    preserveLastModified: java.lang.Boolean = null,
    filtering: java.lang.Boolean = null,
    overwrite: java.lang.Boolean = null,
    force: java.lang.Boolean = null,
    flatten: java.lang.Boolean = null,
    verbose: java.lang.Boolean = null,
    includeEmptyDirs: java.lang.Boolean = null,
    quiet: java.lang.Boolean = null,
    enableMultipleMappings: java.lang.Boolean = null,
    encoding: String = null,
    outputEncoding: String = null,
    granularity: java.lang.Long = null,
    // since 0.1.0.9001
    fileSets: Seq[FileSet] = null,
    // since 0.1.3.9000
    fileSet: FileSet = null)(implicit _project: Project) =
    new AntCopy(
      file = file,
      toFile = toFile,
      toDir = toDir,
      preserveLastModified = preserveLastModified,
      filtering = filtering,
      overwrite = overwrite,
      force = force,
      flatten = flatten,
      verbose = verbose,
      includeEmptyDirs = includeEmptyDirs,
      quiet = quiet,
      enableMultipleMappings = enableMultipleMappings,
      encoding = encoding,
      outputEncoding = outputEncoding,
      granularity = granularity,
      fileSets = fileSets,
      fileSet = fileSet
    ).execute
}
/**
* Wrapper for the [[http://ant.apache.org/manual/Tasks/copy.html Ant Copy task]].
*
* Copy a file or a directory or a set of files and directories.
*
*
*/
class AntCopy()(implicit _project: Project) extends Copy {

  setProject(AntProject())

  /**
   * Creates and configures a copy task. Any parameter left at its `null`
   * default is not applied, so the Ant task's own default is used.
   *
   * @param file The file to copy.
   * @param toFile The target file to copy to.
   * @param toDir The target directory to copy to.
   * @param preserveLastModified Give the copied files the same last modified time as the original source file.
   * @param filtering Indicates whether token filtering uses the global build-file filters (of Ant) should take place during the copy.
   * @param overwrite Overwrite existing files even if the destination files are newer.
   * @param force Overwrite read-only destination files.
   * @param flatten Ignore the directory structure of the source files, and copy all files into the directory specified by the `toDir` parameter.
   * @param verbose Log the files that are being copied.
   * @param includeEmptyDirs Copy any empty directories included in the FileSet(s).
   * @param quiet
   *   If `true` and `failOnError` is `false`, then do not log a warning message when the file to copy does not exist
   *   or one of the nested file sets points to a directory that does not exist
   *   or an error occurs while copying.
   * @param enableMultipleMappings If `true` the task will process to all the mappings for a given source path.
   * @param encoding The encoding to assume when filter-copying the files.
   * @param outputEncoding the encoding to use when writing the files.
   * @param granularity The number of milliseconds leeway to give before deciding a file is out of date.
   * @param fileSets [[org.apache.tools.ant.types.FileSet]]'s used to select groups of files to copy.
   * @param fileSet A [[org.apache.tools.ant.types.FileSet]] used to select groups of files to copy.
   */
  def this(
    file: File = null,
    toFile: File = null,
    toDir: File = null,
    preserveLastModified: java.lang.Boolean = null,
    filtering: java.lang.Boolean = null,
    overwrite: java.lang.Boolean = null,
    force: java.lang.Boolean = null,
    flatten: java.lang.Boolean = null,
    verbose: java.lang.Boolean = null,
    includeEmptyDirs: java.lang.Boolean = null,
    quiet: java.lang.Boolean = null,
    enableMultipleMappings: java.lang.Boolean = null,
    encoding: String = null,
    outputEncoding: String = null,
    granularity: java.lang.Long = null,
    // since 0.1.0.9001
    fileSets: Seq[FileSet] = null,
    // since 0.1.3.9000
    fileSet: FileSet = null)(implicit _project: Project) {
    this()
    if (file != null) setFile(file)
    if (toFile != null) setTofile(toFile)
    if (toDir != null) setTodir(toDir)
    if (preserveLastModified != null) setPreserveLastModified(preserveLastModified.booleanValue)
    if (filtering != null) setFiltering(filtering.booleanValue)
    if (overwrite != null) setOverwrite(overwrite.booleanValue)
    // Fixed: previously called setOverwrite(overwrite.booleanValue), which both
    // set the wrong flag and threw a NPE when force was given without overwrite.
    if (force != null) setForce(force.booleanValue)
    if (flatten != null) setFlatten(flatten.booleanValue)
    if (verbose != null) setVerbose(verbose.booleanValue)
    // Fixed: previously called setVerbose instead of setIncludeEmptyDirs.
    if (includeEmptyDirs != null) setIncludeEmptyDirs(includeEmptyDirs.booleanValue)
    if (quiet != null) setQuiet(quiet.booleanValue)
    // Fixed: previously called setQuiet instead of setEnableMultipleMappings.
    if (enableMultipleMappings != null) setEnableMultipleMappings(enableMultipleMappings.booleanValue)
    if (encoding != null) setEncoding(encoding)
    if (outputEncoding != null) setOutputEncoding(outputEncoding)
    if (granularity != null) setGranularity(granularity.longValue)
    if (fileSets != null) fileSets.foreach(addFileset)
    if (fileSet != null) addFileset(fileSet)
  }

  /** Set the target directory to copy to. */
  def setToDir(toDir: File) = setTodir(toDir)

  /** Set the target file to copy to. */
  def setToFile(toFile: File) = setTofile(toFile)
} | SBuild-org/sbuild | de.tototec.sbuild.ant/src/main/scala/de/tototec/sbuild/ant/tasks/AntCopy.scala | Scala | apache-2.0 | 5,635 |
package com.taig.tmpltr.engine.html
import com.taig.tmpltr._
import play.api.mvc.Content
// HTML <h4> heading element: delegates rendering to the generic heading tag
// `h` with a fixed level of 4.
class h4( attributes: Attributes, content: Content )
  extends h( 4, attributes, content )
  with Tag.Body[h4, Content]
object h4
extends Tag.Body.Appliable[h4, Content] | Taig/Play-Tmpltr | app/com/taig/tmpltr/engine/html/h4.scala | Scala | mit | 258 |
package com.bio4j.dynamograph.mapper
import com.amazonaws.services.dynamodbv2.model.{AttributeValue, PutItemRequest}
import com.bio4j.dynamograph.model.Properties._
import com.bio4j.dynamograph.model.go.GoSchema.IsAType
import com.bio4j.dynamograph.parser.{ParsingContants, SingleElement}
import com.bio4j.dynamograph.testModel
import com.bio4j.dynamograph.testModel.{TestVertex, _}
import com.bio4j.dynamograph.writer.{AnyEdgeWriter, AnyVertexWriter}
import org.specs2.mock._
import org.specs2.mutable._
import org.specs2.specification.Scope
class GoMapperTest extends Specification with Mockito {
"GoMapper " should {
"throw exception for empty single element" in new context {
underTest.map(SingleElement(Map(),Nil)) must throwA[NoSuchElementException]
}
"throw exception for unknown vertex type " in new context {
override val underTest = new GoMapper(Map(), Map())
underTest.map(
SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label),Nil)
) must throwA[NoSuchElementException]
}
"throw exception for unknown vertex type " in new context {
override val underTest = new GoMapper(vertexWriters, Map())
underTest.map(
SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> "aa"),Nil)
) must throwA[NoSuchElementException]
}
"throw exception for vertex attributes without id" in new context{
override val underTest = new GoMapper(vertexWriters, Map())
underTest.map(
SingleElement(Map(testModel.name.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label),Nil)
) must throwA[NoSuchElementException]
}
"return single PutItemRequest" in new context {
override val underTest = new GoMapper(vertexWriters, Map())
vertexWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest())
val result = underTest.map(
SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label
),Nil))
result should have size 1
}
"return PutItemRequests for vertex and edge" in new context {
vertexWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest())
edgeWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest(), new PutItemRequest(), new PutItemRequest())
val result = underTest.map(
SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label),
List(Map(ParsingContants.relationType -> TestEdgeType.label, targetId.label -> "GO:0048308"))))
result should have size 4
}
"return PutItemRequests for vertex and edges" in new context {
vertexWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest())
edgeWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest(), new PutItemRequest(), new PutItemRequest())
val result = underTest.map(SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label
),firstEdgesValues))
result should have size 10
}
"throw exception for unknown relation type" in new context {
vertexWriter.write(any[Map[String,AttributeValue]]) returns List(new PutItemRequest())
underTest.map(SingleElement(Map(id.label -> "testLabel", ParsingContants.vertexType -> TestVertexType.label),
List(Map(ParsingContants.relationType -> IsAType.label, targetId.label -> "GO:0048308")))) must throwA[NoSuchElementException]
}
}
  // Shared fixture for the examples above. Field names (vertexWriter,
  // vertexWriters, edgeWriter, edgeWriters, underTest, firstEdgesValues) are
  // referenced directly by the examples, so they must not be renamed.
  trait context extends Scope {
    // Three edge attribute maps, all using the registered TestEdgeType.
    val firstEdgesValues = List(
      Map(ParsingContants.relationType -> TestEdgeType.label, targetId.label -> "GO:0048308"),
      Map(ParsingContants.relationType -> TestEdgeType.label, targetId.label -> "GO:0048311"),
      Map(ParsingContants.relationType -> TestEdgeType.label, targetId.label -> "biological_process")
    )
    // Mockito mocks; examples stub write(...) to return canned PutItemRequests.
    val vertexWriter = mock[AnyVertexWriter]
    val vertexWriters = Map(TestVertexType.label -> vertexWriter)
    val edgeWriter = mock[AnyEdgeWriter]
    val edgeWriters = Map(TestEdgeType.label -> edgeWriter)
    // Default mapper under test; individual examples may override it.
    val underTest = new GoMapper(vertexWriters, edgeWriters)
  }
} | bio4j/dynamograph | src/test/scala/com/bio4j/dynamograph/mapper/GoMapperTest.scala | Scala | agpl-3.0 | 4,235 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.scalariform
import org.scalatest.junit.AssertionsForJUnit
import org.scalastyle.file.CheckerTest
import org.junit.Test
// scalastyle:off magic.number
/**
 * Tests for the NotImplementedErrorUsage rule, which flags uses of the `???`
 * placeholder. Test source strings and expected error positions must stay
 * byte-exact, since the checker reports (line, column) of the offending token.
 */
class NotImplementedErrorUsageTest extends AssertionsForJUnit with CheckerTest {
  val key = "not.implemented.error.usage"
  val classUnderTest = classOf[NotImplementedErrorUsage]

  @Test
  def noErrors(): Unit = {
    // A fully implemented value: the rule should report nothing.
    val source = """
class X {
  val x = 0
}
"""
    assertErrors(Nil, source)
  }

  @Test
  def notImplementedErrorFound(): Unit = {
    // `???` appears at line 3, column 10 of the source string.
    val source = """
class X {
  val x = ???
}
"""
    assertErrors(List(columnError(3, 10)), source)
  }
}
| kahosato/scalastyle | src/test/scala/org/scalastyle/scalariform/NotImplementedErrorUsageTest.scala | Scala | apache-2.0 | 1,378 |
package io.github.agormindustries.knowledgestand.core.handler
import io.github.agormindustries.knowledgestand.core.handler.event.InteractEvent
/**
* EventHandler
* Knowledge-Stand
* Created by: MartijnWoudstra
* Date: 21-apr-2014
* License: GPL v3
**/
/** Registry of the mod's event handler singletons. */
object EventHandler {
  // Single shared instance handling player interaction events.
  val interactEvent: InteractEvent = new InteractEvent()
}
| AgormIndustries/Knowledge-Stand | src/main/scala/io/github/agormindustries/knowledgestand/core/handler/EventHandler.scala | Scala | gpl-3.0 | 341 |
package views.json.Tree
import java.util.Date
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.DateTimeZone
import play.api.i18n.Messages
import play.api.libs.json.{Json,JsValue}
import scala.collection.mutable.Buffer
import com.overviewdocs.models.Tree
/** Serializes a [[Tree]] to the JSON payload consumed by the client. */
object show {
  // ISO-8601 formatter without milliseconds, pinned to UTC.
  private val iso8601Format = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC)

  // Formats a java.util.Date as e.g. "2014-01-01T12:00:00Z".
  private def dateToISO8601(time: Date): String = iso8601Format.print(time.getTime())

  def apply(tree: Tree)(implicit messages: Messages): JsValue = {
    // Always-present creation-data entries, in display order.
    val requiredPairs = Seq(
      "nDocuments" -> tree.documentCount.toString,
      "rootNodeId" -> tree.rootNodeId.toString,
      "lang" -> tree.lang
    )
    // Optional entries are emitted only when their value is non-empty,
    // preserving the same order as before.
    val optionalPairs = Seq(
      "description" -> tree.description,
      "suppliedStopWords" -> tree.suppliedStopWords,
      "importantWords" -> tree.importantWords
    ).filter { case (_, value) => value.length > 0 }
    // Each (key, value) pair becomes a two-element JSON array.
    val creationData = (requiredPairs ++ optionalPairs).map { case (key, value) => Json.arr(key, value) }

    Json.obj(
      "type" -> "tree",
      "id" -> tree.id,
      "nDocuments" -> tree.documentCount,
      "title" -> tree.title,
      "rootNodeId" -> tree.rootNodeId,
      "progress" -> tree.progress,
      "progressDescription" -> Messages("views.Tree.progressDescription." + tree.progressDescription),
      "createdAt" -> dateToISO8601(tree.createdAt),
      "creationData" -> creationData
    )
  }
}
| overview/overview-server | web/app/views/Tree/show.json.scala | Scala | agpl-3.0 | 1,467 |
package rml.args.arg.decorator
import rml.args.config.reader.CommandlineArgReader
import rml.args.exceptions.IllegalArgException
import rml.args.config.FullConfig
import rml.args.arg.InputArg
import scala.util.Failure
import rml.args.config.reader.CommandlineArgReader
/**
 * Reads an argument from an environment variable. This is usually used to read
 * general arguments like HOME etc.
 * Type information is taken from the wrapped argument.
 */
object Env {

  /**
   * Wraps `arg` so that its raw value is looked up in the environment
   * variable named by the argument's key rather than on the command line.
   * Fails with an [[IllegalArgException]] when the variable is unset.
   */
  def apply[T](arg: InputArg[T]): InputArg[T] = arg.mapLowLevel { (arg: InputArg[T], config: FullConfig) =>
    val envVar = System.getenv(arg.key)
    if(envVar == null){
      // System.getenv returns null when the variable is not set.
      // (Message fixed: previously read "No values found for ... found".)
      Failure(new IllegalArgException(s"No value found for environment variable ${arg.key}"))
    } else {
      // Parse the raw environment string like a command-line value and layer
      // it over the existing configuration before delegating to the wrapped arg.
      val configWithEnv = config.over(arg.key, CommandlineArgReader.parseArgumentValues(envVar))
      arg.apply(configWithEnv)
    }
  }
}
| rml/scala_args | src/main/scala/rml/args/arg/decorator/Env.scala | Scala | gpl-3.0 | 908 |
package example
import akka.http.scaladsl.marshalling._
import org.atnos.eff.Eff
import scala.concurrent.Future
/**
 * akka-http marshalling support for Eff computations over the application
 * effect stack.
 */
trait EffectStackMarshaller {
  // Marshals an Eff[Stack, A] by interpreting it to a Future[Result[A]] via
  // runEffect and delegating to the implicit Future marshaller `m`.
  // NOTE(review): runEffect, Result and Stack are presumably defined in the
  // `example` package object — confirm against the rest of the project.
  implicit def effectStackMarshaller[A, B](implicit
    m: Marshaller[Future[Result[A]], B]
  ): Marshaller[Eff[Stack, A], B] = Marshaller { implicit ec => eff => m(runEffect(eff)) }
}

/** Importable instance of the trait for non-mixin usage. */
object EffectStackMarshaller extends EffectStackMarshaller
| gabro/eff-api | src/main/scala/marshallers/EffectStackMarshaller.scala | Scala | mit | 389 |
package is.hail.methods
import java.io.{BufferedOutputStream, OutputStreamWriter}
import is.hail.HailContext
import is.hail.annotations.{UnsafeIndexedSeq, UnsafeRow}
import is.hail.backend.spark.SparkBackend
import is.hail.expr.TableAnnotationImpex
import is.hail.expr.ir.{ExecuteContext, MatrixValue}
import is.hail.expr.ir.functions.MatrixToValueFunction
import is.hail.types.{MatrixType, RTable, TypeWithRequiredness}
import is.hail.types.virtual.{TVoid, Type}
import is.hail.io.fs.FileStatus
import is.hail.utils._
import org.apache.spark.TaskContext
import org.apache.spark.sql.Row
/**
 * Exports a matrix table as one TSV file per column (sample), processing
 * columns in batches of `parallelism`. Each batch writes per-partition
 * fragments, merges them per column, and the final pass renames the merged
 * files and writes an index.tsv mapping file paths to column metadata JSON.
 */
case class MatrixExportEntriesByCol(parallelism: Int, path: String, bgzip: Boolean,
  headerJsonInFile: Boolean, useStringKeyAsFileName: Boolean) extends MatrixToValueFunction {
  // This function produces no value; it only writes files.
  def typ(childType: MatrixType): Type = TVoid

  def unionRequiredness(childType: RTable, resultType: TypeWithRequiredness): Unit = ()

  def execute(ctx: ExecuteContext, mv: MatrixValue): Any = {
    val fs = ctx.fs
    fs.delete(path, recursive = true) // overwrite by default

    val padding = digitsNeeded(mv.nCols)
    // Output file names: either the (unique) string sample keys, or
    // zero-padded column indices.
    val fileNames: IndexedSeq[String] = if (useStringKeyAsFileName) {
      val ids = mv.stringSampleIds
      if (ids.toSet.size != ids.length) // there are duplicates
        fatal("export_entries_by_col cannot export with 'use_string_key_as_file_name' with duplicate keys")
      ids
    } else
      Array.tabulate(mv.nCols)(i => partFile(padding, i))

    // JSON rendering of every column's metadata, indexed by column.
    val allColValuesJSON = mv.colValues.javaValue.map(TableAnnotationImpex.exportAnnotation(_, mv.typ.colType)).toArray

    // Accumulates per-batch temporary folders for cleanup at the end.
    val tempFolders = new ArrayBuilder[String]

    info(s"exporting ${ mv.nCols } files in batches of $parallelism...")
    // Ceiling division: number of column batches.
    val nBatches = (mv.nCols + parallelism - 1) / parallelism
    val resultFiles = (0 until nBatches).flatMap { batch =>
      val startIdx = parallelism * batch
      val nCols = mv.nCols
      val endIdx = math.min(nCols, parallelism * (batch + 1))
      info(s"on batch ${ batch + 1 } of ${ nBatches }, columns $startIdx to ${ endIdx - 1 }...")

      val d = digitsNeeded(mv.rvd.getNumPartitions)

      val rvType = mv.rvd.rowPType
      val entriesIdx = MatrixType.getEntriesIndex(rvType)
      val entryArrayType = MatrixType.getEntryArrayType(rvType)
      val entryType = MatrixType.getEntryType(rvType)

      val partFileBase = path + "/tmp/"
      val extension = if (bgzip) ".tsv.bgz" else ".tsv"

      // Capture config in locals so the task closure doesn't serialize `this`.
      val localHeaderJsonInFile = headerJsonInFile

      // Column metadata JSON for just this batch, broadcast to executors.
      val colValuesJSON = HailContext.backend.broadcast(
        (startIdx until endIdx)
          .map(allColValuesJSON)
          .toArray)

      val fsBc = fs.broadcast
      val localTempDir = ctx.localTmpdir
      // Phase 1: each row partition writes one fragment file per column in
      // this batch (rows of that partition, entries of that column).
      val partFolders = mv.rvd.crdd.cmapPartitionsWithIndex { (i, ctx, it) =>
        val partFolder = partFileBase + partFile(d, i, TaskContext.get())

        // (temporary local path, final per-partition path) per batch column.
        val filePaths = Array.tabulate(endIdx - startIdx) { j =>
          val finalPath = partFolder + "/" + j.toString + extension
          val tempPath = ExecuteContext.createTmpPathNoCleanup(localTempDir, "EEBC", extension = extension)
          (tempPath, finalPath)
        }

        val fileHandles = filePaths.map { case (tmp, _) =>
          new OutputStreamWriter(new BufferedOutputStream(fsBc.value.create(tmp)), "UTF-8")
        }

        // Only the first partition writes the header (and optional '#'-prefixed
        // column-metadata JSON line).
        if (i == 0) {
          // write headers
          val header = (
            rvType.fieldNames.filter(_ != MatrixType.entriesIdentifier) ++ entryType.fieldNames
          ).mkString("\\t")

          fileHandles.zipWithIndex.foreach { case (f, jj) =>
            if (localHeaderJsonInFile) {
              f.write('#')
              f.write(colValuesJSON.value(jj))
              f.write('\\n')
            }
            f.write(header)
            f.write('\\n')
          }
        }

        it.foreach { ptr =>
          val entriesArray = new UnsafeIndexedSeq(entryArrayType, ctx.region, rvType.loadField(ptr, entriesIdx))

          val fullRow = new UnsafeRow(rvType, ctx.region, ptr)

          // Render the non-entry row fields once; they are shared by every
          // column file for this row.
          val rowFieldStrs = (0 until rvType.size)
            .filter(_ != entriesIdx)
            .map { rowFieldIdx =>
              TableAnnotationImpex.exportAnnotation(fullRow(rowFieldIdx), rvType.types(rowFieldIdx).virtualType)
            }.toArray

          fileHandles.indices.foreach { fileIdx =>
            val entryIdx = fileIdx + startIdx
            val os = fileHandles(fileIdx)

            rowFieldStrs.foreach { s =>
              os.write(s)
              os.write('\\t')
            }

            // Missing entry struct -> "NA" for every entry field.
            entriesArray(entryIdx) match {
              case null =>
                (0 until entryType.size).foreachBetween { _ =>
                  os.write("NA")
                }(os.write('\\t'))
              case r: Row =>
                (0 until entryType.size).foreachBetween { entryFieldIdx =>
                  os.write(TableAnnotationImpex.exportAnnotation(r.get(entryFieldIdx), entryType.types(entryFieldIdx).virtualType))
                }(os.write('\\t'))
            }
            os.write('\\n')
          }

          ctx.region.clear()
        }

        fileHandles.foreach { f =>
          f.flush()
          f.close()
        }

        filePaths.foreach { case (tempFile, destination) =>
          fsBc.value.copy(tempFile, destination, deleteSource = true)
        }

        Iterator(partFolder)
      }.collect()

      val ns = endIdx - startIdx
      // Phase 2: for each column in the batch, concatenate its fragments from
      // every partition folder into one merged file (one Spark task each).
      val newFiles = mv.sparkContext.parallelize(0 until ns, numSlices = ns)
        .map { sampleIdx =>
          val partFilePath = path + "/" + partFile(digitsNeeded(nCols), sampleIdx, TaskContext.get)
          val fileStatuses = partFolders.map(pf => fsBc.value.fileStatus(pf + s"/$sampleIdx" + extension))
          fsBc.value.copyMergeList(fileStatuses, partFilePath, deleteSource = false)
          partFilePath
        }.collect()

      tempFolders ++= partFolders

      newFiles
    }

    val extension = if (bgzip) ".tsv.bgz" else ".tsv"

    def finalPath(idx: Int): String = {
      path + "/" + fileNames(idx) + extension
    }

    // Phase 3: move merged batch outputs to their final names and write the
    // index file mapping each output path to its column metadata JSON.
    resultFiles.zipWithIndex.foreach { case (filePath, i) =>
      fs.copy(filePath, finalPath(i), deleteSource = true)
    }
    fs.delete(path + "/tmp", recursive = true)

    fs.writeTable(path + "/index.tsv", allColValuesJSON.zipWithIndex.map { case (json, i) =>
      s"${ finalPath(i) }\\t$json"
    })

    info("Export finished. Cleaning up temporary files...")

    // clean up temporary files
    val temps = tempFolders.result()
    val fsBc = fs.broadcast
    SparkBackend.sparkContext("MatrixExportEntriesByCol.execute").parallelize(temps, (temps.length / 32).max(1)).foreach { path =>
      fsBc.value.delete(path, recursive = true)
    }

    info("Done cleaning up temporary files.")
  }
}
| danking/hail | hail/src/main/scala/is/hail/methods/MatrixExportEntriesByCol.scala | Scala | mit | 6,702 |
package model
import skinny.DBSettings
import skinny.test._
import org.scalatest.fixture.FlatSpec
import org.scalatest._
import scalikejdbc._
import scalikejdbc.scalatest._
import org.joda.time._
class AccessLogUserUniqueSpec extends FlatSpec with Matchers with DBSettings with AutoRollback {
  // NOTE(review): empty scaffold spec (likely generated by skinny's task
  // generator) — no behaviors are verified yet. Add tests or remove the file.
}
| yoshitakes/skinny-task-example | src/test/scala/model/AccessLogUserUniqueSpec.scala | Scala | mit | 297 |
package college.semester
import college.CollegeEvent
import college.student.Student
/** Base type for all events emitted by the semester (class) aggregate. */
sealed abstract class SemesterEvent extends CollegeEvent

/** A class was created with the given name. */
case class ClassCreated(className: String) extends SemesterEvent
/** A student enrolled in the class. */
case class StudentEnrolled(student: Student.Id) extends SemesterEvent
/** A student's enrollment was cancelled. */
case class StudentCancelled(studentId: Student.Id) extends SemesterEvent
| nilskp/delta | src/test/scala/college/semester/events.scala | Scala | mit | 352 |
package spray.oauth.adapters.slick
import spray.oauth.adapters.slick.models.UserDAO
import spray.oauth.models.GrantType
import spray.oauth._
/**
* Created by hasanozgan on 04/08/14.
*/
/**
 * Slick-backed OAuth2 data handler.
 *
 * NOTE(review): every member below is an unimplemented stub (`???`) and will
 * throw scala.NotImplementedError if invoked — this object is a skeleton only.
 */
object SlickDataHandler extends OAuth2DataHandler {
  override def getUser(username: String, password: String): Option[AuthUser] = ???

  override def findAuthInfoByClient(clientId: String): Option[AuthInfo] = ???

  override def deleteCode(code: String): Unit = ???

  override def createAccessToken(authInfo: AuthInfo): Option[AccessToken] = ???

  override def getClient(clientId: String, clientSecret: String): Option[AuthClient] = ???

  override def getClient(clientId: String): Option[AuthClient] = ???

  override def createCode(authInfo: AuthInfo): Option[String] = ???

  override def findAuthInfoByRefreshToken(refreshToken: String): Option[AuthInfo] = ???

  override def refreshAccessToken(authInfo: AuthInfo, refreshToken: String): Option[AccessToken] = ???

  override def checkUserCredentials(username: String, password: String): Boolean = ???

  override def findAuthInfoByUser(clientId: String, user: AuthUser, grantType: GrantType.Value): Option[AuthInfo] = ???

  override def findAuthInfoByAccessToken(accessToken: String): Option[AuthInfo] = ???

  override def findAuthInfoByCode(code: String): Option[AuthInfo] = ???

  override def checkConsumerCredentials(clientId: String, clientSecret: String): Boolean = ???

  override def findAccessToken(info: AuthInfo): Option[AccessToken] = ???
}
| hasanozgan/spray-oauth | adapters/slick-adapter/src/main/scala/spray/oauth/adapters/slick/SlickDataHandler.scala | Scala | apache-2.0 | 1,503 |
import javax.servlet.ServletContext
import com.typesafe.scalalogging.LazyLogging
import org.scalatra.{LifeCycle}
import slick.driver.H2Driver.api.Database
import io.aigar.controller.{AdminController, GameController, LeaderboardController}
import io.aigar.game.GameThread
import io.aigar.model._
import io.aigar.score.ScoreThread
object ScalatraBootstrap {
  // Number of alphanumeric characters in the generated administrator password.
  final val PasswordLength = 28
}
/**
 * Scalatra lifecycle: wires the database, repositories, game/score threads
 * and controllers at startup, and shuts the threads down on destroy.
 */
class ScalatraBootstrap extends LifeCycle
    with LazyLogging {
  logger.info("Bootstrapping application.")

  // Assigned in appInit; null until init runs.
  var playerRepository: PlayerRepository = null
  var scoreRepository: ScoreRepository = null
  var game: GameThread = null
  var scoreThread: ScoreThread = null

  // Random alphanumeric admin password, seeded from SecureRandom.
  final val adminPassword = (new scala.util.Random(new java.security.SecureRandom())).alphanumeric.take(ScalatraBootstrap.PasswordLength).mkString

  final val version = "/1"
  final val path = s"/api$version"

  override def init(context: ServletContext): Unit = {
    val database = AigarDatabase.createDatabase(AigarDatabase.getRandomName, false)
    appInit(database, false)

    // NOTE(review): the admin password is intentionally written to the log so
    // the operator can read it — confirm log access is restricted.
    logger.info("****************************")
    logger.info("***Administrator password***")
    logger.info(adminPassword)
    logger.info("****************************")

    context.mount(new AdminController(adminPassword, game, playerRepository, scoreRepository), s"$path/admin/*")
    context.mount(new LeaderboardController(game, playerRepository), s"$path/leaderboard/*")
    context.mount(new GameController(game, playerRepository), s"$path/game/*")
  }

  /*
   * Separated method for testing purposes.
   */
  def appInit(database: Database, testing: Boolean): Unit = {
    playerRepository = new PlayerRepository(database)
    scoreRepository = new ScoreRepository(database)

    scoreThread = new ScoreThread(scoreRepository)
    game = new GameThread(scoreThread)

    // Tests drive the threads manually instead of starting them.
    if (!testing) {
      launchThreads
    }
  }

  private def closeDbConnection: Unit = {
    AigarDatabase.closeConnection
  }

  override def destroy(context: ServletContext): Unit = {
    super.destroy(context)
    closeDbConnection
    // Signal both worker threads to stop their run loops.
    scoreThread.running = false
    game.running = false
  }

  def launchThreads: Unit = {
    new Thread(scoreThread).start
    new Thread(game).start
  }
}
| DrPandemic/aigar.io | game/src/main/scala/ScalatraBootstrap.scala | Scala | mit | 2,243 |
package net.akmorrow13.endive.processing
import net.akmorrow13.endive.EndiveFunSuite
import scala.collection.mutable.ListBuffer
/** Tests for dinucleotide-shuffle utilities. */
class ShuffleSuite extends EndiveFunSuite {

  test("graph creation") {
    val testSeq = "AAACCC"
    val graph = Graph.form_seq_graph(testSeq)
    // "AAACCC" contains the doublets AA, AA, AC, CC, CC.
    assert(graph('A').filter(_ == 'A').length == 2)
    assert(graph('C').filter(_ == 'A').length == 0)
    assert(graph('A').filter(_ == 'C').length == 1)
    assert(graph('C').filter(_ == 'C').length == 2)
  }

  test("generate shuffled sequence") {
    // Fixed: this test previously made no assertions at all (it computed the
    // shuffle and an unused comparison string, then discarded both).
    val testSeq =
      "CACACCGCACTCCCCAGCAGAAGGCTGCAATCCCACCTCTCTGATACAACCCTGCGCCTTGAGATGCAATCTAAACTAGGACTCTTGGTACCTTATCAAAC"
    val shuffled = DinucleotideShuffle.doublet_shuffle(testSeq)
    // A dinucleotide (doublet) shuffle preserves the doublet counts of the
    // input, and therefore its length and base composition, even though the
    // resulting order is randomized.
    assert(shuffled.length == testSeq.length)
    assert(shuffled.toSeq.sorted == testSeq.toSeq.sorted)
  }
}
// AORTA is copyright (C) 2012 Dustin Carlino, Mike Depinet, and Piyush
// Khandelwal of UT Austin
// License: GNU GPL v2
package utexas.aorta.ui
import swing._ // TODO figure out exactly what
import java.awt.{Color, Component}
import swing.Dialog
import javax.swing.WindowConstants
import java.io.File
import utexas.aorta.sim.{Simulation, EV_Heartbeat}
import utexas.aorta.map.Graph
import utexas.aorta.analysis.SimREPL
import utexas.aorta.common.{Util, cfg}
/**
 * Status strip shown above the map canvas: zoom level, agent counts,
 * simulation time and speed. The labels are mutated in place by the canvas.
 */
class StatusBar() {
  val zoom       = new Label("1.0") // TODO from cfg
  val agents     = new Label("0 moved / 0 live / 0 ready")
  val time       = new Label("0.0")
  val sim_speed  = new Label("Paused / 1x")

  val panel = new GridBagPanel {
    maximumSize = new Dimension(Int.MaxValue, 10)
    border = Swing.MatteBorder(5, 5, 5, 5, Color.BLACK)

    // TODO generate these?
    // all of this to prevent the rightmost 'At' column from spazzing out when
    // the text changes length

    // NOTE: a single Constraints object is reused and mutated between
    // layout(...) calls, so the statement order below is significant.

    // row 1: labels
    val c = new Constraints
    c.gridx = 0
    c.gridy = 0
    c.ipadx = 50
    layout(new Label("Zoom")) = c
    c.gridx = 1
    layout(new Label("Agents Active/Ready/Routing")) = c
    c.gridx = 2
    layout(new Label("Time")) = c
    c.gridx = 3
    layout(new Label("Sim Speed (Actual/Cap)")) = c

    // row 2: columns
    c.weightx = 0.0
    c.ipadx = 50
    c.gridx = 0
    c.gridy = 1
    layout(zoom) = c
    c.gridx = 1
    layout(agents) = c
    c.gridx = 2
    layout(time) = c
    c.gridx = 3
    layout(sim_speed) = c
  }
}
// TODO SwingApplication has a startup, quit, shutdown...
/**
 * Swing entry point for AORTA. Owns the primary (and optional secondary,
 * side-by-side) map canvas, drives the simulation stepping thread, and builds
 * the main window with its menus.
 */
object GUI extends SimpleSwingApplication {
  val road_types = List(
    "null", "residential", "unclassified", "secondary",
    "motorway_link", "motorway", "trunk_link", "secondary_link", "primary_link",
    "tertiary", "primary", "service"
  )

  // null just because it's parametric from argv
  var primary_canvas_2d: MapCanvas = null
  // for side-by-side mode
  var secondary_canvas_2d: MapCanvas = null
  val layer_menu = new Menu("Road Color Layer")

  // Side-by-side mode is active when a second canvas was created in main().
  def side_by_side = secondary_canvas_2d != null

  // TODO use this finally?
  // Static keybinding cheat-sheet panel (currently unused).
  val helper = new BoxPanel(Orientation.Vertical) {
    preferredSize = new Dimension(Int.MaxValue, Int.MaxValue)
    border = Swing.MatteBorder(5, 5, 5, 5, Color.BLACK)
    yLayoutAlignment = java.awt.Component.TOP_ALIGNMENT
    // TODO These're fixed now, but the idea is to tie them to configuration and
    // add/remove some context-sensitively. And also organize better.

    // Simulation controls
    contents += new Label("p pause/resume")
    contents += new Label("[ slow down time")
    contents += new Label("] speed up time")
    contents += new Label("- slown down time faster")
    contents += new Label("= speed up time faster")

    // Actions
    contents += new Label("m make new agent on current edge")
    contents += new Label("c choose edge for pathfinding")
    contents += new Label("d object-sensitive debug")
    contents += new Label("f follow agent")
    contents += new Label("x delete agent (may crash!)")

    // Polygons
    contents += new Label("Shift+Left click draw a polygon")
    contents += new Label("Shift+s begin/end agents in polygon")
    contents += new Label("Shift+p change intersection policies")

    // View
    contents += new Label("r reset view")
    contents += new Label("g toggle greenflood colors")
    contents += new Label("CTRL cycle through turns")
    contents += new Label("arrow keys pan")

    // TODO expand to fill the whole column, or otherwise work on aesthetics
    // TODO and option to hide the panel
  }

  // True when the GUI was launched on demand from a headless run.
  private var headless = false
  var closed = false

  override def main(args: Array[String]) {
    // TODO this'll clobber one sim and one flag, right?
    // Two map arguments -> side-by-side comparison mode.
    if (args.size == 2) {
      primary_canvas_2d = new MapCanvas(Util.process_args(Array(args(0))))
      // TODO slightly messy hack for now.
      Graph.fresh_copy = true
      secondary_canvas_2d = new MapCanvas(Util.process_args(Array(args(1))))
      Graph.fresh_copy = false
    } else {
      primary_canvas_2d = new MapCanvas(Util.process_args(args))
    }

    if (!headless) {
      // Fire steps every now and then. Secondary sim will stay synched to primary.
      new Thread {
        override def run() {
          while (true) {
            val state = primary_canvas_2d.get_state
            if (state.running && state.speed_cap > 0) {
              val start_time = System.currentTimeMillis
              primary_canvas_2d.sim.step()
              if (secondary_canvas_2d == null) {
                primary_canvas_2d.rerender()
              } else {
                // Step both sims in lockstep before rendering either.
                secondary_canvas_2d.sim.step()
                primary_canvas_2d.rerender()
                secondary_canvas_2d.rerender()
              }

              // Rate-limit, if need be.
              // In order to make speed_cap ticks per second, each tick needs to
              // last 1000 / speed_cap milliseconds.
              val goal =
                if (state.speed_cap > 0)
                  (1000 / state.speed_cap).toInt
                else
                  0
              val dt_ms = System.currentTimeMillis - start_time
              if (dt_ms < goal) {
                // Ahead of schedule. Sleep.
                Thread.sleep(goal - dt_ms)
              }
            } else {
              // Just avoid thrashing the CPU.
              Thread.sleep(100)
            }
          }
        }
      }.start()
    }

    // TODO doesnt start drawn correctly!
    primary_canvas_2d.repaint
    super.main(args)
  }

  // Entry used by GUIDebugger: adopt an existing canvas and open the window
  // without starting the internal stepping thread.
  def launch_from_headless(canvas: MapCanvas) {
    headless = true
    primary_canvas_2d = canvas
    super.main(Array())
  }

  def top = new MainFrame {
    title = "AORTA"
    preferredSize = new Dimension(800, 600)
    // Close handling is done manually in closeOperation below.
    peer.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE)

    override def closeOperation() {
      if (headless) {
        // Headless runs keep the JVM alive; just hide the window.
        println("Closing GUI...")
        close
        closed = true
      } else {
        sys.exit
      }
    }

    menuBar = new MenuBar {
      contents += new Menu("File") {
        contents += new Separator
        contents += new MenuItem(Action("Quit") {
          sys.exit
        })
      }
      contents += new Menu("View") {
        contents += new Menu("Highlight type of road") {
          contents ++= road_types.map(t => new MenuItem(Action(t) {
            primary_canvas_2d.handle_ev(EV_Param_Set("highlight", Some(t)))
          }))
        }
        contents += new MenuItem(Action("Clear all highlighting") {
          primary_canvas_2d.handle_ev(EV_Param_Set("highlight", None))
        })
      }
      contents += new Menu("Query") {
        contents += new MenuItem(Action("Teleport to Edge") {
          primary_canvas_2d.handle_ev(EV_Action("teleport-edge"))
        })
        contents += new MenuItem(Action("Teleport to Road") {
          primary_canvas_2d.handle_ev(EV_Action("teleport-road"))
        })
        contents += new MenuItem(Action("Teleport to Agent") {
          primary_canvas_2d.handle_ev(EV_Action("teleport-agent"))
        })
        contents += new MenuItem(Action("Teleport to Vertex") {
          primary_canvas_2d.handle_ev(EV_Action("teleport-vertex"))
        })
        contents += new MenuItem(Action("Clear Route") {
          primary_canvas_2d.handle_ev(EV_Action("clear-route"))
        })
      }
      contents += layer_menu
    }

    val main_split = new BorderPanel {
      background = Color.LIGHT_GRAY
      border = Swing.MatteBorder(2, 2, 2, 2, Color.RED)
      add(primary_canvas_2d.statusbar.panel, BorderPanel.Position.North)
      add(primary_canvas_2d, BorderPanel.Position.Center)
    }
    if (side_by_side) {
      val secondary_split = new BorderPanel {
        background = Color.LIGHT_GRAY
        border = Swing.MatteBorder(2, 2, 2, 2, Color.RED)
        add(secondary_canvas_2d.statusbar.panel, BorderPanel.Position.North)
        add(secondary_canvas_2d, BorderPanel.Position.Center)
      }
      // TODO resizing entire window doesn't work great yet
      contents = new SplitPane(Orientation.Vertical, main_split, secondary_split) {
        dividerLocation = 400
      }
    } else {
      contents = main_split
    }
  }
}
// TODO make pause work with whoever's calling us
/**
 * Lets a headless simulation pop up (or resume) a GUI on demand: on every
 * simulation heartbeat it checks for a signal file and, if present, launches
 * or reopens the window.
 */
// TODO make pause work with whoever's calling us
class GUIDebugger(sim: Simulation) {
  // When this file exists, launch a GUI for sudden interactive watching.
  private val gui_signal = new File(".headless_gui")

  // Lazily-created canvas; None until the first signal is seen.
  private var gui: Option[MapCanvas] = None
  sim.listen(classOf[EV_Heartbeat], _ match { case e: EV_Heartbeat => {
    if (gui_signal.exists) {
      // Consume the signal file so it acts as a one-shot trigger.
      gui_signal.delete()
      gui match {
        case Some(ui) => {
          // Canvas already exists; reopen the window if it was closed.
          if (GUI.closed) {
            println("Resuming the GUI...")
            GUI.top.open()
            GUI.closed = false
          }
        }
        case None => {
          println("Launching the GUI...")
          gui = Some(new MapCanvas(sim))
          GUI.launch_from_headless(gui.get)
        }
      }
    }
    // Keep the visible canvas redrawing on every heartbeat.
    gui match {
      case Some(ui) if !GUI.closed => ui.handle_ev(EV_Action("render"))
      case _ =>
    }
  }})
}
/** Simulation REPL that additionally binds the GUI's primary canvas as `gui`. */
class GUIREPL(sim: Simulation) extends SimREPL(sim) {
  override def welcome() {
    super.welcome()
    // Fixed: the interpreted snippet previously referenced GUI.canvas_2d,
    // but the GUI object only defines primary_canvas_2d (and
    // secondary_canvas_2d), so the binding failed at runtime.
    e.interpret("val gui = utexas.aorta.ui.GUI.primary_canvas_2d")
    println("The GUI is bound to 'gui'.")
  }
}
| dabreegster/aorta | utexas/aorta/ui/GUI.scala | Scala | gpl-2.0 | 9,349 |
package com.thoughtworks.sbt.sbteo
import com.thoughtworks.sbt.sbteo.Api.CursorPosition
import com.thoughtworks.sbt.sbteo.steps.{GivenApi, GivenBasicSource, GivenCompiler}
import org.specs2.mutable._
import org.specs2.specification.Scope
/**
 * Specs for the autocomplete Api. Runs sequentially because the fixtures
 * share a compiler instance.
 */
class ApiSpecs extends Specification {
  sequential

  "An api" should {
    "with basic settings" should {
      "autocomplete on a basic file" should {
        // Fixture combining an Api, a shared source document and a compiler.
        trait Subject extends GivenApi with GivenBasicSource with GivenCompiler with Scope {
          // Requests completions for the shared source document at (row, col).
          // Left = suggestions, Right = failure (per the Api contract —
          // TODO confirm against Api.autocomplete).
          def suggestionsAt(row:Int, col: Int) = {
            api.autocomplete(sourceDocument.split("\\n"), new CursorPosition(row,col))
          }
        }
        "the suggestions at import statement" should {
          "not suggest local vars" in new Subject {
            suggestionsAt(1, 1).left.get.map(ac => ac.symbol) must not contain "x"
          }
          "suggest classes" in new Subject {
            suggestionsAt(1,1).left.get.map(ac => ac.symbol) must contain("X")
          }
        }
        "the suggestions at inside a() method" should {
          "suggest variables" in new Subject {
            suggestionsAt(4,8) must beLeft
          }
          "contain a local variable" in new Subject {
            suggestionsAt(4, 8).left.get.map(ac => ac.symbol) must contain("x")
          }
        }
      }
    }
  }
}
| thoughtworks/sbteo | src/test/scala/com/thoughtworks/sbt/sbteo/ApiSpecs.scala | Scala | apache-2.0 | 1,329 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle
import java.io.File
import java.util.Date
import scala.io.Codec
import com.typesafe.config.ConfigFactory
import java.net.URLClassLoader
import java.net.URL
/**
 * Parsed command-line options for the scalastyle CLI.
 * `error` is set when parsing failed or required options are missing;
 * `directories` holds the positional source-directory arguments.
 */
case class MainConfig(error: Boolean,
                      config: Option[String] = None,
                      directories: List[String] = List(),
                      verbose: Boolean = false,
                      quiet: Boolean = false,
                      warningsaserrors: Boolean = false,
                      xmlFile: Option[String] = None,
                      xmlEncoding: Option[String] = None,
                      inputEncoding: Option[String] = None,
                      externalJar: Option[String] = None,
                      excludedFiles: Seq[String] = Nil)
object Main {
// scalastyle:off regex
private def usage(version: String) = {
println("scalastyle " + version)
println("Usage: scalastyle [options] <source directory>")
println(" -c, --config FILE configuration file (required)")
println(" -v, --verbose true|false verbose output")
println(" -q, --quiet true|false be quiet")
println(" --xmlOutput FILE write checkstyle format output to this file")
println(" --xmlEncoding STRING encoding to use for the xml file")
println(" --inputEncoding STRING encoding for the source files")
println(" -w, --warnings true|false fail if there are warnings")
println(" -e, --externalJar FILE jar containing custom rules")
println(" -x, --excludedFiles STRING regular expressions to exclude file paths (delimitted by semicolons)")
System.exit(1)
}
// scalastyle:on regex
private def isTrue(s: String) = "true" equalsIgnoreCase s
def parseArgs(args: Array[String]): MainConfig = {
var config = MainConfig(false)
var i = 0
while (i < args.length) {
if (args(i).startsWith("-") && i < args.length - 1) {
args(i) match {
case ("-c" | "--config") => config = config.copy(config = Some(args(i + 1)))
case ("-v" | "--verbose") => config = config.copy(verbose = isTrue(args(i + 1)))
case ("-q" | "--quiet") => config = config.copy(quiet = isTrue(args(i + 1)))
case ("-w" | "--warnings") => config = config.copy(warningsaserrors = isTrue(args(i + 1)))
case ("--xmlOutput") => config = config.copy(xmlFile = Some(args(i + 1)))
case ("--xmlEncoding") => config = config.copy(xmlEncoding = Some(args(i + 1)))
case ("--inputEncoding") => config = config.copy(inputEncoding = Some(args(i + 1)))
case ("-e" | "--externalJar") => config = config.copy(externalJar = Some(args(i + 1)))
case ("-x" | "--excludedFiles") => config = config.copy(excludedFiles = args(i + 1).split(";"))
case _ => config = config.copy(error = true)
}
i = i + 2
} else {
config = config.copy(directories = args(i) :: config.directories)
i = i + 1
}
}
if (!config.config.isDefined || config.directories.size == 0) {
config = config.copy(error = true)
}
config
}
/** CLI entry point: parse the arguments, run the check, exit non-zero on failure. */
def main(args: Array[String]): Unit = {
  val config = parseArgs(args)
  val exitVal =
    if (config.error) {
      usage(BuildInfo.version)
      1
    } else if (execute(config)) {
      1
    } else {
      0
    }
  System.exit(exitVal)
}
private[this] def now(): Long = new Date().getTime()
/**
 * Runs the scalastyle check described by `mc`: loads the XML configuration,
 * optionally adds an external rules jar to the classpath, checks every file
 * under the configured directories and writes text (and optionally
 * checkstyle-XML) output.
 *
 * @return true when the run should be treated as failed, i.e. errors were
 *         found, or warnings were found and `warningsaserrors` is set
 */
private[this] def execute(mc: MainConfig)(implicit codec: Codec): Boolean = {
  val start = now()
  val configuration = ScalastyleConfiguration.readFromXml(mc.config.get)
  // map (not flatMap + Some) the optional external jar into a classloader for custom rules
  val cl = mc.externalJar.map(j => new URLClassLoader(Array(new java.io.File(j).toURI().toURL())))
  val messages = new ScalastyleChecker(cl).checkFiles(configuration, Directory.getFiles(mc.inputEncoding, mc.directories.map(new File(_)).toSeq, excludedFiles=mc.excludedFiles))
  // scalastyle:off regex
  val config = ConfigFactory.load(cl.getOrElse(this.getClass().getClassLoader()))
  val outputResult = new TextOutput(config, mc.verbose, mc.quiet).output(messages)
  // write checkstyle-format XML only when an output file was requested
  mc.xmlFile.foreach { x =>
    val encoding = mc.xmlEncoding.getOrElse(codec.charSet).toString
    XmlOutput.save(config, x, encoding, messages)
  }
  if (!mc.quiet) println("Processed " + outputResult.files + " file(s)")
  if (!mc.quiet) println("Found " + outputResult.errors + " errors")
  if (!mc.quiet) println("Found " + outputResult.warnings + " warnings")
  if (!mc.quiet) println("Finished in " + (now() - start) + " ms")
  // scalastyle:on regex
  outputResult.errors > 0 || (mc.warningsaserrors && outputResult.warnings > 0)
}
}
| asaitov/scalastyle | src/main/scala/org/scalastyle/Main.scala | Scala | apache-2.0 | 5,314 |
import scalatags.Text.all._
object TestPageAssets {
/** Renders the test page as a complete HTML document string. */
def index: String = {
  val headSection = head(
    base(href := "/"),
    meta(name := "viewport", content := "width=device-width, initial-scale=1.0")
  )
  // The body is intentionally empty; content is added client-side.
  val bodySection = body()
  "<!DOCTYPE html>" + html(headSection, bodySection)
}
} | Karasiq/scalajs-videojs | project/TestPageAssets.scala | Scala | mit | 288 |
package com.citypay.pan.search
import com.citypay.pan.search.io.IndexedByteBuffer
import com.citypay.pan.search.util.{Location, LuhnCheck, Tracer}
/**
 * Result of inspecting a candidate region of the buffer.
 *
 * Variants:
 *  - [[InspectedProposedFind]] — a Luhn-valid PAN candidate was found
 *  - [[InspectedPotential]] — not decidable yet; more data is required
 *  - [[InspectionNoMatch]] — definitely not a match
 */
trait InspectionResult {}
// A Luhn-valid candidate: the matched digits, its buffer offset, and its start/end locations.
final case class InspectedProposedFind(value: String, offset: Int, from: Location, to: Location) extends InspectionResult
// Matched so far but incomplete; scanning should continue once more data is gathered.
final case class InspectedPotential(offset: Int, from: Location, to: Location) extends InspectionResult
// Singleton "definitely not a match" marker.
object InspectionNoMatch extends InspectionResult {
  override def toString: String = "InspectionNoMatch"
}
/**
* Search for a pan spec by scanning the inspection buffer provided from a reviewer
*/
object InspectionScanner {
def isPrefixDelimiter(b: Byte): Boolean = b < '0' || b > '9' && b < 'A' || b > 'Z' && b < 'a' || b > 'z'
/**
 * Runs an inspection of collated data, trying to match `spec` starting
 * exactly at `offset` in the buffer.
 *
 * @param label a label for the inspection, used only for tracing
 * @param spec the PAN spec (leading digits plus length bounds) to match
 * @param inspectionBuffer the buffer containing the collated candidate data
 * @param offset index in the buffer where the candidate starts
 * @param limit exclusive upper bound of the data gathered so far
 * @return [[InspectionNoMatch]] when this is definitely not a match,
 *         [[InspectedProposedFind]] when a Luhn-valid PAN was found, or
 *         [[InspectedPotential]] when more data is required to decide
 */
def apply(label: String,
          spec: PanSpec,
          inspectionBuffer: IndexedByteBuffer,
          offset: Int,
          limit: Int
         ): InspectionResult = {
  val _tracer = new Tracer("inspectionScanner")
  import _tracer._
  def scan(): InspectionResult = {
    // line/column of the first matched byte, captured once inside the loop
    var startLine: Int = 0
    var startCol: Int = 0
    for (i <- offset until limit) {
      val b = inspectionBuffer.get(i)
      trace("InspectionScanner: %s, spec=%s, i=%06d, offset=%06d, limit=%06d, b=%s (0x%02X)", label, spec, i, offset, limit, b.toChar, b)
      // breakout if non numeric (0x30..0x39 are ASCII '0'..'9')
      if (b < 0x30 || b > 0x39) {
        //noinspection ScalaStyle
        return InspectionNoMatch
      }
      // search using a linear approach O(nm), any search methods such as KMP/BM are inappropriate for this context
      if (i - offset < spec.leadingLen) {
        trace(", leading")
        // if we are in the leading range, the value must be a direct match, continue or breakout as false
        if (spec.leadingBytes(i - offset) != b) {
          trace(", mismatch (%02X != %02X)", spec.leadingBytes(i - offset), b)
          //noinspection ScalaStyle
          return InspectionNoMatch
        }
        if (startLine == 0) {
          startLine = inspectionBuffer.channelIndexLineNo(i)
          startCol = inspectionBuffer.channelIndexColNo(i)
        }
        // if we are at the length, then run a luhn check to verify
        // NOTE(review): this compares the absolute buffer index i (not the
        // candidate-relative i - offset) against spec.length; confirm this is
        // intentional for candidates starting at a non-zero offset.
      } else if (i >= spec.length - 1) {
        trace(s"proposed(offset=$offset, len=${i + 1}, arrlen=(${inspectionBuffer.length}))")
        val proposed = inspectionBuffer.toString(offset, i + 1)
        if (tracer(", luhn")(LuhnCheck(proposed))) {
          // the proposed value has passed a luhn check
          // to prevent false positives, ensure that the value before the offset is a non alpha numeric value
          // a lot of false positives can be found in id strings such as session ids or UIDs
          if (offset > 0) {
            val prefix = inspectionBuffer.get(offset - 1)
            if (isPrefixDelimiter(prefix)) {
              //noinspection ScalaStyle
              return InspectedProposedFind(proposed, offset, Location(startLine, startCol),
                Location(inspectionBuffer.channelIndexLineNo(i), inspectionBuffer.channelIndexColNo(i))
              )
            } else {
              // False positive: preceded by an alphanumeric, likely part of a longer id
              return InspectionNoMatch
            }
          } else {
            // candidate starts at the very beginning of the buffer: no prefix to check
            //noinspection ScalaStyle
            return InspectedProposedFind(proposed, offset, Location(startLine, startCol),
              Location(inspectionBuffer.channelIndexLineNo(i), inspectionBuffer.channelIndexColNo(i))
            )
          }
        } else if (i >= spec.maxLength - 1) {
          // Luhn failed and the maximum candidate length is reached: give up
          traceEnd(", len-exhausted")
          //noinspection ScalaStyle
          return InspectionNoMatch
        }
      }
      traceEnd(", matching...")
    }
    // if nothing yet found, return as potential (more data may complete the match)
    InspectedPotential(offset, Location(startLine, startCol),
      Location(inspectionBuffer.channelIndexLineNo(limit), inspectionBuffer.channelIndexColNo(limit))
    )
  }
  try {
    val result = scan()
    trace(s" returning Inspection Result -> $result")
    result
  } finally {
    traceEnd()
  }
}
}
| citypay/citypay-pan-search | src/main/scala/com/citypay/pan/search/InspectionScanner.scala | Scala | mit | 5,041 |
package vonsim.webapp.i18n
import vonsim.simulator._
/** Companion holding the ISO 639-1 language code for the Spanish UI strings. */
object Spanish{
def code="es"
}
/** Spanish translations of all simulator UI strings.
  * All string literals below are user-facing runtime text and must stay in Spanish.
  * Lines inside triple-quoted strings are part of the rendered output; do not reformat them. */
class Spanish extends UILanguage {
def code =Spanish.code
def and="y"
// --- Page and window titles ---
def iconTitle=appName+": "+pageTitle
def pageTitle="Un simulador de la arquitectura 8088"
def pageTitleExtended="Un simulador simplificado de la arquitectura 8088, similar al MSX88"
def alertURLNotFound(url:String)="No se pudo cargar la URL "+url
// --- Help / about section ---
def helpGithubPage="Github"
def helpReportIssue="Reportar error"
def helpIntendedFor="Este simulador fue diseñado para su uso en las siguientes asignaturas de la Universidad Nacional de La Plata:"
def helpMadeBy="Hecho por"
def helpWithHelpFrom="con ayuda de"
def helpFeedbackWelcome="Los comentarios son bienvenidos en"
// --- Toolbar buttons and their tooltips (hotkey names are interpolated) ---
def controlsDebugButton="Depurar"
def controlsDebugTooltip=s"$controlsDebugOrAbortHotkey: Cargar el programa en la memoria sin comenzar la ejecución, para realizar una ejecución paso a paso."
def controlsStopButton="Abortar"
def controlsStopTooltip= s"$controlsDebugOrAbortHotkey: Abortar ejecución y salir del modo de depuración."
def controlsQuickButton="Ejecución Rápida"
def controlsQuickTooltip=s"$controlsQuickHotkey: Reiniciar simulador, cargar el programa en la memoria, y ejecutar hasta que la CPU se detenga."
def controlsStepButton="Paso"
def controlsStepTooltip=s"$controlsStepHotkey: Ejecutar la próxima instrucción."
def controlsFinishButton="Finalizar"
def controlsFinishTooltip= s"$controlsFinishHotkey: Ejecutar el programa hasta que la CPU se detenga."
// Short status-bar message for each simulator state.
def stateToMessage(state:SimulatorState)= state match{
case SimulatorExecutionError(msg) => "Error de ejecución"
case SimulatorExecutionFinished => "Ejecución finalizada"
case SimulatorExecutionStopped => "No hay programa cargado"
case SimulatorProgramExecuting => "Programa en ejecución"
}
// Longer tooltip for each simulator state.
def stateToTooltip(state:SimulatorState)= state match{
case SimulatorExecutionError(error) => "La ejecución se ha detenido por el siguiente error: "+error.message
case SimulatorExecutionFinished => "La ejecución ha finalizado sin errores."
case SimulatorExecutionStopped => "No hay un programa cargado en el simulador. Realiza una ejecución rápida o inicia el modo de depuración."
case SimulatorProgramExecuting => s"""El programa está ejecutándose en modo depuración.
Podés ejecutar instrucciones una a la vez con $controlsStepButton, o ejecutar el programa hasta que termine con $controlsFinishButton.
Mientras el programa está ejecutándose no se puede modificar el código en el editor."""
}
// Explains a CPU flag (C/Z/O/S) and its current value `v`.
def cpuFlagDescription(f:Flag,v:String) = {
val description=f match{
case C => s"El flag $f, por (C)arry, toma el valor 1 cuando hay carry o borrow al realizar la operación. Si los operandos se interpretan en BSS, implica que el resultado es erróneo, ya que era o un número negativo si hay borrow o un número con más bits de los disponibles is hay carry."
case Z => s"El flag $f, por (Z)ero, toma el valor 1 cuando todos los bits del resultado son 0."
case O => s"El flag $f, por (O)verflow, toma el valor 1 cuando el resultado es erróneo si se interpretan los números en el sistema CA2. "
case S => s"El flag $f, por (S)ign, toma el valor 1 si el bit de más a la izquierda del resultado es un 1. Si el resultado se interpreta en CA2, implica que el mismo es negativo."
}
s"El flag $f tiene el valor $v.\n"+description
}
// --- Panel titles ---
def flags="Flags"
def aluTitle="ALU"
def cpuTitle="CPU"
def cpuSpecialRegisters="Registros Especiales"
def cpuGeneralPurposeRegisters="Registros de Propósito General"
def memoryTitle="Memoria"
def addressSearch="Ver dirección de memoria"
def alertCompilationFailed="La compilación ha fallado, no se puede cargar el programa."
def describeInstruction(i:Instruction)="Instrucción correcta."
// Multi-line description of a memory cell in several numeral systems.
def describeMemoryCell(address:Int,value:Word)={
s"""Celda de memoria con dirección ${formatAddress(address)}h y valor:
Hexadecimal: ${formatWord(value)}h
Binario: ${value.bitString.reverse}
CA2: ${value.toInt}
BSS: ${value.toUnsignedInt}
"""
}
} | facundoq/vonsim | src/main/scala/vonsim/webapp/i18n/Spanish.scala | Scala | agpl-3.0 | 4,177 |
package org.automanlang.core.policy.aggregation
// this is kind of a dirty hack for now
// this is kind of a dirty hack for now
/** Contract exposing a minimum count.
  * NOTE(review): the exact semantics of `min` (presumably the minimum number
  * of tasks to spawn, per the package name) are not visible here — confirm
  * against the implementing policies. */
trait MinimumSpawnPolicy {
  // minimum value supplied by the concrete policy
  def min: Int
}
| dbarowy/AutoMan | libautoman/src/main/scala/org/automanlang/core/policy/aggregation/MinimumSpawnPolicy.scala | Scala | gpl-2.0 | 133 |
package netcaty.tcp.client
import io.netty.channel.ChannelInitializer
import io.netty.channel.socket.SocketChannel
import io.netty.handler.codec.FixedLengthFrameDecoder
import io.netty.util.concurrent.Promise
import netcaty.{tcp, Ssl}
/**
 * Sets up the channel pipeline for an outbound TCP/HTTPS client connection:
 * an optional TLS handler (when `https`), a fixed-length frame decoder sized
 * to the expected response, and a response handler that either fulfils the
 * promise or delegates to the supplied handler.
 */
class PipelineInitializer(
    https: Boolean, responseLength: Int, resPromise_or_handler: Either[Promise[Array[Byte]], tcp.ResponseHandler]
) extends ChannelInitializer[SocketChannel]
{
  // Explicit ": Unit =" replaces the deprecated Scala procedure syntax.
  def initChannel(ch: SocketChannel): Unit = {
    val p = ch.pipeline
    // TLS must be first in the pipeline so all subsequent handlers see plaintext.
    if (https) p.addLast(Ssl.clientContext.newHandler(ch.alloc))
    p.addLast(
      new FixedLengthFrameDecoder(responseLength),
      new ResponseHandler(resPromise_or_handler)
    )
  }
}
| ngocdaothanh/netcaty | src/main/scala/netcaty/tcp/client/PipelineInitializer.scala | Scala | mit | 680 |
package scala.concurrent.duration
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.junit.Test
@RunWith(classOf[JUnit4])
class SerializationTest {

  /**
   * Regression test for SI-9197: the sentinel Duration values must remain
   * singletons across Java serialization (via readResolve), so reference
   * identity (`eq`) is asserted rather than mere equality.
   */
  @Test
  def test_SI9197: Unit = {
    // Serialize to bytes. The variable previously named `bais` is an
    // *output* stream, so it is renamed `baos`; the ObjectOutputStream is
    // flushed before reading the bytes so buffered block data is not lost.
    def ser(a: AnyRef): Array[Byte] = {
      val baos = new java.io.ByteArrayOutputStream
      val oos = new java.io.ObjectOutputStream(baos)
      oos.writeObject(a)
      oos.flush()
      baos.toByteArray
    }
    // Deserialize a single object from the byte array.
    def des(ab: Array[Byte]): AnyRef =
      (new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(ab))).readObject

    assert(Duration.Undefined eq des(ser(Duration.Undefined)))
    assert(Duration.Inf eq des(ser(Duration.Inf)))
    assert(Duration.MinusInf eq des(ser(Duration.MinusInf)))
  }
}
| martijnhoekstra/scala | test/junit/scala/concurrent/duration/SerializationTest.scala | Scala | apache-2.0 | 703 |
package textengine.messengers
/** User-facing status messages shown around a sort operation. */
class SortMessenger {
  val duringMessage: String = "Sorting lines, please wait"
  val successMessage: String = "Done sorting"
  val failureMessage: String = "Failed to sort"
}
object SortMessenger {
def apply(): SortMessenger = new SortMessenger()
} | billpcs/LithePad | src/main/scala/textengine/messengers/SortMessenger.scala | Scala | mit | 270 |
package utils.azure
import java.io.{ File, FileInputStream }
import java.util.Date
import com.microsoft.azure.storage.CloudStorageAccount
import com.google.inject.Inject
import play.Logger
import utils.Helper
import scala.concurrent.Future
/**
* Created by jlzie on 14.04.2017.
*/
/**
 * Thin wrapper around Azure Blob Storage for picture uploads/deletes.
 * All blobs live in the "pictures" container of the account configured under
 * `octotagger.azure.pictureblob.connection.string`.
 */
class BlobStorage @Inject() (configuration: play.api.Configuration) {
  // Both public methods run on Play's default execution context.
  import play.api.libs.concurrent.Execution.Implicits.defaultContext
  // Define the connection-string with your values
  lazy val storageAccount = CloudStorageAccount.parse(configuration.underlying.getString("octotagger.azure.pictureblob.connection.string"))
  /** Deletes the blob addressed by `url` (only the segment after the last '/'
    * is used as the blob name). Returns whether a blob was actually deleted.
    * Exceptions are printed and then rethrown, failing the Future. */
  def delete(url: String): Future[Boolean] = Future[Boolean] {
    try {
      val blobClient = storageAccount.createCloudBlobClient
      val container = blobClient.getContainerReference("pictures")
      val blob = container.getBlockBlobReference(url.substring(url.lastIndexOf('/') + 1))
      blob.deleteIfExists()
    } catch {
      case e: Exception =>
        // Output the stack trace.
        e.printStackTrace()
        throw e
    }
  }
  /** Uploads `file` under a random 40-char name with an extension derived from
    * `mimeType`, and returns the blob URI plus its last-modified timestamp.
    * NOTE(review): the mimeType match only handles image/jpeg and image/png —
    * any other value throws MatchError. The FileInputStream is never closed
    * explicitly here; confirm the Azure SDK closes it after upload. */
  def upload(file: File, mimeType: String): Future[(String, Date)] = scala.concurrent.Future[(String, Date)] {
    try { // Retrieve storage account from connection-string.
      // Create the blob client.
      val blobClient = storageAccount.createCloudBlobClient
      // Retrieve reference to a previously created container.
      val container = blobClient.getContainerReference("pictures")
      val name = Helper.randomAlphanumericString(40)
      val extension = mimeType match {
        case "image/jpeg" => ".jpg"
        case "image/png" => ".png"
      }
      val blob = container.getBlockBlobReference(name + extension)
      blob.upload(new FileInputStream(file), file.length())
      val props = blob.getProperties()
      // content type must be set after upload and persisted explicitly
      props.setContentType(mimeType)
      blob.uploadProperties()
      (blob.getUri.toString, props.getLastModified)
    } catch {
      case e: Exception =>
        // Output the stack trace.
        e.printStackTrace()
        throw e
    }
  }
}
| SwaggerTagger/octo-tagger-backend | app/utils/azure/BlobStorage.scala | Scala | mit | 2,050 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import minitest.TestSuite
import monifu.concurrent.schedulers.TestScheduler
import monifu.reactive.Ack.Continue
import monifu.reactive.exceptions.DummyException
import monifu.reactive.{Observer, Observable}
/** Tests for `Observable.ignoreElements`: it must drop all emitted items while
  * still propagating completion and errors. */
object MiscCompleteSuite extends TestSuite[TestScheduler] {
  def setup() = TestScheduler()
  def tearDown(s: TestScheduler) = {
    // every scheduled task must have been executed by the end of each test
    assert(s.state.get.tasks.isEmpty,
      "TestScheduler should have no pending tasks")
  }
  test("should complete") { implicit s =>
    var received = 0
    var wasCompleted = false
    Observable.unit(1).ignoreElements.onSubscribe(new Observer[Long] {
      def onNext(elem: Long) = {
        received += 1
        Continue
      }
      def onError(ex: Throwable) = ()
      def onComplete() = wasCompleted = true
    })
    // the single element must have been swallowed, but completion signalled
    assertEquals(received, 0)
    assert(wasCompleted)
  }
  test("should signal error") { implicit s =>
    var thrown: Throwable = null
    Observable.error(DummyException("dummy")).ignoreElements.onSubscribe(
      new Observer[Long] {
        def onError(ex: Throwable) = thrown = ex
        def onComplete() = ()
        def onNext(elem: Long) = Continue
      })
    // errors pass through ignoreElements untouched
    assertEquals(thrown, DummyException("dummy"))
  }
}
| sergius/monifu | monifu/shared/src/test/scala/monifu/reactive/internals/operators/MiscCompleteSuite.scala | Scala | apache-2.0 | 1,926 |
import sbt._
/** sbt build dependencies and resolvers.
  * NOTE(review): "latest.release" makes the build non-reproducible (it resolves
  * to a different scalatest version over time) — consider pinning. */
object Dependencies {
  // extra resolvers: the local ivy repository and Sonatype releases
  val allResolvers = Seq(
    Resolver.url("file://" + Path.userHome.absolutePath + "/.ivy/local"),
    "sonatype" at "https://oss.sonatype.org/content/repositories/releases")
  //val frontendDeps = Seq("org.scala-js" %%% "scalajs-dom" % "0.8.1")
  val scalatest = "org.scalatest" %% "scalatest" % "latest.release"
  //val scalaJsDependencies = Seq("org.scala-js" %%% "scalajs-dom" % "0.8.1")
  // dependencies scoped to the Test configuration only
  val testDeps = Seq(scalatest % Test)
}
| nightscape/scala-vdom | project/Dependencies.scala | Scala | apache-2.0 | 478 |
/**
* This file is part of mycollab-scheduler.
*
* mycollab-scheduler is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-scheduler is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-scheduler. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.schedule.email.project.service
import com.esofthead.mycollab.common.NotificationType
import com.esofthead.mycollab.common.domain.{MailRecipientField, SimpleAuditLog, SimpleRelayEmailNotification}
import com.esofthead.mycollab.common.service.AuditLogService
import com.esofthead.mycollab.configuration.SiteConfiguration
import com.esofthead.mycollab.module.mail.service.{IContentGenerator, ExtMailService}
import com.esofthead.mycollab.module.mail.MailUtils
import com.esofthead.mycollab.module.project.domain.{ProjectNotificationSetting, ProjectRelayEmailNotification}
import com.esofthead.mycollab.module.project.service.{ProjectMemberService, ProjectNotificationSettingService}
import com.esofthead.mycollab.module.user.domain.SimpleUser
import com.esofthead.mycollab.schedule.email.{ItemFieldMapper, MailContext, SendingRelayEmailNotificationAction}
import org.springframework.beans.factory.annotation.Autowired
/**
* @author MyCollab Ltd.
* @since 4.6.0
*/
/**
 * Base class for relay-email actions that notify every active member of a
 * project about a create/update/comment event on an item of type `B`.
 *
 * Subclasses supply the item lookup ([[getBeanInContext]]), the subject lines
 * and the template variables; this class handles recipient filtering (per-user
 * notification level), template rendering and the actual mail dispatch.
 *
 * NOTE(review): the three sendNotificationFor* methods are near-identical and
 * could share a common helper; also `scala.collection.JavaConversions` (used
 * below) is deprecated in favour of JavaConverters, which getNotifyUsers
 * already uses — the mix is inconsistent.
 */
abstract class SendMailToAllMembersAction[B] extends SendingRelayEmailNotificationAction {
  @Autowired var extMailService: ExtMailService = _
  @Autowired var projectMemberService: ProjectMemberService = _
  @Autowired var projectNotificationService: ProjectNotificationSettingService = _
  @Autowired var auditLogService: AuditLogService = _
  @Autowired protected var contentGenerator: IContentGenerator = _
  // item being notified about, resolved per-notification by getBeanInContext
  protected var bean: B = _
  // base URL of the site, resolved per-notification in onInitAction
  protected var siteUrl: String = _
  /** Active project members minus those whose notification level is None or Minimal. */
  private def getNotifyUsers(notification: ProjectRelayEmailNotification): Set[SimpleUser] = {
    import scala.collection.JavaConverters._
    var notifyUsers: Set[SimpleUser] = projectMemberService.getActiveUsersInProject(notification.getProjectId,
      notification.getSaccountid).asScala.toSet
    val notificationSettings: List[ProjectNotificationSetting] = projectNotificationService.findNotifications(notification.getProjectId,
      notification.getSaccountid).asScala.toList
    if (notificationSettings.nonEmpty) {
      for (setting <- notificationSettings) {
        if ((NotificationType.None.name == setting.getLevel) || (NotificationType.Minimal.name == setting.getLevel)) {
          notifyUsers = notifyUsers.filter(notifyUser => !(notifyUser.getUsername == setting.getUsername))
        }
      }
    }
    notifyUsers
  }
  /** Renders and sends the "item created" mail to every notifiable member. */
  def sendNotificationForCreateAction(notification: SimpleRelayEmailNotification) {
    val notifiers: Set[SimpleUser] = getNotifyUsers(notification.asInstanceOf[ProjectRelayEmailNotification])
    if (notifiers != null && notifiers.nonEmpty) {
      onInitAction(notification)
      import scala.collection.JavaConversions._
      for (user <- notifiers) {
        // each recipient gets a personalised context (locale, display name)
        val context: MailContext[B] = new MailContext[B](notification, user, siteUrl)
        bean = getBeanInContext(context)
        if (bean != null) {
          context.setWrappedBean(bean)
          buildExtraTemplateVariables(context)
          contentGenerator.putVariable("context", context)
          contentGenerator.putVariable("mapper", getItemFieldMapper)
          contentGenerator.putVariable("userName", user.getDisplayName)
          val userMail: MailRecipientField = new MailRecipientField(user.getEmail, user.getUsername)
          val recipients: List[MailRecipientField] = List[MailRecipientField](userMail)
          extMailService.sendHTMLMail(SiteConfiguration.getNoReplyEmail, SiteConfiguration.getDefaultSiteName, recipients,
            null, null, contentGenerator.parseString(getCreateSubject(context)),
            contentGenerator.parseFile("templates/email/project/itemCreatedNotifier.mt", context.getLocale,
              SiteConfiguration.getDefaultLocale), null)
        }
      }
    }
  }
  /** Renders and sends the "item updated" mail, including the latest audit-log diff. */
  def sendNotificationForUpdateAction(notification: SimpleRelayEmailNotification) {
    val notifiers: Set[SimpleUser] = getNotifyUsers(notification.asInstanceOf[ProjectRelayEmailNotification])
    if (notifiers != null && notifiers.nonEmpty) {
      onInitAction(notification)
      import scala.collection.JavaConversions._
      for (user <- notifiers) {
        val context: MailContext[B] = new MailContext[B](notification, user, siteUrl)
        bean = getBeanInContext(context)
        if (bean != null) {
          context.setWrappedBean(bean)
          contentGenerator.putVariable("userName", user.getDisplayName)
          buildExtraTemplateVariables(context)
          if (context.getTypeid != null) {
            // attach the most recent change history for the item being updated
            val auditLog: SimpleAuditLog = auditLogService.findLastestLog(context.getTypeid.toInt, context.getSaccountid)
            contentGenerator.putVariable("historyLog", auditLog)
            contentGenerator.putVariable("context", context)
            contentGenerator.putVariable("mapper", getItemFieldMapper)
          }
          val userMail: MailRecipientField = new MailRecipientField(user.getEmail, user.getUsername)
          val recipients: List[MailRecipientField] = List[MailRecipientField](userMail)
          extMailService.sendHTMLMail(SiteConfiguration.getNoReplyEmail, SiteConfiguration.getDefaultSiteName, recipients,
            null, null, contentGenerator.parseString(getUpdateSubject(context)),
            contentGenerator.parseFile("templates/email/project/itemUpdatedNotifier.mt",
              context.getLocale, SiteConfiguration.getDefaultLocale), null)
        }
      }
    }
  }
  /** Renders and sends the "new comment" mail to every notifiable member. */
  def sendNotificationForCommentAction(notification: SimpleRelayEmailNotification) {
    val notifiers: Set[SimpleUser] = getNotifyUsers(notification.asInstanceOf[ProjectRelayEmailNotification])
    if (notifiers != null && notifiers.nonEmpty) {
      onInitAction(notification)
      import scala.collection.JavaConversions._
      for (user <- notifiers) {
        val context: MailContext[B] = new MailContext[B](notification, user, siteUrl)
        bean = getBeanInContext(context)
        if (bean != null) {
          buildExtraTemplateVariables(context)
          contentGenerator.putVariable("userName", user.getDisplayName)
          contentGenerator.putVariable("comment", context.getEmailNotification)
          val userMail: MailRecipientField = new MailRecipientField(user.getEmail, user.getUsername)
          val recipients: List[MailRecipientField] = List[MailRecipientField](userMail)
          extMailService.sendHTMLMail(SiteConfiguration.getNoReplyEmail, SiteConfiguration.getDefaultSiteName, recipients,
            null, null, contentGenerator.parseString(getCommentSubject(context)),
            contentGenerator.parseFile("templates/email/project/itemCommentNotifier.mt",
              context.getLocale, SiteConfiguration.getDefaultLocale), null)
        }
      }
    }
  }
  // resolves the account-specific site URL used in the mail templates
  private def onInitAction(notification: SimpleRelayEmailNotification): Unit = {
    siteUrl = MailUtils.getSiteUrl(notification.getSaccountid)
  }
  /** Loads the item the notification refers to; returning null skips the mail. */
  protected def getBeanInContext(context: MailContext[B]): B
  /** Adds subclass-specific variables to the template context. */
  protected def buildExtraTemplateVariables(context: MailContext[B])
  protected def getItemName: String
  protected def getCreateSubject(context: MailContext[B]): String
  protected def getUpdateSubject(context: MailContext[B]): String
  protected def getCommentSubject(context: MailContext[B]): String
  /** Maps item field names to display metadata for the history-log rendering. */
  protected def getItemFieldMapper: ItemFieldMapper
}
| maduhu/mycollab | mycollab-scheduler/src/main/scala/com/esofthead/mycollab/schedule/email/project/service/SendMailToAllMembersAction.scala | Scala | agpl-3.0 | 8,646 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import org.mockito.Mockito._
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.box.retriever.BoxRetriever
import uk.gov.hmrc.ct.computations.CP287
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
/** Validation rules for box RSQ2: it is mandatory when CP287 is absent or zero,
  * and must NOT be present when CP287 carries a (non-zero) value. */
class RSQ2Spec extends WordSpec with Matchers with MockitoSugar {
  "RSQ2 validation" should {
    "return no error if input value populated and CP287 has no value" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(None))
      RSQ2(Some(true)).validate(boxRetriever) shouldBe empty
    }
    "return a mandatory error if no input value populated and CP287 has no value" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(None))
      RSQ2(None).validate(boxRetriever) shouldBe Set(CtValidation(Some("RSQ2"), "error.RSQ2.required"))
    }
    // CP287 of 0 is treated the same as "no value"
    "return a mandatory error if no input value populated and CP287 has value 0" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(Some(0)))
      RSQ2(None).validate(boxRetriever) shouldBe Set(CtValidation(Some("RSQ2"), "error.RSQ2.required"))
    }
    // a plain BoxRetriever (no computations) behaves like "CP287 absent"
    "return a mandatory error if no input value populated, and not a computations box retriever" in {
      val boxRetriever = mock[BoxRetriever]
      RSQ2(None).validate(boxRetriever) shouldBe Set(CtValidation(Some("RSQ2"), "error.RSQ2.required"))
    }
    "return no error if input value populated, and not a computations box retriever" in {
      val boxRetriever = mock[BoxRetriever]
      RSQ2(Some(false)).validate(boxRetriever) shouldBe empty
    }
    // when CP287 carries a real value, RSQ2 must be empty
    "return a box should not exist error if there is an input value populated and CP287 has a value" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(Some(10)))
      RSQ2(Some(true)).validate(boxRetriever) shouldBe Set(CtValidation(Some("RSQ2"), "error.RSQ2.cannot.exist"))
    }
    "return no error if there is no input value populated and CP287 has a value" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(Some(10)))
      RSQ2(None).validate(boxRetriever) shouldBe empty
    }
    "return no error if there is an input value populated and CP287 has a value of 0" in {
      val boxRetriever = mock[ComputationsBoxRetriever]
      when(boxRetriever.cp287()).thenReturn(CP287(Some(0)))
      RSQ2(Some(false)).validate(boxRetriever) shouldBe empty
    }
  }
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600/v2/RSQ2Spec.scala | Scala | apache-2.0 | 3,291 |
/*
* Stratio Meta
*
* Copyright (c) 2014, Stratio, All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3.0 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library.
*/
package com.stratio.meta.server.config;
import com.typesafe.config.Config
/** Typesafe-config paths for the streaming-related settings read by the trait below. */
object StreamingConfig {
  val KAFKA_SERVER = "config.kafka.server"
  val KAFKA_PORT = "config.kafka.port"
  val ZOOKEEPER_SERVER = "config.zookeeper.server"
  val ZOOKEEPER_PORT = "config.zookeeper.port"
  val STREAMING_DURATION = "config.streaming.duration"
  val STREAMING_GROUPID = "config.streaming.groupId"
}
/** Mixin exposing the Kafka/Zookeeper/streaming settings as lazily-read values. */
trait StreamingConfig {
  // WARNING: the default body is `???` and throws NotImplementedError; any
  // class mixing this in must override `config` before the lazy vals are read.
  def config: Config = ???
  lazy val kafkaServer: String = config.getString(StreamingConfig.KAFKA_SERVER)
  lazy val kafkaPort: Int = config.getInt(StreamingConfig.KAFKA_PORT)
  lazy val zookeeperServer: String = config.getString(StreamingConfig.ZOOKEEPER_SERVER)
  lazy val zookeeperPort: Int = config.getInt(StreamingConfig.ZOOKEEPER_PORT)
  lazy val streamingDuration: Int = config.getInt(StreamingConfig.STREAMING_DURATION)
  lazy val streamingGroupId: String = config.getString(StreamingConfig.STREAMING_GROUPID)
} | dhiguero/stratio-meta | meta-server/src/main/scala/com/stratio/meta/server/config/StreamingConfig.scala | Scala | gpl-3.0 | 1,649 |
package scalapb.spark.internal
private[spark] object MapHelpers {

  /**
   * Wraps a by-name iterator producer in an immutable `Map` view.
   *
   * `it` should yield a fresh iterator on each evaluation; every Map
   * operation below re-evaluates it. Previously `get`, `+` and `-` were left
   * as `???` and threw NotImplementedError when called; they are now
   * implemented in terms of the iterator, which is backward compatible for
   * callers that only iterated the map.
   */
  def fromIterator[K, V](it: => Iterator[(K, V)]): Map[K, V] = new Map[K, V] {
    def iterator: Iterator[(K, V)] = it
    // first value paired with `key`, scanning a fresh iterator lazily
    def get(key: K): Option[V] = it.collectFirst { case (k, v) if k == key => v }
    // structural updates materialize the view, then delegate to the standard Map
    def +[V1 >: V](kv: (K, V1)): scala.collection.immutable.Map[K, V1] = it.toMap + kv
    def -(key: K): scala.collection.immutable.Map[K, V] = it.toMap - key
  }
}
| scalapb/sparksql-scalapb | sparksql-scalapb/src/main/scala-2.12/scalapb/spark/internal/MapHelpers.scala | Scala | apache-2.0 | 369 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.repl
/**
* Runtime patch utilities.
*/
/**
 * Runtime patch utilities: locate a Scala singleton by name and mutate it via
 * reflection.
 */
private[repl] object PatchUtils {
  // The patcher accepts object and its defining class and return true if patching was successful
  type Patcher = (AnyRef, Class[_]) => Boolean
  // Actual patch definition
  type Patch = (ClassLoader) => Boolean
  /**
   * Patch given object.
   *
   * @param fullClassName class name of object
   * @param classloader classloader to use for loading the object definition
   * @param patcher actual patcher
   * @return true if patching was successful else false
   */
  def patchObject(fullClassName: String, classloader: ClassLoader, patcher: Patcher): Boolean = {
    // Scala singletons are compiled to a class whose name ends in '$'
    val clz = Class.forName(fullClassName + "$", false, classloader)
    val module = getModule(clz)
    // Patch it
    patcher(module, clz)
  }
  /** Returns the singleton instance held in the static MODULE$ field. */
  def getModule(objectClass: Class[_]): AnyRef = {
    val f = objectClass.getField("MODULE$")
    f.get(null)
  }
  val OUTER_SCOPES_CLASS = "org.apache.spark.sql.catalyst.encoders.OuterScopes"
  // FIX: the pattern previously used doubled backslashes ("\\d", "\\.", "\\$")
  // inside a triple-quoted string. Triple-quoted strings do not process
  // escapes, so the regex contained literal '\' characters and could never
  // match the REPL-generated class names (e.g. "$line3.$read$$iw") it is
  // meant to recognise. Single backslashes restore the intended regex.
  val OUTER_SCOPE_REPL_REGEX = """^((?:intp_id_\d+)??\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
  // Patch Spark OuterScopes definition
  val patchOuterScopes: Patch = (classLoader: ClassLoader) => {
    val patcher: Patcher = (obj: AnyRef, clz: Class[_]) => {
      // replace the private REPLClass regex field with one that understands H2O REPL names
      val f = clz.getDeclaredField("REPLClass")
      f.setAccessible(true)
      try {
        f.set(obj, OUTER_SCOPE_REPL_REGEX)
      } catch {
        case _: IllegalArgumentException => // we have already patched once
      }
      true
    }
    patchObject(OUTER_SCOPES_CLASS, classLoader, patcher)
  }
  // Manages all runtime patches in the system
  // Note: if necessary it should accept environment configuration and
  // apply patch only if it is applicable for given environment (e.g., Specific Scala + Specific Spark)
  object PatchManager {
    private val patches = Map(
      "SW-386" ->
        ("Patches OuterScope to replace default REPL regexp by one which understand H2O REPL", patchOuterScopes))
    /** Applies the patch registered under `jiraId`; false when unknown. */
    def patch(jiraId: String, classLoader: ClassLoader): Boolean = {
      patches.get(jiraId).map(p => p._2(classLoader)).getOrElse(false)
    }
    /** Human-readable description of the patch registered under `jiraId`. */
    def patchInfo(jiraId: String): String = {
      patches.get(jiraId).map(_._1).getOrElse("NOT FOUND")
    }
  }
}
| h2oai/sparkling-water | repl/src/main/scala/ai/h2o/sparkling/repl/PatchUtils.scala | Scala | apache-2.0 | 3,081 |
package org.http4s.client.impl
import cats._
import cats.implicits._
import org.http4s._
import org.http4s.headers.`Content-Length`
/** Common parent of the request-builder mixins: ties a builder to one HTTP [[Method]]. */
sealed trait RequestGenerator extends Any {
  def method: Method
}
/** Request builder for methods whose requests carry no entity body. */
trait EmptyRequestGenerator[F[_]] extends Any with RequestGenerator {
  /** Make a [[org.http4s.Request]] using this [[Method]] */
  final def apply(uri: Uri, headers: Header*)(implicit F: Applicative[F]): F[Request[F]] =
    F.pure(Request(method, uri, headers = Headers(headers: _*)))
}
/** Request builder for methods whose requests may carry an entity body. */
trait EntityRequestGenerator[F[_]] extends Any with EmptyRequestGenerator[F] {

  /** Make a [[org.http4s.Request]] using this Method */
  final def apply[A](uri: Uri, body: A, headers: Header*)(
      implicit F: Monad[F],
      w: EntityEncoder[F, A]): F[Request[F]] = {
    // Headers contributed by the encoder, plus any caller-supplied ones.
    val baseHeaders = w.headers ++ headers
    w.toEntity(body).flatMap {
      case Entity(entityBody, entityLength) =>
        // If the encoder knows the length, try to add a Content-Length header;
        // a length that cannot be represented leaves the headers untouched.
        val finalHeaders = entityLength.fold(baseHeaders) { l =>
          `Content-Length`.fromLong(l).fold(_ => baseHeaders, c => baseHeaders.put(c))
        }
        F.pure(Request(method = method, uri = uri, headers = finalHeaders, body = entityBody))
    }
  }
}
| reactormonk/http4s | client/src/main/scala/org/http4s/client/impl/RequestGenerator.scala | Scala | apache-2.0 | 1,111 |
package pyspark_elastic
import java.util.{ Map => JMap }
import scala.collection.JavaConversions.mapAsScalaMap
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.rdd.RDD
import org.elasticsearch.spark.sparkContextFunctions
import org.elasticsearch.spark.sparkStringJsonRDDFunctions
import pyspark_util.Pickling
import pyspark_util.Pickling.toPickleableRDD
import pyspark_util.Pickling.toUnpickleableRDD
import org.elasticsearch.spark.rdd.ScalaEsRDD
import org.elasticsearch.spark.rdd.ScalaEsRDD
/** Bridge used by the Python side to read/write Elasticsearch via Spark. */
class PythonHelper() {

  // Pickler shared by all RDD conversions performed by this helper.
  implicit val pickling = new Pickling()

  /** Reads ES documents as JSON strings, wrapped as a JavaRDD for py4j. */
  def esJsonRDD(sc: JavaSparkContext, cfg: JMap[String, String]) =
    JavaRDD.fromRDD(sc.sc.esJsonRDD(config(cfg)))

  /** Unpickles the incoming RDD into JSON strings and writes them to ES. */
  def saveJsonToEs(rdd: JavaRDD[Array[Byte]], cfg: JMap[String, String]) = {
    val jsonDocs = rdd.rdd.unpickle().asInstanceOf[RDD[String]]
    jsonDocs.saveJsonToEs(config(cfg))
  }

  // Null-safe conversion of the Java config map coming from Python.
  private[this] def config(cfg: JMap[String, String]) =
    if (cfg == null) Map[String, String]() else mapAsScalaMap(cfg)
}
| TargetHolding/pyspark-elastic | src/main/scala/pyspark_elastic/PythonHelper.scala | Scala | apache-2.0 | 1,085 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.contrib.scalaz.eitherT._
import quasar.fp._
import scalaz._
import scalaz.concurrent._
import scalaz.syntax.monad._
import scalaz.syntax.std.option._
object Errors {
  /** A Task that can fail with a typed error `E` (left) in addition to Task's own Throwables. */
  type ETask[E, X] = EitherT[Task, E, X]
  /**
   * Recovers from Throwables raised inside `t`: exceptions matched by `f` become
   * successful values of type `B`; unmatched exceptions are re-raised in Task.
   */
  def handle[E, A, B>:A](t: ETask[E, A])(f: PartialFunction[Throwable, B]):
      ETask[E, B] = {
    type G[F[_], X] = EitherT[F, E, X]
    Catchable[G[Task, ?]].attempt(t) flatMap {
      // cata: if `f` is defined at the Throwable, wrap its result; else rethrow.
      case -\\/(t) => f.lift(t).cata(Task.now, Task.fail(t)).liftM[G]
      case \\/-(a) => Applicative[G[Task, ?]].point(a)
    }
  }
  /** Like `handle`, but the recovery function may itself produce an effectful `ETask`. */
  def handleWith[E, A, B>:A](t: ETask[E, A])(f: PartialFunction[Throwable, ETask[E, B]]):
      ETask[E, B] = {
    Catchable[ETask[E, ?]].attempt(t) flatMap {
      case -\\/(t) => f.lift(t) getOrElse liftE(Task.fail(t))
      case \\/-(a) => Applicative[ETask[E, ?]].point(a)
    }
  }
  /** Natural transformation lifting a plain Task into an ETask that never fails with E. */
  def liftE[E]: (Task ~> ETask[E, ?]) = liftMT[Task, EitherT[?[_], E, ?]]
}
/** Given a function A => B, returns a natural transformation from
  * EitherT[F, A, ?] ~> EitherT[F, B, ?].
  *
  * Partially applies the monad, `F`, for better inference, so use like
  * `convertError[F](f)`
  */
object convertError {
  def apply[F[_]]: Aux[F] =
    new Aux[F]
  /** Second application stage: `F` is fixed so `A`/`B` can be inferred from `f`. */
  final class Aux[F[_]] {
    def apply[A, B](f: A => B)(implicit F: Functor[F]): EitherT[F, A, ?] ~> EitherT[F, B, ?] =
      new (EitherT[F, A, ?] ~> EitherT[F, B, ?]) {
        // Only the error channel is transformed; successful values pass through.
        def apply[C](ea: EitherT[F, A, C]) = ea.leftMap(f)
      }
  }
}
| drostron/quasar | effect/src/main/scala/quasar/errors.scala | Scala | apache-2.0 | 2,081 |
package pt.cnbc.wikimodels.mathml.elements
/**
 * User: alex
 * Date: 30/Set/2010
 * Time: 23:49:57
 * MathML content identifier (<ci>) element.
 * This class assumes that Ci only has strings as identifiers. The use of presentation markup is allowed by the
 * MathML 2.0 spec but not by an ASCIIMathML representation. As such content is a string.
 * In SBML's subset the type attribute is not a part of Ci elements.
 */
case class Ci(content:String,
definitionURL:Option[String]=None) extends Token | alexmsmartins/WikiModels | wm_math_parser/src/main/scala/pt/cnbc/wikimodels/mathml/elements/Ci.scala | Scala | mit | 468 |
//
// NumericsConfig.scala -- Scala configuration object NumericsConfig
// Project OrcScala
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.values
import scala.math.BigDecimal
/**
 * Central policy for Orc's numeric representations.
 *
 * Depending on the `orc.numerics.*` system properties, integral values are
 * represented as `Long` or `BigInt`, and floating-point values as `Double`
 * or `BigDecimal`. The `toOrc*` helpers normalize parsed literals and
 * arbitrary `java.lang.Number`s into the configured representation.
 */
object NumericsConfig {

  /** Parses an integral literal into the configured integral representation. */
  final def toOrcIntegral(s: String): Number = {
    if (NumericsConfig.preferLong)
      s.toLong
    else
      BigInt(s)
  }

  /** Parses a floating-point literal into the configured floating-point representation. */
  final def toOrcFloatingPoint(s: String): Number = {
    if (NumericsConfig.preferDouble)
      s.toDouble
    else
      BigDecimal(s)
  }

  /**
   * Converts an arbitrary Number to the configured integral representation,
   * truncating any fractional part.
   */
  final def toOrcIntegral(v: Number): Number = {
    if (NumericsConfig.preferLong)
      v.longValue()
    else v match {
      case i: BigInt => i
      case d: BigDecimal => d.toBigInt
      case _ if v.longValue() == v => BigInt(v.longValue())
      // Go through BigDecimal so non-integral values (e.g. 2.75) are truncated
      // instead of making BigInt(String) throw NumberFormatException.
      case _ => BigDecimal(v.toString).toBigInt
    }
  }

  /**
   * Converts an arbitrary Number to the configured floating-point representation.
   *
   * BUG FIX: the previous implementation produced BigInt here (copy-paste from
   * the integral variant, see the commented-out intent in the original), losing
   * the fractional part or throwing NumberFormatException for values like 2.5.
   * Note: non-finite doubles (NaN/Infinity) cannot be represented as BigDecimal
   * and will still throw, as before.
   */
  final def toOrcFloatingPoint(v: Number): Number = {
    if (NumericsConfig.preferDouble)
      v.doubleValue()
    else v match {
      case d: BigDecimal => d
      case i: BigInt => BigDecimal(i)
      case _ => BigDecimal(v.toString)
    }
  }

  // Master switch: prefer low-precision (Long/Double) machine types.
  @inline
  final val preferLP = System.getProperty("orc.numerics.preferLP", "false").toBoolean
  // Individual overrides fall back to the master switch when unset.
  @inline
  final val preferDouble = Option(System.getProperty("orc.numerics.preferDouble")).map(_.toBoolean).getOrElse(preferLP)
  @inline
  final val preferLong = Option(System.getProperty("orc.numerics.preferLong")).map(_.toBoolean).getOrElse(preferLP)
}
| orc-lang/orc | OrcScala/src/orc/values/NumericsConfig.scala | Scala | bsd-3-clause | 2,232 |
package com.mehmetakiftutuncu.eshotroidplus.utilities.base
import com.github.mehmetakiftutuncu.errors.Errors
import com.mehmetakiftutuncu.eshotroidplus.utilities.JsonErrorRepresenter
import play.api.http.ContentTypes
import play.api.libs.json.{JsValue, Json}
import play.api.mvc.{Controller, Result}
import scala.concurrent.Future
/**
 * Shared response helpers for this application's controllers: wraps payloads
 * in the standard `{"errors": ...}` / `{"success": ...}` JSON envelopes and
 * offers Future-wrapped variants for async actions.
 */
trait ControllerBase extends Controller {

  /** 200 response whose JSON body carries the errors under an "errors" key. */
  def okWithError(errors: Errors): Result =
    Ok(Json.obj("errors" -> errors.represent(JsonErrorRepresenter))).as(ContentTypes.JSON)

  /** 200 response whose JSON body carries the payload under a "success" key. */
  def okWithJson(json: JsValue): Result =
    Ok(Json.obj("success" -> json)).as(ContentTypes.JSON)

  /** 200 plain-text response. */
  def okWithText(text: String): Result = Ok(text).as(ContentTypes.TEXT)

  /** 200 HTML response. */
  def okWithHtml(html: String): Result = Ok(html).as(ContentTypes.HTML)

  // Future-wrapped variants of the helpers above, for Action.async bodies.

  def futureOkWithError(errors: Errors): Future[Result] = Future.successful(okWithError(errors))

  def futureOkWithJson(json: JsValue): Future[Result] = Future.successful(okWithJson(json))

  def futureOkWithText(text: String): Future[Result] = Future.successful(okWithText(text))

  def futureOkWithHtml(html: String): Future[Result] = Future.successful(okWithHtml(html))
}
| mehmetakiftutuncu/EshotroidPlusServer | app/com/mehmetakiftutuncu/eshotroidplus/utilities/base/ControllerBase.scala | Scala | gpl-3.0 | 1,207 |
package notebook
import java.security.SecureRandom
import org.apache.commons.codec.binary.Hex
import org.slf4j.LoggerFactory
import play.api.libs.json._
import rx.lang.scala._
import scala.collection.mutable
/**
 * Holds the (single, process-wide) publisher callback for the JS bus and
 * buffers events that are published before a publisher is registered.
 */
private object JSBusState {
  // Events queued while no publisher is registered; flushed in order by setPublisher.
  private val events = mutable.ArrayBuffer.empty[(String, JsValue)]
  val log = LoggerFactory.getLogger(getClass)
  // Volatile so publish() can cheaply test for a publisher without taking the lock.
  @volatile private var publishCallback: (String, JsValue) => Unit = null
  /** Registers the event sink and drains anything queued so far (in order). */
  def setPublisher(callback: (String, JsValue) => Unit) {
    log.debug("Setting publisher")
    events.synchronized {
      publishCallback = callback
      for ((id, value) <- events) {
        log.debug("Dequeuing %s to %s".format(value, id))
        callback(id, value)
      }
      events.clear()
    }
  }
  /** Sends an event to the publisher, or queues it if none is registered yet. */
  def publish(id: String, value: JsValue) {
    // Double-checked under the `events` lock so an event racing with
    // setPublisher is either queued (and later drained) or sent — never lost.
    if (publishCallback == null) {
      events.synchronized {
        if (publishCallback == null) {
          log.debug("Queuing %s to %s".format(value, id))
          events += ((id, value))
          return
        }
      }
    }
    log.debug("Sending %s to %s" format(value, id))
    publishCallback(id, value)
  }
}
/**
 * Bridge between Scala observables and the browser-side JavaScript bus.
 * Each [[JSBus.ValueConnection]] pairs an observer (Scala -> JS pushes) with an
 * observable (JS -> Scala updates), keyed by a randomly generated connection id.
 */
object JSBus {
  private[this] val random = new SecureRandom
  // Generates an unguessable connection id of the form "anon<32 hex chars>".
  protected[this] def newID = {
    val bytes = new Array[Byte](16)
    random.nextBytes(bytes)
    val id = "anon" + new String(Hex.encodeHex(bytes))
    id
  }
  // Pushes a value for connection `id` out to the JavaScript side.
  protected[this] def send(id: String, value: JsValue) {
    JSBusState.publish(id, value)
  }
  val log = LoggerFactory.getLogger(getClass)
  // Routes an update coming from the client to the matching connection, if any.
  private[notebook] def forwardClientUpdateMessage(obsId: String,
    newValue: JsValue) = idToSubject.get(obsId).map(_.onJsReceived(newValue))
  // TODO: How do these things get disposed? Need a notice from Javascript to Scala when an id is disposed, then we dispose all subscriptions (onComplete?)
  private val idToSubject: scala.collection.concurrent.Map[String, ValueConnection] = new scala.collection.concurrent.TrieMap[String, ValueConnection]()
  /** One bidirectional Scala<->JS channel, registered in idToSubject under `id`. */
  class ValueConnection extends Connection[JsValue] {
    val observer = new ConcreteObserver[JsValue] {
      // Called by extenral parties
      override def onNext(arg: JsValue) {
        // val wantUpdate = synchronized {
        // // Prevent echos by only sending changes
        // if (current == null || current != args) {
        // current = args
        // true
        // }
        // else {
        // false
        // }
        // }
        // if (wantUpdate)
        send(id, arg)
      }
    }
    private[this] val subject = Subject[JsValue]()
    // Client-originated updates, as seen from Scala.
    val observable: Observable[JsValue] = new WrappedObservable[JsValue](subject)
    val id = newID
    // NOTE(review): only written by the commented-out echo-suppression code above;
    // currently always null.
    var current: JsValue = null
    // Feeds a client update into the observable side of this connection.
    def onJsReceived(v: JsValue) {
      //println(">>><<< : " + v)
      subject.onNext(v)
    }
  }
  /** Creates and registers a fresh connection. */
  def createConnection = {
    val cxn = new ValueConnection
    idToSubject += cxn.id -> cxn
    cxn
  }
  override def toString = "JSBus"
}
| dragos/spark-notebook | modules/observable/src/main/scala/notebook/JSBus.scala | Scala | apache-2.0 | 2,979 |
package de.tu_berlin.formic.gatling.action.tree
import de.tu_berlin.formic.gatling.action.FormicActionBuilder
import io.gatling.core.action.Action
import io.gatling.core.session._
import io.gatling.core.structure.ScenarioContext
/**
  * Gatling action builder that inserts `toInsert` into a shared Formic tree
  * at the position described by `pathElements`.
  *
  * @author Ronny Bräunlich
  */
case class FormicTreeInsertActionBuilder(dataTypeInstanceId: Expression[String], toInsert: Int, pathElements: Seq[Expression[Int]]) extends FormicActionBuilder {
  override def build(ctx: ScenarioContext, next: Action): Action =
    TreeInsertion(dataTypeInstanceId, toInsert, ctx.coreComponents.statsEngine, next, pathElements)
}
| rbraeunlich/formic | formic-gatling/src/main/scala/de/tu_berlin/formic/gatling/action/tree/FormicTreeInsertActionBuilder.scala | Scala | apache-2.0 | 640 |
package edu.gemini.pit.ui.action
import java.awt.event.KeyEvent
import edu.gemini.ui.workspace.scala.RichShell
import edu.gemini.pit.model.Model
/** Shell menu action that reverts the model to its previous state (Ctrl/Cmd-Z). */
class UndoAction(shell: RichShell[Model]) extends ShellAction(shell, "Undo", Some(KeyEvent.VK_Z)) {
  // Undo is unavailable once the proposal has been submitted.
  enabledWhen { shell.canUndo && !shell.model.exists(_.proposal.isSubmitted) }
  override def apply() {
    shell.undo()
  }
} | spakzad/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/action/UndoAction.scala | Scala | bsd-3-clause | 377 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package collection
package mutable
import scala.language.higherKinds
/** Base type of mutable Maps */
trait Map[K, V]
  extends Iterable[(K, V)]
    with collection.Map[K, V]
    with MapOps[K, V, Map, Map[K, V]]
    with Growable[(K, V)]
    with Shrinkable[K]
    with MapFactoryDefaults[K, V, Map, Iterable] {

  override def mapFactory: scala.collection.MapFactory[Map] = Map

  /*
  //TODO consider keeping `remove` because it returns the removed entry
  @deprecated("Use subtract or -= instead of remove", "2.13.0")
  def remove(key: K): Option[V] = {
    val old = get(key)
    if(old.isDefined) subtract(key)
    old
  }
  */

  /** The same map with a given default function.
    * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
    * are not affected by `withDefaultValue`.
    *
    * Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
    * @param d     the function mapping keys to values, used for non-present keys
    * @return      a wrapper of the map with a default value
    */
  def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d)

  /** The same map with a given default value.
    * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
    * are not affected by `withDefaultValue`.
    *
    * Invoking transformer methods (e.g. `map`) will not preserve the default value.
    *
    * @param d     default value used for non-present keys
    * @return      a wrapper of the map with a default value
    */
  def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d)
}
/**
  * Implementation operations shared by all mutable maps.
  *
  * @define coll mutable map
  * @define Coll `mutable.Map`
  */
trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
  extends IterableOps[(K, V), Iterable, C]
    with collection.MapOps[K, V, CC, C]
    with Cloneable[C]
    with Builder[(K, V), C]
    with Growable[(K, V)]
    with Shrinkable[K] {

  // Builder: a mutable map is its own builder, so result() is just the map itself.
  def result(): C = coll

  @deprecated("Use - or remove on an immutable Map", "2.13.0")
  final def - (key: K): C = clone() -= key

  @deprecated("Use -- or removeAll on an immutable Map", "2.13.0")
  final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys

  /** Adds a new key/value pair to this map and optionally returns previously bound value.
    * If the map already contains a
    * mapping for the key, it will be overridden by the new value.
    *
    * @param key    the key to update
    * @param value  the new value
    * @return an option value containing the value associated with the key
    *         before the `put` operation was executed, or `None` if `key`
    *         was not defined in the map before.
    */
  def put(key: K, value: V): Option[V] = {
    val r = get(key)
    update(key, value)
    r
  }

  /** Adds a new key/value pair to this map.
    * If the map already contains a
    * mapping for the key, it will be overridden by the new value.
    *
    * @param key    The key to update
    * @param value  The new value
    */
  def update(key: K, value: V): Unit = { coll += ((key, value)) }

  /**
    * Update a mapping for the specified key and its current optionally-mapped value
    * (`Some` if there is current mapping, `None` if not).
    *
    * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`.
    * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent).
    * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged.
    *
    * @param key the key value
    * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping
    * @return the new value associated with the specified key
    */
  def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
    val previousValue = this.get(key)
    val nextValue = remappingFunction(previousValue)
    (previousValue, nextValue) match {
      case (None, None) => // do nothing
      case (Some(_), None) => this.remove(key)
      case (_, Some(v)) => this.update(key,v)
    }
    nextValue
  }

  /** If given key is already in this map, returns associated value.
    *
    * Otherwise, computes value from given expression `op`, stores with key
    * in map and returns that value.
    *
    * Concurrent map implementations may evaluate the expression `op`
    * multiple times, or may evaluate `op` without inserting the result.
    *
    * @param  key the key to test
    * @param  op  the computation yielding the value to associate with `key`, if
    *             `key` is previously unbound.
    * @return     the value associated with key (either previously or as a result
    *             of executing the method).
    */
  def getOrElseUpdate(key: K, op: => V): V =
    get(key) match {
      case Some(v) => v
      case None => val d = op; this(key) = d; d
    }

  /** Removes a key from this map, returning the value associated previously
    * with that key as an option.
    * @param    key the key to be removed
    * @return   an option value containing the value associated previously with `key`,
    *           or `None` if `key` was not defined in the map before.
    */
  def remove(key: K): Option[V] = {
    val r = get(key)
    if (r.isDefined) this -= key
    r
  }

  // Removes every entry; concrete maps typically override with a faster implementation.
  def clear(): Unit = { keysIterator foreach -= }

  // Copies this map into a fresh empty instance of the same kind.
  override def clone(): C = empty ++= toIterable

  @deprecated("Use filterInPlace instead", "2.13.0")
  @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p)

  /** Retains only those mappings for which the predicate
    * `p` returns `true`.
    *
    * @param p  The test predicate
    */
  def filterInPlace(p: (K, V) => Boolean): this.type = {
    if (nonEmpty) {
      val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
      val arrayLength = array.length
      var i = 0
      while (i < arrayLength) {
        val (k, v) = array(i).asInstanceOf[(K, V)]
        if (!p(k, v)) {
          this -= k
        }
        i += 1
      }
    }
    this
  }

  @deprecated("Use mapValuesInPlace instead", "2.13.0")
  @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f)

  /** Applies a transformation function to all values contained in this map.
    * The transformation function produces new values from existing keys
    * associated values.
    *
    * @param f  the transformation to apply
    * @return   the map itself.
    */
  def mapValuesInPlace(f: (K, V) => V): this.type = {
    iterator foreach {
      case (key, value) => update(key, f(key, value))
    }
    this
  }

  @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0")
  def updated[V1 >: V](key: K, value: V1): CC[K, V1] =
    clone().asInstanceOf[CC[K, V1]].addOne((key, value))

  // Disambiguates between the IterableOps and Builder inherited members.
  override def knownSize: Int = super[IterableOps].knownSize
}
/**
  * $factoryInfo
  * @define coll mutable map
  * @define Coll `mutable.Map`
  */
@SerialVersionUID(3L)
object Map extends MapFactory.Delegate[Map](HashMap) {

  /** Wrapper returned by `withDefault`/`withDefaultValue`: delegates everything
    * to `underlying` but answers `apply` misses via `defaultValue`. */
  @SerialVersionUID(3L)
  class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V)
    extends AbstractMap[K, V]
      with MapOps[K, V, Map, WithDefault[K, V]] with Serializable {

    override def default(key: K): V = defaultValue(key)
    def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator
    override def isEmpty: Boolean = underlying.isEmpty
    override def knownSize: Int = underlying.knownSize
    override def mapFactory: MapFactory[Map] = underlying.mapFactory
    override def clear(): Unit = underlying.clear()
    def get(key: K): Option[V] = underlying.get(key)
    def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this }
    def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this }
    // Bulk/transformation operations rebuild the wrapper so the default survives them.
    override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] =
      underlying.concat(suffix).withDefault(defaultValue)
    override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
    override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] =
      new WithDefault[K, V](mapFactory.from(coll), defaultValue)
    override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] =
      Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue))
  }
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
@SerialVersionUID(3L)
abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V]
| martijnhoekstra/scala | src/library/scala/collection/mutable/Map.scala | Scala | apache-2.0 | 9,130 |
package patmat
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import patmat.Huffman._
/** Unit tests for the Huffman coding exercise (patmat.Huffman). */
@RunWith(classOf[JUnitRunner])
class HuffmanSuite extends FunSuite {
  // Shared fixture: t1 encodes {a, b}; t2 nests t1 and additionally encodes 'd'.
  trait TestTrees {
    val t1 = Fork(Leaf('a',2), Leaf('b',3), List('a','b'), 5)
    val t2 = Fork(Fork(Leaf('a',2), Leaf('b',3), List('a','b'), 5), Leaf('d',4), List('a','b','d'), 9)
  }
  test("weight of a larger tree") {
    new TestTrees {
      assert(weight(t1) === 5)
    }
  }
  test("chars of a larger tree") {
    new TestTrees {
      assert(chars(t2) === List('a','b','d'))
    }
  }
  test("string2chars(\\"hello, world\\")") {
    assert(string2Chars("hello, world") === List('h', 'e', 'l', 'l', 'o', ',', ' ', 'w', 'o', 'r', 'l', 'd'))
  }
  // Leaves must come back sorted by ascending weight.
  test("makeOrderedLeafList for some frequency table") {
    assert(makeOrderedLeafList(List(('t', 2), ('e', 1), ('x', 3))) === List(Leaf('e',1), Leaf('t',2), Leaf('x',3)))
  }
  // combine merges the two lightest trees into a Fork and keeps the list ordered.
  test("combine of some leaf list") {
    val leaflist = List(Leaf('e', 1), Leaf('t', 2), Leaf('x', 4))
    assert(combine(leaflist) === List(Fork(Leaf('e',1),Leaf('t',2),List('e', 't'),3), Leaf('x',4)))
  }
  // Round-trip property: decode(encode(text)) == text.
  test("decode and encode a very short text should be identity") {
    new TestTrees {
      assert(decode(t1, encode(t1)("ab".toList)) === "ab".toList)
    }
  }
  // Same round-trip property for the table-based quickEncode.
  test("test quick encode") {
    new TestTrees {
      assert(decode(t2, quickEncode(t2)("abddaba".toList)) === "abddaba".toList)
    }
  }
}
| jatinshah/coursera_scala_programming | wk4/patmat/src/test/scala/patmat/HuffmanSuite.scala | Scala | mit | 1,465 |
package mesosphere.marathon.event.http
import akka.actor._
import com.google.inject.Inject
import mesosphere.marathon.api.LeaderInfo
import mesosphere.marathon.event.LocalLeadershipEvent
import mesosphere.marathon.event.http.HttpEventStreamActor._
import mesosphere.marathon.metrics.Metrics.AtomicIntGauge
import mesosphere.marathon.metrics.{ MetricPrefixes, Metrics }
import org.apache.log4j.Logger
import scala.util.Try
/**
 * A HttpEventStreamHandle is a reference to the underlying client http stream.
 */
trait HttpEventStreamHandle {
  /** Unique id of this stream; also used as the name of the per-stream child actor. */
  def id: String
  /** Remote address of the connected client (for diagnostics). */
  def remoteAddress: String
  /** Pushes one named event with the given payload to the client. */
  def sendEvent(event: String, message: String): Unit
  /** Closes the underlying client stream. */
  def close(): Unit
}
/** Metrics for the event stream actor: gauge of currently open client streams. */
class HttpEventStreamActorMetrics @Inject() (metrics: Metrics) {
  val numberOfStreams: AtomicIntGauge =
    metrics.gauge(metrics.name(MetricPrefixes.API, getClass, "number-of-streams"), new AtomicIntGauge)
}
/**
 * This actor handles subscriptions from event stream handler.
 * It subscribes to the event stream and pushes all marathon events to all listener.
 *
 * While this node is leader it accepts new connections (spawning one child actor
 * per connected client); while standing by it closes existing handles and
 * rejects new ones.
 */
class HttpEventStreamActor(
    leaderInfo: LeaderInfo,
    metrics: HttpEventStreamActorMetrics,
    handleStreamProps: HttpEventStreamHandle => Props)
  extends Actor {
  //map from handle to actor
  private[http] var streamHandleActors = Map.empty[HttpEventStreamHandle, ActorRef]
  private[this] val log = Logger.getLogger(getClass)
  override def preStart(): Unit = {
    metrics.numberOfStreams.setValue(0)
    leaderInfo.subscribe(self)
  }
  override def postStop(): Unit = {
    leaderInfo.unsubscribe(self)
    metrics.numberOfStreams.setValue(0)
  }
  // Start in standby; leadership events (handleLeadership) switch to active.
  override def receive: Receive = standby
  // behaviours
  private[this] val active: Receive = behaviour(acceptingNewConnections)
  private[this] val standby: Receive = behaviour(rejectingNewConnections)
  /**
   * Helper method to create behaviours.
   * The behaviours only differ in how they deal with new connections.
   */
  private[this] def behaviour(newConnectionBehaviour: Receive): Receive = {
    Seq(
      handleLeadership,
      cleanupHandlerActors,
      newConnectionBehaviour,
      warnAboutUnknownMessages
    ).reduceLeft {
      // Prevent fatal warning about deriving type Any as type parameter
      _.orElse[Any, Unit](_)
    }
  }
  // behaviour components
  /** Immediately close new connections. */
  private[this] def rejectingNewConnections: Receive = {
    case HttpEventStreamConnectionOpen(handle) =>
      log.warn("Ignoring open connection request. Closing handle.")
      // Try: close() may throw if the client already disconnected; ignore.
      Try(handle.close())
  }
  /** Accept new connections and create an appropriate handler for them. */
  private[this] def acceptingNewConnections: Receive = {
    case HttpEventStreamConnectionOpen(handle) =>
      metrics.numberOfStreams.setValue(streamHandleActors.size)
      log.info(s"Add EventStream Handle as event listener: $handle. Current nr of streams: ${streamHandleActors.size}")
      // One child actor per connected client, named after the handle id.
      val actor = context.actorOf(handleStreamProps(handle), handle.id)
      context.watch(actor)
      streamHandleActors += handle -> actor
  }
  /** Switch behavior according to leadership changes. */
  private[this] def handleLeadership: Receive = {
    case LocalLeadershipEvent.Standby =>
      log.info("Now standing by. Closing existing handles and rejecting new.")
      streamHandleActors.keys.foreach(removeHandler)
      context.become(standby)
    case LocalLeadershipEvent.ElectedAsLeader =>
      log.info("Became active. Accepting event streaming requests.")
      context.become(active)
  }
  /** Cleanup child actors which are not needed anymore. */
  private[this] def cleanupHandlerActors: Receive = {
    case HttpEventStreamConnectionClosed(handle) => removeHandler(handle)
    case Terminated(actor) => unexpectedTerminationOfHandlerActor(actor)
  }
  // Stops the child actor for `handle` (if any) and updates bookkeeping/metrics.
  private[this] def removeHandler(handle: HttpEventStreamHandle): Unit = {
    streamHandleActors.get(handle).foreach { actor =>
      context.unwatch(actor)
      context.stop(actor)
      streamHandleActors -= handle
      metrics.numberOfStreams.setValue(streamHandleActors.size)
      log.info(s"Removed EventStream Handle as event listener: $handle. " +
        s"Current nr of listeners: ${streamHandleActors.size}")
    }
  }
  // Reverse lookup (actor -> handle) to clean up after an unwatched child dies.
  private[this] def unexpectedTerminationOfHandlerActor(actor: ActorRef): Unit = {
    streamHandleActors.find(_._2 == actor).foreach {
      case (handle, ref) =>
        log.error(s"Actor terminated unexpectedly: $handle")
        streamHandleActors -= handle
        metrics.numberOfStreams.setValue(streamHandleActors.size)
    }
  }
  private[this] def warnAboutUnknownMessages: Receive = {
    case message: Any => log.warn(s"Received unexpected message $message")
  }
}
object HttpEventStreamActor {
  /** A new client connected and wants to receive the event stream. */
  case class HttpEventStreamConnectionOpen(handler: HttpEventStreamHandle)
  /** The client connection behind `handle` was closed. */
  case class HttpEventStreamConnectionClosed(handle: HttpEventStreamHandle)
}
| EasonYi/marathon | src/main/scala/mesosphere/marathon/event/http/HttpEventStreamActor.scala | Scala | apache-2.0 | 4,907 |
package edu.emory.mathcs.ir.liveqa.util
import collection.JavaConverters._
import edu.stanford.nlp.simple.Document
/**
  * Text-processing helpers built on Stanford CoreNLP's simple API.
  */
object NlpUtils {

  /**
    * Lower-cased lemmas of all tokens in the document, keeping only tokens
    * whose first character is a letter or digit (drops pure punctuation;
    * empty strings are dropped via the `' '` placeholder).
    */
  def getLemmas(doc: Document): Seq[String] =
    for {
      sentence <- doc.sentences().asScala
      lemma <- sentence.lemmas.asScala
      lowered = lemma.toLowerCase
      if lowered.headOption.getOrElse(' ').isLetterOrDigit
    } yield lowered
}
| emory-irlab/liveqa | src/main/scala/edu/emory/mathcs/ir/liveqa/util/NlpUtils.scala | Scala | mit | 382 |
package controllers
import play.api.mvc._
import scala.concurrent.ExecutionContext
import ExecutionContext.Implicits.global
/** Controller rendering the contents of one basket as an HTML table. */
object Table extends Controller with SettingsMongo {
  /** Asynchronously fetches the basket's data and renders the table view. */
  def get(basket: String) = Action.async {
    val fData = db.get(basket)
    fData.map { data =>
      Ok(views.html.table(data))
    }
  }
} | kushti/SimpleMonitoring | app/controllers/Table.scala | Scala | apache-2.0 | 323 |
package Client.Messages
import spray.http.HttpResponse
// Actor message carrying an HTTP GET response together with a `reaction` tag.
// NOTE(review): the semantics of `reaction` are not visible here — confirm at call sites.
case class GetMsg(response: HttpResponse, reaction: String) | Nirespire/SecureFacebookAPI | src/main/scala/Client/Messages/GetMsg.scala | Scala | mit | 116 |
package uk.ac.ncl.openlab.intake24.systemsql.admin
import java.time._
import java.time.format.DateTimeFormatter
import java.time.temporal.ChronoUnit
import javax.inject.{Inject, Named, Singleton}
import javax.sql.DataSource
import anorm.Macro.ColumnNaming
import org.slf4j.LoggerFactory
import uk.ac.ncl.openlab.intake24.errors.{LookupError, RecordNotFound, UnexpectedDatabaseError}
import uk.ac.ncl.openlab.intake24.services.systemdb.admin._
import uk.ac.ncl.openlab.intake24.sql.SqlDataService
import anorm.{Macro, SQL, SqlParser, ~}
@Singleton
class ScheduledDataExportImpl @Inject()(@Named("intake24_system") val dataSource: DataSource) extends ScheduledDataExportService with SqlDataService {

  private val logger = LoggerFactory.getLogger(classOf[ScheduledDataExportImpl])
  // Format used when binding LocalTime into the SQL `time` column.
  private val timeFormatter = DateTimeFormatter.ofPattern("HH:mm")

  /**
    * Next run instant for a task that runs at local `time` in `tz` on the days
    * enabled in `daysOfWeek` (bit mask; bit 0 = Monday, per getDayOfWeek.getValue - 1),
    * converted to a UTC wall-clock LocalDateTime for storage in `next_run_utc`.
    * Returns None when no day within the next 7 is enabled (empty mask).
    */
  private def nextRunAt(daysOfWeek: Int, time: LocalTime, tz: ZoneId): Option[LocalDateTime] = {
    def tryNextDate(candidate: ZonedDateTime, offset: Int): Option[ZonedDateTime] =
      if (offset > 7)
        None
      else {
        val nowThere = ZonedDateTime.now(tz)
        val dow = candidate.getDayOfWeek.getValue - 1
        // Candidate qualifies if it lies strictly in the future and its weekday bit is set.
        if (nowThere.compareTo(candidate) < 0 && (daysOfWeek & (1 << dow)) != 0)
          Some(candidate)
        else
          tryNextDate(candidate.plus(1, ChronoUnit.DAYS), offset + 1)
      }
    val localRunAt = tryNextDate(ZonedDateTime.of(LocalDate.now(tz), time, tz), 0).map(_.toOffsetDateTime)
    localRunAt.map(_.atZoneSameInstant(ZoneOffset.UTC).toLocalDateTime)
  }

  /** Creates a scheduled export task and computes its first `next_run_utc`. */
  def createScheduledTask(userId: Long, surveyId: String, periodDays: Option[Int], daysOfWeek: Int, time: LocalTime, timeZoneId: String,
                          action: String, actionConfig: String): Either[UnexpectedDatabaseError, Long] = tryWithConnection {
    implicit conn =>
      val nextRunTime = nextRunAt(daysOfWeek, time, ZoneId.of(timeZoneId))
      nextRunTime match {
        case Some(runTime) =>
          val id: Option[Long] = SQL(
            """INSERT INTO data_export_scheduled(user_id, survey_id, period_days, days_of_week, time, time_zone, action, action_config, next_run_utc)
              |VALUES({user_id},{survey_id},{period_days},{days_of_week},{time}::time,{time_zone},{action},{action_config},{next_run_utc})""".stripMargin)
            .on('user_id -> userId, 'survey_id -> surveyId, 'period_days -> periodDays, 'days_of_week -> daysOfWeek,
              'time -> time.format(timeFormatter), 'time_zone -> timeZoneId, 'action -> action, 'action_config -> actionConfig,
              'next_run_utc -> runTime)
            .executeInsert()
          id match {
            case Some(id) => Right(id)
            case None => Left(UnexpectedDatabaseError(new RuntimeException("insert failed")))
          }
        case None =>
          Left(UnexpectedDatabaseError(new RuntimeException("Cannot find a suitable run time for task, check days of week setting")))
      }
  }

  /** All tasks whose `next_run_utc` is already in the past, oldest first. */
  def getPendingScheduledTasks(): Either[UnexpectedDatabaseError, Seq[PendingScheduledExportTask]] = tryWithConnection {
    implicit conn =>
      Right(SQL(
        """SELECT id, user_id, survey_id, period_days, action, action_config
           FROM data_export_scheduled
           WHERE now() AT TIME ZONE 'utc' > next_run_utc ORDER BY next_run_utc
        """.stripMargin)
        .executeQuery()
        .as(Macro.namedParser[PendingScheduledExportTask](ColumnNaming.SnakeCase).*))
  }

  /** Recomputes and stores `next_run_utc` for one task after it has run. */
  def updateNextRunTime(scheduledTaskId: Long): Either[LookupError, Unit] = tryWithConnection {
    implicit conn =>
      withTransaction {
        val taskParams = SQL("SELECT days_of_week, time::text, time_zone FROM data_export_scheduled WHERE id = {task_id}")
          .on('task_id -> scheduledTaskId)
          .executeQuery()
          .as((SqlParser.int(1) ~ SqlParser.str(2) ~ SqlParser.str(3)).singleOpt)
        taskParams match {
          case Some(daysOfWeek ~ timeStr ~ timeZoneId) =>
            val time = LocalTime.parse(timeStr)
            val nextRunTime = nextRunAt(daysOfWeek, time, ZoneId.of(timeZoneId))
            nextRunTime match {
              // NOTE(review): `time` here shadows the LocalTime above, and the bind
              // below uses the Option `nextRunTime` rather than the unwrapped value —
              // relies on anorm's Option parameter support; equivalent under the Some guard.
              case Some(time) =>
                SQL("UPDATE data_export_scheduled SET next_run_utc={next_run} WHERE id={task_id}")
                  .on('next_run -> nextRunTime, 'task_id -> scheduledTaskId)
                  .executeUpdate()
                Right(())
              case None =>
                Left(UnexpectedDatabaseError(new RuntimeException(s"Cannot find a suitable run time for scheduled export task $scheduledTaskId, check days of week setting")))
            }
          case None => Left(RecordNotFound(new RuntimeException(s"Task $scheduledTaskId not found")))
        }
      }
  }
}
| digitalinteraction/intake24 | SystemDataSQL/src/main/scala/uk/ac/ncl/openlab/intake24/systemsql/admin/ScheduledDataExportImpl.scala | Scala | apache-2.0 | 4,744 |
package cwe.scala.library.graph
class Vertex
| wwwigii-system/research | cwe-scala-library/src/cwe/scala/library/graph/Vertex.scala | Scala | gpl-3.0 | 46 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.stats
import enumeratum.EnumEntry
import io.truthencode.ddo.api.model.effect.DetailedEffect
import io.truthencode.ddo.enhancement.BonusType
import io.truthencode.ddo.model.attribute.Attribute
import io.truthencode.ddo.model.effect._
import io.truthencode.ddo.support.naming.UsingSearchPrefix
import scala.util.Try
/**
 * A statistic keyed by an enumeration entry, holding a value and optional modifiers.
 *
 * @tparam T the enumeration type identifying the statistic (e.g. an [[Attribute]])
 * @tparam V the type of the statistic's value
 */
trait StatItem[T <: EnumEntry, V] {
// The enumeration entry this statistic corresponds to.
val item: T
// The (possibly computed) value of the statistic.
def value: V
// Modifiers applied to this statistic; none by default.
def modifiers: List[PartModifier[V, T]] = Nil
}
/**
 * Convenience trait for statistics whose type is also their value, i.e. an Alignment or a Race is
 * itself the value.
 *
 * @tparam T
 *   the enumeration type that serves as both key and value
 */
trait SimpleStatItem[T <: EnumEntry] extends StatItem[T, T]
/**
 * An integer-valued statistic for a character [[Attribute]] (Strength, Wisdom, ...),
 * carrying an unmodified base value alongside the inherited computed `value`.
 */
trait AttributeStat extends StatItem[Attribute, Int] {
// The base (unmodified) score for this attribute.
val baseValue: Int
// override def modifiers: List[PartModifier[Int, Attribute]] = Nil
}
// trait AStr extends AttributeStat[Strength]
/**
 * A player's score for an arbitrary [[Attribute]].
 *
 * @param item the attribute this score belongs to
 * @param baseValue the unmodified base score, defaulting to 0
 */
case class PlayerAttribute(
override val item: Attribute,
override val baseValue: Int = 0
) extends AttributeStat {
// WARNING: unimplemented — calling value throws NotImplementedError.
override def value: Int = ???
}
/**
 * A player's Strength score; the attribute is fixed to [[Attribute.Strength]].
 *
 * @param baseValue the unmodified base score, defaulting to 0
 */
case class PlayerStrAttribute(
override val baseValue: Int = 0
) extends AttributeStat {
// override val item: A = Attribute.Strength
override val item: Attribute = Attribute.Strength
// WARNING: unimplemented — calling value throws NotImplementedError.
override def value: Int = ???
}
/**
 * Experimental scratch object exploring how attribute modifiers might be modelled.
 * NOTE(review): contains `???` placeholders and dead commented-out code; evaluating
 * `f` or `effectDetail` throws NotImplementedError. Not production code, as the name
 * suggests.
 */
object ThrowAway {
// Type aliases used to sketch out the modifier model below.
type T = Attribute
type V = Int
type A = BonusType
type MyPair = (EffectParameter, V)
// Example mapping from an attribute to its (parameter, value) modifiers.
val m: Map[T, List[MyPair]] = Map(
Attribute.Strength -> List(
(EffectParameter.BonusType(BonusType.ActionBoost), 5)
)
)
// val a = new AttributeStat[Attribute.Strength] {}
// Example anonymous PartModifier instance; several members are left unimplemented.
val f: PartModifier[V, T] with UsingSearchPrefix = new PartModifier[V, T] with UsingSearchPrefix {
/**
* The General Description should be just that. This should not include specific values unless
* all instances will share that value. I.e. a Dodge Effect might state it increases your
* miss-chance, but omit any value such as 20%. Those values will be displayed in the effectText
* of a specific implementation such as the Dodge Feat or Uncanny Dodge
*/
override val generalDescription: String = "Some specifically vague description"
/**
* a list of Categories useful for menu / UI placement and also for searching / querying for
* Miss-Chance or other desired effects.
*
* This list might be constrained or filtered by an Enumeration or CSV file. The goal is to
* enable quick and advanced searching for specific categories from general (Miss-Chance) to
* specific (evasion). In addition, it may be useful for deep searching such as increasing Spot,
* which should suggest not only +Spot items, but +Wisdom or eventually include a feat or
* enhancement that allows the use of some other value as your spot score.
*/
override def categories: Seq[String] = Seq(EffectCategories.General.toString)
/**
* Used when qualifying a search with a prefix. Examples include finding "HalfElf" from
* qualified "Race:HalfElf"
*
* @return
* A default or applied prefix
*/
override def searchPrefixSource: String = Attribute.searchPrefixSource
// lazy override protected[this] val partToModify: T = Attribute.Strength
// private val eb = EffectParameterBuilder()
// .toggleOffValue(triggerOff: _*)
// .toggleOnValue(triggerOn: _*)
// .addBonusType(spellCriticalBonusType)
// .build
//
// override protected[this] def effectParameters: Seq[ParameterModifier[_]] = eb.modifiers
// Unimplemented placeholder — throws NotImplementedError if evaluated.
override val effectDetail: DetailedEffect = ???
override val value: V = 3
override val source: SourceInfo = new SourceInfo {
override val sourceId: String = "Example"
override val sourceRef: AnyRef = this
}
// Unimplemented placeholder — throws NotImplementedError if evaluated.
override protected[this] val partToModify: T = ???
}
val str: PlayerAttribute = PlayerAttribute(Attribute.Strength, 6)
val l = List((EffectParameter.BonusType, 3))
// val str = PlayerAttribute(item= Strength,baseValue = 6)
// Exercises grouping modifiers by effect part; results are discarded
// (the second for-comprehension yields the unrelated map `m`).
def doStr(): Unit = {
val mm: Map[Try[EffectPart], List[PartModifier[V, T]]] =
str.modifiers.filter(f => f.part.isSuccess).groupBy(_.part)
for {
g <- mm
} yield g
for {
part <- str.modifiers.filter(f => f.part.isSuccess).groupBy(_.part)
mod <- part._2
} yield m
// val ps = for {
// group <- str.modifiers.groupBy(_.part)
// feature <- group._2
// param <- feature.parameter
//
// } yield param
// val g: Map[Try[EffectPart], List[Feature[V]]] = str.features.groupBy(_.part)
// g.foreach(k => k._2.)
// str.features.foldLeft(Map[Try[EffectPart]],List[Try[EffectParameter]]()) {(a,b) =>b match {
//
// }}
}
// val strAttribute = StatEnums(Attribute.Strength,)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/stats/StatItem.scala | Scala | apache-2.0 | 5,517 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
/**
 * Tests for the [[CollapseRepartition]] optimizer rule, covering every pairing of
 * adjacent `repartition`, `coalesce` and `distribute` (repartition-by-expression)
 * operators. In each case the topmost operator wins, except that a `coalesce` above
 * a repartition is only removed when it would not reduce the partition count.
 */
class CollapseRepartitionSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("CollapseRepartition", FixedPoint(10),
CollapseRepartition) :: Nil
}
val testRelation = LocalRelation('a.int, 'b.int)
test("collapse two adjacent coalesces into one") {
// Always respects the top coalesces and removes useless coalesce below coalesce
val query1 = testRelation
.coalesce(10)
.coalesce(20)
val query2 = testRelation
.coalesce(30)
.coalesce(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.coalesce(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("collapse two adjacent repartitions into one") {
// Always respects the top repartition and removes useless repartition below repartition
val query1 = testRelation
.repartition(10)
.repartition(20)
val query2 = testRelation
.repartition(30)
.repartition(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.repartition(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("coalesce above repartition") {
// Remove useless coalesce above repartition
val query1 = testRelation
.repartition(10)
.coalesce(20)
val optimized1 = Optimize.execute(query1.analyze)
val correctAnswer1 = testRelation.repartition(10).analyze
comparePlans(optimized1, correctAnswer1)
// No change in this case
val query2 = testRelation
.repartition(30)
.coalesce(20)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer2 = query2.analyze
comparePlans(optimized2, correctAnswer2)
}
test("repartition above coalesce") {
// Always respects the top repartition and removes useless coalesce below repartition
val query1 = testRelation
.coalesce(10)
.repartition(20)
val query2 = testRelation
.coalesce(30)
.repartition(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.repartition(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("repartitionBy above repartition") {
// Always respects the top repartitionBy and removes useless repartition
val query1 = testRelation
.repartition(10)
.distribute('a)(20)
val query2 = testRelation
.repartition(30)
.distribute('a)(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.distribute('a)(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("repartitionBy above coalesce") {
// Always respects the top repartitionBy and removes useless coalesce below repartition
val query1 = testRelation
.coalesce(10)
.distribute('a)(20)
val query2 = testRelation
.coalesce(30)
.distribute('a)(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.distribute('a)(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("repartition above repartitionBy") {
// Always respects the top repartition and removes useless distribute below repartition
val query1 = testRelation
.distribute('a)(10)
.repartition(20)
val query2 = testRelation
.distribute('a)(30)
.repartition(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.repartition(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
test("coalesce above repartitionBy") {
// Remove useless coalesce above repartition
val query1 = testRelation
.distribute('a)(10)
.coalesce(20)
val optimized1 = Optimize.execute(query1.analyze)
val correctAnswer1 = testRelation.distribute('a)(10).analyze
comparePlans(optimized1, correctAnswer1)
// No change in this case
val query2 = testRelation
.distribute('a)(30)
.coalesce(20)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer2 = query2.analyze
comparePlans(optimized2, correctAnswer2)
}
test("collapse two adjacent repartitionBys into one") {
// Always respects the top repartitionBy
val query1 = testRelation
.distribute('b)(10)
.distribute('a)(20)
val query2 = testRelation
.distribute('b)(30)
.distribute('a)(20)
val optimized1 = Optimize.execute(query1.analyze)
val optimized2 = Optimize.execute(query2.analyze)
val correctAnswer = testRelation.distribute('a)(20).analyze
comparePlans(optimized1, correctAnswer)
comparePlans(optimized2, correctAnswer)
}
}
| jianran/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseRepartitionSuite.scala | Scala | apache-2.0 | 6,405 |
package anomalydetection.algorithms
import anomalydetection.spec.Detector
/**
* Created by Seif-Eddine Benkabou on 28/08/2017.
*/
/**
 * Ridge-penalty variant of [[Detector]].
 *
 * @param dist pairwise distance matrix between time series
 * @param k number of neighbours considered by the base detector
 * @param alpha regularisation weight passed to the base detector
 */
class Ridge (dist:Array[Array[Double]],k:Int,alpha:Double) extends Detector(dist,k,alpha){
/**
 * Computes one ridge-penalty weight per row of the distance matrix.
 *
 * For each row, `closest(i)` is the distance to its nearest neighbour; each
 * weight is `(2*lambda + sum(closest) - n*closest(i)) / (2*lambda*sum(closest))`.
 *
 * @param DI distance matrix (each row must be non-empty, as `min` is taken)
 * @param lambda ridge regularisation parameter (must be non-zero to avoid division by zero)
 * @return one penalty weight per row of DI
 */
protected def penalty(DI: Array[Array[Double]], lambda: Double): Array[Double] = {
// Distance from each row to its closest neighbour.
// (map replaces the original foldLeft with Array append, which was O(n^2).)
val closest = DI.map(_.min)
// Hoist loop invariants: the original recomputed closest.sum for every element.
val total = closest.sum
val n = closest.length
closest.map(current => ((2 * lambda) + total - (n * current)) / (2 * lambda * total))
}
}
/*
*
*/
package see.values
import see.DivisionByZero
/**
 * Floating point number (always double).
 *
 * Arithmetic that overflows to infinity is retried in arbitrary precision via [[BigR]];
 * division and modulo results that are whole numbers are demoted to [[Lint]].
 */
private[see] case class Real(v: Double) extends Number {
type T = Double
override def selType = 'Real
override def isType(typeId: Symbol) = (typeId == 'Real) || super.isType(typeId)
override def toJava = Double.box(v)
// Widen this value to the other operand's representation for mixed-type operations.
override def propagate(other: Comparable) = other match {
case BigI(_) => BigR(toBig)
case BigR(_) => BigR(toBig)
case Str(_) => Str(toStr)
case _ => this
}
// NOTE(review): strictly positive values are truthy; 0 and negatives are false.
override def toBool: Boolean = v > 0
override def toLong: Long = v.toLong
override def toDouble: Double = v
override def toBigI: BigInt = BigDecimal(v).toBigInt()
override def toBig: BigDecimal = BigDecimal(v)
override def cmp(rhs: Comparable) = v compare rhs.propagate(this).toDouble
// Demote whole-number results to an integer Lint, otherwise keep them Real.
private def reduce(result: Double) = {
if (result == result.toLong)
Lint(result.toLong)
else Real(result)
}
def abs = Real(v.abs)
def negate: Number = Real(-v)
// On overflow to infinity, redo the addition in arbitrary precision.
override def add_(rhs: Number) = {
val r = v + rhs.toDouble
if (r.isInfinite)
BigR(v) add_ rhs
else Real(r)
}
// On overflow to infinity, redo the subtraction in arbitrary precision.
override def sub_(rhs: Number) = {
val r = v - rhs.toDouble
if (r.isInfinite)
BigR(v) sub_ rhs
else Real(r)
}
// On overflow to infinity, redo the multiplication in arbitrary precision.
override def mul_(rhs: Number) = {
val r = v * rhs.toDouble
if (r.isInfinite)
BigR(v) mul_ rhs
else Real(r)
}
// Division by zero raises; infinite results are retried in arbitrary precision;
// whole-number results are demoted to Lint.
override def div_(rhs: Number) = {
val r = rhs.toDouble
if (r == 0.0) throw new DivisionByZero()
else {
val result = v / r
if (result.isInfinite) BigR(v) div_ rhs
else reduce(result)
}
}
override def mod_(rhs: Number) = {
// no overflow, possible here
if (rhs.toDouble == 0.0) throw new DivisionByZero()
else reduce(v % rhs.toDouble)
}
// NOTE(review): unlike the other operators, no overflow fallback to BigR here.
override def pwr_(rhs: Number) = Real(math.pow(v, rhs.toDouble))
// Preference score for Java interop conversions: exact double match ranks highest,
// float is acceptable, anything else unsupported.
override def fits(destType: Class[_]) = if (
(destType == java.lang.Double.TYPE) ||
destType.isAssignableFrom(classOf[java.lang.Double])) 10
else if ((destType == java.lang.Float.TYPE) ||
destType.isAssignableFrom(classOf[java.lang.Float])) 5
else 0
}
| acruise/see | src/main/scala/see/values/Real.scala | Scala | bsd-3-clause | 2,134 |
package spatutorial.client.modules
import japgolly.scalajs.react.extra.router2.RouterCtl
import spatutorial.client.SPAMain.{TodoLoc, DashboardLoc, Loc}
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.extra.OnUnmount
import japgolly.scalajs.react.vdom.prefix_<^._
import rx._
import rx.ops._
import spatutorial.client.components.Bootstrap.CommonStyle
import spatutorial.client.components.Icon._
import spatutorial.client.components._
import spatutorial.client.services._
import spatutorial.shared.TodoItem
/**
 * Main navigation menu component (scalajs-react). Renders a link per [[MenuItem]],
 * highlighting the current location and badging the Todo entry with the count of
 * open todo items.
 */
object MainMenu {
// shorthand for styles
@inline private def bss = GlobalStyles.bootstrapStyles
// ctl: router control for building links; currentLoc: active location; todos: reactive todo list.
case class Props(ctl: RouterCtl[Loc], currentLoc: Loc, todos: Rx[Seq[TodoItem]])
// One entry of the menu: stable key, label renderer, icon and target location.
case class MenuItem(idx: Int, label: (Props) => ReactNode, icon: Icon, location: Loc)
class Backend(t: BackendScope[Props, _]) extends OnUnmount {
// Called once the component is mounted: subscribe to todo changes and trigger a refresh.
def mounted(): Unit = {
// hook up to Todo changes
val obsItems = t.props.todos.foreach { _ => t.forceUpdate() }
onUnmount {
// stop observing when unmounted (= never in this SPA)
obsItems.kill()
}
MainDispatcher.dispatch(RefreshTodos)
}
}
// build the Todo menu item, showing the number of open todos
private def buildTodoMenu(props: Props): ReactNode = {
val todoCount = props.todos().count(!_.completed)
Seq(
<.span("Todo "),
// only show the badge when there is at least one open todo
if (todoCount > 0) <.span(bss.labelOpt(CommonStyle.danger), bss.labelAsBadge, todoCount) else <.span()
)
}
private val menuItems = Seq(
MenuItem(1, _ => "Dashboard", Icon.dashboard, DashboardLoc),
MenuItem(2, buildTodoMenu, Icon.check, TodoLoc)
)
private val MainMenu = ReactComponentB[Props]("MainMenu")
.stateless
.backend(new Backend(_))
.render((P, _, B) => {
<.ul(bss.navbar)(
// build a list of menu items
for (item <- menuItems) yield {
// mark the item matching the current location as active
<.li(^.key := item.idx, (P.currentLoc == item.location) ?= (^.className := "active"),
P.ctl.link(item.location)(item.icon, " ", item.label(P))
)
}
)
})
.componentDidMount(_.backend.mounted())
.build
def apply(props: Props) = MainMenu(props)
}
| insdami/scalajs-spa-tutorial | client/src/main/scala/spatutorial/client/modules/MainMenu.scala | Scala | apache-2.0 | 2,188 |
import language.experimental.namedTypeArguments
/**
 * Compiler test fixture for named type arguments: calls `f` with every subset and
 * permutation of explicitly named type parameters, plus fully positional and fully
 * inferred forms. The exhaustive call list is intentional — do not deduplicate.
 */
object Test {
def f[
T1 <: String,
T2 <: Int,
T3 <: Boolean
](a1: T1, a2: T2, a3: T3) = ()
f ("", 1, true)
f[T1 = String] ("", 1, true)
f[T2 = Int] ("", 1, true)
f[T3 = Boolean] ("", 1, true)
f[T1 = String, T2 = Int] ("", 1, true)
f[T1 = String, T3 = Boolean] ("", 1, true)
f[T2 = Int, T1 = String] ("", 1, true)
f[T2 = Int, T3 = Boolean] ("", 1, true)
f[T3 = Boolean, T2 = Int] ("", 1, true)
f[T3 = Boolean, T1 = String] ("", 1, true)
f[T1 = String, T2 = Int, T3 = Boolean]("", 1, true)
f[T1 = String, T3 = Boolean, T2 = Int] ("", 1, true)
f[T2 = Int, T1 = String, T3 = Boolean]("", 1, true)
f[T2 = Int, T3 = Boolean, T1 = String] ("", 1, true)
f[T3 = Boolean, T1 = String, T2 = Int] ("", 1, true)
f[T3 = Boolean, T2 = Int, T1 = String] ("", 1, true)
f[String, Int, Boolean] ("", 1, true)
}
| dotty-staging/dotty | tests/pos/t1513b.scala | Scala | apache-2.0 | 1,161 |
package net.sf.latexdraw.instruments
import java.awt.geom.Point2D
import java.awt.geom.Rectangle2D
import java.awt.BasicStroke
import java.awt.Color
import java.awt.Cursor
import java.awt.Graphics2D
import scala.collection.mutable.ListBuffer
import org.malai.instrument.Link
import org.malai.instrument.Instrument
import org.malai.interaction.library.DnD
import org.malai.mapping.MappingRegistry
import org.malai.picking.Pickable
import org.malai.picking.Picker
import net.sf.latexdraw.actions.shape.MoveCtrlPoint
import net.sf.latexdraw.actions.shape.MovePointShape
import net.sf.latexdraw.actions.shape.ModifyShapeProperty
import net.sf.latexdraw.actions.shape.RotateShapes
import net.sf.latexdraw.actions.shape.ScaleShapes
import net.sf.latexdraw.actions.shape.ShapeProperties
import net.sf.latexdraw.badaboom.BadaboomCollector
import net.sf.latexdraw.glib.handlers.ArcAngleHandler
import net.sf.latexdraw.glib.handlers.CtrlPointHandler
import net.sf.latexdraw.glib.handlers.IHandler
import net.sf.latexdraw.glib.handlers.MovePtHandler
import net.sf.latexdraw.glib.handlers.RotationHandler
import net.sf.latexdraw.glib.handlers.ScaleHandler
import net.sf.latexdraw.glib.models.interfaces.IShape.Position
import net.sf.latexdraw.glib.models.interfaces.IShape
import net.sf.latexdraw.glib.models.interfaces.DrawingTK
import net.sf.latexdraw.glib.models.interfaces.IArc
import net.sf.latexdraw.glib.models.interfaces.IControlPointShape
import net.sf.latexdraw.glib.models.interfaces.IGroup
import net.sf.latexdraw.glib.models.interfaces.IModifiablePointsShape
import net.sf.latexdraw.glib.models.interfaces.IPoint
import net.sf.latexdraw.glib.ui.ICanvas
import net.sf.latexdraw.glib.views.Java2D.interfaces.IViewArc
import net.sf.latexdraw.glib.views.Java2D.interfaces.IViewBezierCurve
import net.sf.latexdraw.glib.views.Java2D.interfaces.IViewModifiablePtsShape
import net.sf.latexdraw.glib.views.Java2D.interfaces.IViewShape
import net.sf.latexdraw.mapping.Shape2BorderMapping
import net.sf.latexdraw.util.LNumber
import net.sf.latexdraw.glib.models.impl.LDrawing
import org.malai.action.Action
import net.sf.latexdraw.actions.shape.RotateShapes
import net.sf.latexdraw.actions.shape.TranslateShapes
/**
* This instrument manages the selected views.<br>
* <br>
* This file is part of LaTeXDraw<br>
* Copyright (c) 2005-2013 Arnaud BLOUIN<br>
* <br>
* LaTeXDraw is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.<br>
* <br>
* LaTeXDraw is distributed without any warranty; without even the
* implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
* PURPOSE. See the GNU General Public License for more details.<br>
* <br>
* 2012-04-20<br>
* @author Arnaud BLOUIN
* @version 3.0
*/
class Border(val canvas : ICanvas) extends Instrument with Picker {
/** The stroke used by the border to display its bounding rectangle (dashed, 2px). */
val stroke = new BasicStroke(2f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 1f, Array(7, 7), 0)
/** The selected views. */
protected val _selection : ListBuffer[IViewShape] = new ListBuffer()
/** The rectangle used to show the selection. */
protected val _border : Rectangle2D = new Rectangle2D.Double()
/** The handlers that scale shapes. */
protected val _scaleHandlers : ListBuffer[IHandler] = new ListBuffer()
/** The handlers that move points. */
protected lazy val _mvPtHandlers : ListBuffer[IHandler] = new ListBuffer()
/** The handlers that move first control points. */
protected lazy val _ctrlPt1Handlers : ListBuffer[IHandler] = new ListBuffer()
/** The handlers that move second control points. */
protected lazy val _ctrlPt2Handlers : ListBuffer[IHandler] = new ListBuffer()
// /** The handler that sets the arc frame. */
// protected lazy val frameArcHandler : FrameArcHandler = new FrameArcHandler()
/** The handler that sets the start angle of an arc. */
protected lazy val _arcHandlerStart : ArcAngleHandler = new ArcAngleHandler(true)
/** The handler that sets the end angle of an arc. */
protected lazy val _arcHandlerEnd : ArcAngleHandler = new ArcAngleHandler(false)
/** The handler that rotates shapes. */
protected val _rotHandler : IHandler = new RotationHandler()
// Widget customiser notified when actions complete; may remain null if never set.
protected var _metaCustomiser : MetaShapeCustomiser = null
// Initialisation of the handlers that are always used.
_scaleHandlers += new ScaleHandler(Position.NW)
_scaleHandlers += new ScaleHandler(Position.NORTH)
_scaleHandlers += new ScaleHandler(Position.NE)
_scaleHandlers += new ScaleHandler(Position.WEST)
_scaleHandlers += new ScaleHandler(Position.EAST)
_scaleHandlers += new ScaleHandler(Position.SW)
_scaleHandlers += new ScaleHandler(Position.SOUTH)
_scaleHandlers += new ScaleHandler(Position.SE)
// Read-only accessors over the internal handler state.
def scaleHandlers = _scaleHandlers
def mvPtHandlers = _mvPtHandlers
def ctrlPt1Handlers = _ctrlPt1Handlers
def ctrlPt2Handlers = _ctrlPt2Handlers
def arcHandlerStart = _arcHandlerStart
def arcHandlerEnd = _arcHandlerEnd
def rotHandler = _rotHandler
def border = _border
def setMetaCustomiser(metaCustomiser:MetaShapeCustomiser) { _metaCustomiser = metaCustomiser }
override def reinit() {
_selection.clear
_border.setFrame(0, 0, 1, 1)
}
override def interimFeedback() {
canvas.setCursor(Cursor.getDefaultCursor)
}
// Refreshes the relevant customiser panel when an action affecting the selection completes.
override def onActionDone(action:Action) {
if(_metaCustomiser!=null) {
action match {
case _:RotateShapes => _metaCustomiser.rotationCustomiser.update()
case _:ModifyShapeProperty => _metaCustomiser.arcCustomiser.update()
case _:MoveCtrlPoint => _metaCustomiser.dimPosCustomiser.update()
case _:MovePointShape => _metaCustomiser.dimPosCustomiser.update()
case _:ScaleShapes => _metaCustomiser.dimPosCustomiser.update()
case _ =>
}
}
}
/**
* Updates the bounding rectangle using the selected views.
* @since 3.0
*/
def update() {
if(!isActivated()) return
if(_selection.isEmpty)
_border.setFrame(0, 0, 1, 1)
else {
// Compute the union of all selected views' bounds, then scale by the zoom level.
val zoomLevel = canvas.getZoom
var minX = Double.MaxValue
var minY = Double.MaxValue
var maxX = Double.MinValue
var maxY = Double.MinValue
_selection.foreach{view =>
val bounds = view.getBorder
if(bounds.getMinX<minX)
minX = bounds.getMinX
if(bounds.getMinY<minY)
minY = bounds.getMinY
if(bounds.getMaxX>maxX)
maxX = bounds.getMaxX
if(bounds.getMaxY>maxY)
maxY = bounds.getMaxY
}
_border.setFrame(minX*zoomLevel, minY*zoomLevel, (maxX-minX)*zoomLevel, (maxY-minY)*zoomLevel)
updateHandlersPosition
}
}
/**
* Updates the position of the handlers.
* @since 3.0
*/
private def updateHandlersPosition() {
_scaleHandlers.foreach{handler => handler.updateFromShape(_border)}
_rotHandler.setPoint(_border.getMaxX, _border.getMinY)
// if(isFrameArcHandlerShowable())
// frameArcHandler.updateFromLineArcShape((ILineArcShape)selection.get(0).getShape())
updateArcHandlers
updateMvHandlers
updateCtrlMvHandlers
}
/**
* Updates the arc handlers.
* @since 3.0
*/
private def updateArcHandlers() {
if(isArcHandlerShowable) {
val sh = _selection.apply(0).getShape
if(sh.isInstanceOf[IArc]) {
val arc = sh.asInstanceOf[IArc]
_arcHandlerStart.updateFromArc(arc, canvas.getZoom)
_arcHandlerEnd.updateFromArc(arc, canvas.getZoom)
}
}
}
/**
* Updates the handlers that move control points.
* @since 3.0
*/
private def updateCtrlMvHandlers() {
if(isCtrlPtMvHandlersShowable) {
val sh = _selection.apply(0).getShape
if(sh.isInstanceOf[IControlPointShape])
// Lazy initialisation
initialiseCtrlMvHandlers(sh.asInstanceOf[IControlPointShape])
}
}
// Grows/shrinks the control-point handler lists to match the shape's point count,
// then positions each handler on its control point (zoom applied).
private def initialiseCtrlMvHandlers(cps : IControlPointShape) {
val zoom = canvas.getZoom
val nbPts = cps.getNbPoints
var pt : IPoint = null
// Adding missing handlers.
if(_ctrlPt1Handlers.size<nbPts)
for(i <- _ctrlPt1Handlers.size to nbPts-1) {
_ctrlPt1Handlers += new CtrlPointHandler(i)
_ctrlPt2Handlers += new CtrlPointHandler(i)
}
// Removing extra handlers.
else if(_ctrlPt1Handlers.size>nbPts)
while(_ctrlPt1Handlers.size>nbPts) {
_ctrlPt1Handlers.remove(0)
_ctrlPt2Handlers.remove(0)
}
// Updating handlers.
for(i <- 0 to _ctrlPt1Handlers.size-1) {
pt = cps.getFirstCtrlPtAt(i)
_ctrlPt1Handlers.apply(i).setPoint(pt.getX*zoom, pt.getY*zoom)
pt = cps.getSecondCtrlPtAt(i)
_ctrlPt2Handlers.apply(i).setPoint(pt.getX*zoom, pt.getY*zoom)
}
}
/**
* Updates the handlers that move points.
* @since 3.0
*/
private def updateMvHandlers() {
if(isPtMvHandlersShowable) {
val sh = _selection.apply(0).getShape
if(sh.isInstanceOf[IModifiablePointsShape]) {
val pts = sh.asInstanceOf[IModifiablePointsShape]
val nbPts = pts.getNbPoints
val zoom = canvas.getZoom
var pt : IPoint = null
if(_mvPtHandlers.size<nbPts)
for(i <- _mvPtHandlers.size to nbPts-1)
_mvPtHandlers += new MovePtHandler(i)
else if(_mvPtHandlers.size>nbPts)
while(_mvPtHandlers.size>nbPts)
_mvPtHandlers.remove(0)
for(i <- 0 to _mvPtHandlers.size-1) {
pt = pts.getPtAt(i)
_mvPtHandlers.apply(i).setPoint(pt.getX*zoom, pt.getY*zoom)
}
}
}
}
/**
* Paints the border if activated.
* @param g The graphics in which the border is painted.
* @since 3.0
*/
def paint(g : Graphics2D) {
if(isActivated) {
g.setColor(Color.GRAY)
g.setStroke(stroke)
g.draw(_border)
paintHandlers(g)
}
}
/**
* Paints the required handlers.
*/
private def paintHandlers(g : Graphics2D) {
_scaleHandlers.foreach{handler => handler.paint(g)}
_rotHandler.paint(g)
// if(isFrameArcHandlerShowable())
// frameArcHandler.paint(g)
if(isArcHandlerShowable) {
_arcHandlerStart.paint(g)
_arcHandlerEnd.paint(g)
}
if(isPtMvHandlersShowable) {
_mvPtHandlers.foreach{mvHandler => mvHandler.paint(g)}
if(isCtrlPtMvHandlersShowable) {
_ctrlPt1Handlers.foreach{handler => handler.paint(g)}
_ctrlPt2Handlers.foreach{handler => handler.paint(g)}
}
}
}
/**
* @return True if the control move point handlers can be painted.
*/
protected def isCtrlPtMvHandlersShowable() = _selection.size==1 && _selection.apply(0).isInstanceOf[IViewBezierCurve]
/**
* @return True if the move point handlers can be painted.
*/
protected def isPtMvHandlersShowable() = _selection.size==1 && _selection.apply(0).isInstanceOf[IViewModifiablePtsShape]
/**
* @return True if the arc handlers can be painted.
*/
protected def isArcHandlerShowable() = _selection.size==1 && _selection.apply(0).isInstanceOf[IViewArc]
// /**
// * @return True if the frame arc handler can be painted.
// */
// protected boolean isFrameArcHandlerShowable() {
// return selection.size()==1 && selection.get(0).getShape() instanceof ILineArcShape
// }
/**
* Adds the given shape to the selection. If the instrument is
* activated and the addition is performed, the instrument is updated.
* @param view The view to add. If null, nothing is done.
* @since 3.0
*/
def add(view : IViewShape) {
if(view!=null) {
_selection += view
if(isActivated) {
// The border is updated only if the view has been added and
// the border is activated.
update
MappingRegistry.REGISTRY.addMapping(new Shape2BorderMapping(MappingRegistry.REGISTRY.getSourceFromTarget(view, classOf[IShape]), this))
}
}
}
/**
* Removes the given view from the selection. If the instrument is
* activated and the removal is performed, the instrument is updated.
* @param view The view to remove. If null or it is not
* already in the selection, nothing is performed.
* @since 3.0
*/
def remove(view : IViewShape) {
if(view!=null) {
_selection -= view
MappingRegistry.REGISTRY.removeMappingsUsingSource(MappingRegistry.REGISTRY.getSourceFromTarget(view, classOf[IShape]), classOf[Shape2BorderMapping])
update
}
}
/**
* @return the selected views. Cannot be null.
* @since 3.0
*/
def selection = _selection
override def initialiseLinks() {
try{
addLink(new DnD2Scale(this))
addLink(new DnD2MovePoint(this))
addLink(new DnD2MoveCtrlPoint(this))
addLink(new DnD2Rotate(this))
addLink(new DnD2ArcAngle(this))
// NOTE(review): `case ex =>` catches every Throwable, including fatal errors;
// consider narrowing to NonFatal.
}catch{case ex => BadaboomCollector.INSTANCE.add(ex)}
}
/**
* Removes all the selected views.
* @since 3.0
*/
def clear() {
if(!selection.isEmpty) {
selection.foreach{view =>
MappingRegistry.REGISTRY.removeMappingsUsingSource(MappingRegistry.REGISTRY.getSourceFromTarget(view, classOf[IShape]), classOf[Shape2BorderMapping])}
selection.clear
setActivated(false)
}
}
// Hit-tests the handlers (in priority order) at the given canvas coordinates;
// returns null when nothing is hit or the instrument is inactive.
override def getPickableAt(x : Double, y : Double) : Pickable = {
var pickable : Option[Pickable] = None
if(activated) {
val zoom = canvas.getZoom
val x2 = x*zoom
val y2 = y*zoom
pickable = getHandlerAt(x2, y2, _scaleHandlers)
if(pickable.isEmpty && _rotHandler.contains(x2, y2))
pickable = Some(_rotHandler)
if(pickable.isEmpty)
pickable = getHandlerAt(x2, y2, _mvPtHandlers)
if(pickable.isEmpty)
pickable = getHandlerAt(x2, y2, _ctrlPt1Handlers)
if(pickable.isEmpty)
pickable = getHandlerAt(x2, y2, _ctrlPt2Handlers)
// if(pickable.isEmpty && _frameArcHandler!=null && _frameArcHandler.contains(x2, y2))
// pickable = Some(_frameArcHandler)
if(pickable.isEmpty && _arcHandlerStart!=null && _arcHandlerStart.contains(x2, y2))
pickable = Some(_arcHandlerStart)
if(pickable.isEmpty && _arcHandlerEnd!=null && _arcHandlerEnd.contains(x2, y2))
pickable = Some(_arcHandlerEnd)
}
if(pickable.isDefined)
return pickable.get
else return null
}
// Returns the first handler of the list containing the point, if any.
private def getHandlerAt(x : Double, y : Double, handlers : ListBuffer[IHandler]) : Option[IHandler] = {
handlers match {
case null => None
case _ => handlers.find{handler => handler.contains(x, y)}
}
}
override def getPickerAt(x : Double, y : Double) : Picker = null
// Supposing that there is no handler outside the border.
override def contains(obj : Object) = obj.isInstanceOf[IHandler]
}
/**
 * Maps a DnD interaction on an arc-angle handler to an action that changes the arc's
 * start or end angle. Works in the shape's own (unrotated) frame: if the shape is
 * rotated, press/drag points are counter-rotated around the gravity centre first.
 */
private sealed class DnD2ArcAngle(ins : Border) extends Link[ModifyShapeProperty, DnD, Border](ins, true, classOf[ModifyShapeProperty], classOf[DnD]) {
/** The gravity centre used for the rotation (in zoomed coordinates). */
private var gc : IPoint = null
/** Defines whether the current handled shape is rotated. */
private var isRotated = false
/** The current handled shape. */
private var shape : IShape = null
/** Offset between the press point and the handler centre, kept constant during the drag. */
private var gap : IPoint = DrawingTK.getFactory.createPoint
// (The unused field p1 from the original implementation has been removed.)
def initAction() {
val drawing = instrument.canvas.getDrawing
if(drawing.getSelection.size()==1) {
shape = drawing.getSelection().getShapeAt(0)
val rotAngle = shape.getRotationAngle
var pCentre = interaction.getStartObject.asInstanceOf[IHandler].getCentre
var pt = DrawingTK.getFactory.createPoint(interaction.getStartPt)
gc = shape.getGravityCentre
gc = DrawingTK.getFactory.createPoint(gc.getX*instrument.canvas.getZoom, gc.getY*instrument.canvas.getZoom)
if(LNumber.INSTANCE.equals(rotAngle, 0))
isRotated = false
else {
// Work in the shape's unrotated frame.
pt = pt.rotatePoint(gc, -rotAngle)
pCentre = pCentre.rotatePoint(gc, -rotAngle)
isRotated = true
}
gap.setPoint(pt.getX-pCentre.getX, pt.getY-pCentre.getY)
// Which angle is modified depends on which handler started the interaction.
if(interaction.getStartObject==instrument.arcHandlerStart)
action.setProperty(ShapeProperties.ARC_START_ANGLE)
else
action.setProperty(ShapeProperties.ARC_END_ANGLE)
action.setGroup(drawing.getSelection.duplicate.asInstanceOf[IGroup])
}
}
override def updateAction() {
var pt = DrawingTK.getFactory.createPoint(interaction.getEndPt)
if(isRotated)
pt = pt.rotatePoint(gc, -shape.getRotationAngle)
// Subtract the initial press/handler gap so the angle follows the handler, not the cursor.
action.setValue(computeAngle(DrawingTK.getFactory.createPoint(pt.getX-gap.getX, pt.getY-gap.getY)))
}
// Angle of `position` around the gravity centre, normalised to [0, 2*Pi).
private def computeAngle(position : IPoint) : Double = {
val angle = math.acos((position.getX-gc.getX)/position.distance(gc))
if(position.getY>gc.getY)
2*math.Pi - angle
else angle
}
override def isConditionRespected() = interaction.getStartObject==instrument.arcHandlerEnd || interaction.getStartObject==instrument.arcHandlerStart
}
/**
 * Maps a DnD interaction performed on the rotation handler to an action that
 * rotates the currently selected shapes around their gravity centre.
 */
private sealed class DnD2Rotate(ins : Border) extends Link[RotateShapes, DnD, Border](ins, true, classOf[RotateShapes], classOf[DnD]) {
	/** The point corresponding to the 'press' position. */
	private var p1 : IPoint = null
	/** The gravity centre used for the rotation. */
	private var gc : IPoint = null
	def initAction() {
		val currentDrawing = instrument.canvas.getDrawing
		// Record the press point (zoom-corrected) and the rotation pivot.
		p1 = DrawingTK.getFactory.createPoint(instrument.canvas.getZoomedPoint(interaction.getStartPt))
		gc = currentDrawing.getSelection.getGravityCentre
		action.setGravityCentre(gc)
		action.setShape(currentDrawing.getSelection.duplicate)
	}
	override def updateAction() {
		// The rotation angle follows the drag point relative to the press point.
		val dragPoint = DrawingTK.getFactory.createPoint(instrument.canvas.getZoomedPoint(interaction.getEndPt))
		action.setRotationAngle(gc.computeRotationAngle(p1, dragPoint))
	}
	// Only the rotation handler may trigger this link.
	override def isConditionRespected() = instrument.rotHandler==interaction.getStartObject
}
/**
 * This link maps a DnD interaction on a move control point handler to an action that moves the selected control point.
 */
private sealed class DnD2MoveCtrlPoint(ins : Border) extends Link[MoveCtrlPoint, DnD, Border](ins, true, classOf[MoveCtrlPoint], classOf[DnD]) {
	/** The original coordinates of the moved point. */
	private var sourcePt : IPoint = null
	override def initAction() {
		val group = instrument.canvas.getDrawing.getSelection
		// Control points can only be edited when a single control-point shape is selected.
		if(group.size==1 && group.getShapeAt(0).isInstanceOf[IControlPointShape])
			// foreach instead of '.get': a missing handler becomes a no-op rather than a
			// NoSuchElementException (isConditionRespected should prevent this anyway).
			ctrlPtHandler.foreach{handler =>
				sourcePt = DrawingTK.getFactory.createPoint(handler.getCentre)
				action.setIndexPt(handler.getIndexPt)
				action.setShape(group.getShapeAt(0).asInstanceOf[IControlPointShape])
				action.setIsFirstCtrlPt(instrument.ctrlPt1Handlers.contains(interaction.getStartObject))
			}
	}
	override def updateAction() {
		super.updateAction
		val startPt = interaction.getStartPt
		val endPt = interaction.getEndPt
		// Translate the original point by the drag delta, then snap to the magnetic grid.
		val x = sourcePt.getX + endPt.getX-startPt.getX
		val y = sourcePt.getY + endPt.getY-startPt.getY
		action.setNewCoord(instrument.canvas.getMagneticGrid.getTransformedPointToGrid(instrument.canvas.getZoomedPoint(x, y)))
	}
	override def isConditionRespected() = ctrlPtHandler.isDefined
	/**
	 * @return The selected move control point handler, if any.
	 * @since 3.0
	 */
	private def ctrlPtHandler : Option[CtrlPointHandler] =
		interaction.getStartObject match {
			// A handler qualifies only when it belongs to one of the instrument's control-point handler lists.
			case h : CtrlPointHandler if instrument.ctrlPt1Handlers.contains(h) || instrument.ctrlPt2Handlers.contains(h) => Some(h)
			case _ => None
		}
}
/**
 * This link maps a DnD interaction on a move point handler to an action that moves the selected point.
 */
private sealed class DnD2MovePoint(ins : Border) extends Link[MovePointShape, DnD, Border](ins, true, classOf[MovePointShape], classOf[DnD]) {
	/** The original coordinates of the moved point. */
	private var sourcePt : IPoint = null
	override def initAction() {
		val group = instrument.canvas.getDrawing.getSelection
		// Points can only be edited when a single modifiable-points shape is selected.
		if(group.size==1 && group.getShapeAt(0).isInstanceOf[IModifiablePointsShape])
			// foreach instead of '.get': a missing handler becomes a no-op rather than a
			// NoSuchElementException (isConditionRespected should prevent this anyway).
			movePtHandler.foreach{handler =>
				sourcePt = DrawingTK.getFactory.createPoint(handler.getCentre)
				action.setIndexPt(handler.getIndexPt)
				action.setShape(group.getShapeAt(0).asInstanceOf[IModifiablePointsShape])
			}
	}
	override def updateAction() {
		super.updateAction
		val startPt = interaction.getStartPt
		val endPt = interaction.getEndPt
		// Translate the original point by the drag delta, then snap to the magnetic grid.
		val x = sourcePt.getX + endPt.getX-startPt.getX
		val y = sourcePt.getY + endPt.getY-startPt.getY
		action.setNewCoord(instrument.canvas.getMagneticGrid.getTransformedPointToGrid(instrument.canvas.getZoomedPoint(x, y)))
	}
	override def isConditionRespected() = movePtHandler.isDefined
	/**
	 * @return The selected move point handler, if any.
	 * @since 3.0
	 */
	private def movePtHandler : Option[MovePtHandler] =
		interaction.getStartObject match {
			// A handler qualifies only when it belongs to the instrument's move-point handler list.
			case h : MovePtHandler if instrument.mvPtHandlers.contains(h) => Some(h)
			case _ => None
		}
}
/**
 * This link maps a DnD interaction on a scale handler to an action that scales the selection.
 */
private sealed class DnD2Scale(ins : Border) extends Link[ScaleShapes, DnD, Border](ins, true, classOf[ScaleShapes], classOf[DnD]) {
	/** The point corresponding to the 'press' position. */
	private var p1 : IPoint = null
	/** The x gap (gap between the pressed position and the targeted position) of the X-scaling. */
	private var xGap : Double = 0.0
	/** The y gap (gap between the pressed position and the targeted position) of the Y-scaling. */
	private var yGap : Double = 0.0
	/** Computes the horizontal gap from the press point to the selection edge opposite the grabbed handler. */
	private def setXGap(refPosition : Position, tl : IPoint, br : IPoint) {
		refPosition match {
			case Position.NW | Position.SW | Position.WEST => xGap = p1.getX - br.getX
			case Position.NE | Position.SE | Position.EAST => xGap = tl.getX - p1.getX
			case _ => xGap = 0.0
		}
	}
	/** Computes the vertical gap from the press point to the selection edge opposite the grabbed handler. */
	private def setYGap(refPosition : Position, tl : IPoint, br : IPoint) {
		refPosition match {
			case Position.NW | Position.NE | Position.NORTH => yGap = p1.getY - br.getY
			case Position.SW | Position.SE | Position.SOUTH => yGap = tl.getY - p1.getY
			case _ => yGap = 0.0
		}
	}
	override def initAction() {
		val drawing = instrument.canvas.getDrawing
		// The scaling reference point is the corner/edge opposite the grabbed handler.
		val refPosition = scaleHandler.get.getPosition.getOpposite
		val br = drawing.getSelection.getBottomRightPoint
		val tl = drawing.getSelection.getTopLeftPoint
		p1 = instrument.canvas.getMagneticGrid.getTransformedPointToGrid(instrument.canvas.getZoomedPoint(interaction.getStartPt))
		setXGap(refPosition, tl, br)
		setYGap(refPosition, tl, br)
		action.setDrawing(drawing)
		action.setShape(drawing.getSelection.duplicate.asInstanceOf[IGroup])
		action.refPosition = refPosition
	}
	override def updateAction() {
		super.updateAction
		val pt = instrument.canvas.getMagneticGrid.getTransformedPointToGrid(instrument.canvas.getZoomedPoint(interaction.getEndPt))
		val refPosition = action.refPosition.get
		// Apply the press-time gaps so the shape edge follows the cursor exactly.
		if(refPosition.isSouth)
			action.newY = pt.getY + yGap
		else if(refPosition.isNorth)
			action.newY = pt.getY - yGap
		if(refPosition.isWest)
			action.newX = pt.getX - xGap
		else if(refPosition.isEast)
			action.newX = pt.getX + xGap
	}
	override def isConditionRespected() = scaleHandler.isDefined
	override def interimFeedback() {
		super.interimFeedback
		// Show the resize cursor matching the direction being dragged (opposite the reference position).
		action.refPosition.get match {
			case Position.EAST => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.W_RESIZE_CURSOR))
			case Position.NE => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.SW_RESIZE_CURSOR))
			case Position.NORTH => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.S_RESIZE_CURSOR))
			case Position.NW => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.SE_RESIZE_CURSOR))
			case Position.SE => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.NW_RESIZE_CURSOR))
			case Position.SOUTH => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.N_RESIZE_CURSOR))
			case Position.SW => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.NE_RESIZE_CURSOR))
			case Position.WEST => instrument.canvas.setCursor(Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR))
		}
	}
	/** @return The selected scale handler, if any. */
	private def scaleHandler : Option[ScaleHandler] =
		interaction.getStartObject match {
			// A handler qualifies only when it belongs to the instrument's scale handler list.
			case h : ScaleHandler if instrument.scaleHandlers.contains(h) => Some(h)
			case _ => None
		}
}
| arnobl/latexdraw-mutants | GUImutants/original/net.sf.latexdraw/src/main/net/sf/latexdraw/instruments/Border.scala | Scala | gpl-2.0 | 24,146 |
package com.twitter.finagle.netty4
import com.twitter.io.Buf
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.scalatest.funsuite.AnyFunSuite
object ReadableBufProcessorTest {

  /** Minimal interface over "something that can be processed with a `Buf.Processor`",
   *  letting one suite exercise several concrete implementations.
   */
  trait CanProcess {
    def process(from: Int, until: Int, processor: Buf.Processor): Int
    def process(processor: Buf.Processor): Int
    def readBytes(num: Int): Unit
    def readerIndex(): Int
  }

  /** Processor that throws on the first byte — used to prove a range is never visited. */
  def newExceptionalProcessor(): Buf.Processor = new Buf.Processor {
    def apply(byte: Byte): Boolean = throw new Exception("boom!")
  }

  /** Processor that accepts every byte, so processing always runs to completion. */
  def newConsumeAllProcessor = new Buf.Processor {
    def apply(byte: Byte): Boolean = true
  }

  /** Stateful processor that accepts `stopAt` bytes and then rejects the next one.
   *  Each call creates a fresh instance, so the internal counter starts at zero.
   */
  def newStopAtProcessor(stopAt: Int) = new Buf.Processor {
    var idx = 0
    def apply(byte: Byte): Boolean = {
      if (idx == stopAt) false
      else {
        idx += 1
        true
      }
    }
  }
}
/** Shared behaviour suite for `Buf.Processor`-processable readers.
 *  Concrete subclasses supply a display name and a factory wrapping a byte array.
 *  @param processable    name of the implementation under test, used in test names
 *  @param newProcessable factory building the processable from raw bytes
 */
abstract class ReadableBufProcessorTest(
  processable: String,
  newProcessable: (Array[Byte] => ReadableBufProcessorTest.CanProcess))
    extends AnyFunSuite
    with ScalaCheckDrivenPropertyChecks {
  import ReadableBufProcessorTest._

  test(s"$processable: process throws exception when `from` < 0") {
    val b = newProcessable(new Array[Byte](3))
    intercept[IllegalArgumentException] {
      b.process(-1, 2, newConsumeAllProcessor)
    }
  }

  test(s"$processable: process throws exception when `to` < 0") {
    val b = newProcessable(new Array[Byte](3))
    intercept[IllegalArgumentException] {
      b.process(1, -2, newConsumeAllProcessor)
    }
  }

  test(s"$processable: process returns -1 when empty underlying") {
    val b = newProcessable(new Array[Byte](0))
    // the processor must never be invoked on an empty buffer
    assert(b.process(newExceptionalProcessor()) == -1)
  }

  test(s"$processable: process returns -1 when until < from") {
    val b = newProcessable(new Array[Byte](3))
    assert(b.process(2, 1, newExceptionalProcessor()) == -1)
  }

  test(s"$processable: process returns -1 when `from` > readable bytes") {
    forAll { bytes: Array[Byte] =>
      val b = newProcessable(bytes)
      // consume roughly half, then ask for a range entirely past the readable window
      val read = Math.floor(bytes.length / 2).toInt
      b.readBytes(read)
      val readable = bytes.length - read
      assert(b.process(readable + 1, readable + 2, newExceptionalProcessor()) == -1)
    }
  }

  test(s"$processable: process returns index where processing stopped") {
    forAll { bytes: Array[Byte] =>
      val b = newProcessable(bytes)
      assert(b.process(newStopAtProcessor(0)) == (if (bytes.isEmpty) -1 else 0))
      if (bytes.length <= 3) {
        assert(-1 == b.process(newStopAtProcessor(3)))
      } else {
        assert(3 == b.process(newStopAtProcessor(3)))
        if (bytes.length > 10) {
          // ranged variants: returned index is absolute, offset by `from`
          assert(4 == b.process(1, 5, newStopAtProcessor(3)))
          assert(5 == b.process(2, 9, newStopAtProcessor(3)))
          assert(-1 == b.process(0, 3, newStopAtProcessor(3)))
        }
      }
    }
  }

  // Fixed: test name now carries the s"$processable: " prefix like every other test
  // in this suite, so concrete subclasses don't register duplicate test names.
  test(s"$processable: process returns -1 when fully processed") {
    forAll { bytes: Array[Byte] =>
      val b = newProcessable(bytes)
      var n = 0
      val processor = new Buf.Processor {
        def apply(byte: Byte): Boolean = {
          n += 1
          true
        }
      }
      assert(b.process(processor) == -1)
      // every readable byte must have been visited exactly once
      assert(bytes.length == n)
    }
  }

  test(s"$processable: process will not go past the end of the buf") {
    forAll { bytes: Array[Byte] =>
      whenever(bytes.length > 0) {
        val b = newProcessable(bytes)
        assert(b.process(0, bytes.length, newConsumeAllProcessor) == -1)
        assert(b.process(0, bytes.length + 1, newConsumeAllProcessor) == -1)
        assert(b.process(1, bytes.length + 10, newConsumeAllProcessor) == -1)
      }
    }
  }

  test(s"$processable: process will not fail if offset > readable bytes") {
    val b = newProcessable(new Array[Byte](20))
    b.readBytes(10)
    assert(b.process(2, 3, newConsumeAllProcessor) == -1)
  }

  test(s"$processable: process handles readerIndex") {
    forAll { bytes: Array[Byte] =>
      whenever(bytes.length > 1) {
        val b = newProcessable(bytes)
        b.readBytes(0)
        assert(b.process(newStopAtProcessor(0)) == 0)
        if (bytes.length >= 2) {
          assert(b.process(1, 2, newStopAtProcessor(0)) == 1)
        }
      }
    }
  }

  test(s"$processable: process does not change readerIndex") {
    forAll { bytes: Array[Byte] =>
      whenever(bytes.length > 0) {
        val b = newProcessable(bytes)
        assert(b.readerIndex == 0)
        assert(b.process(newConsumeAllProcessor) == -1)
        assert(b.readerIndex == 0)
      }
    }
  }
}
| twitter/finagle | finagle-netty4/src/test/scala/com/twitter/finagle/netty4/ReadableBufProcessorTest.scala | Scala | apache-2.0 | 4,543 |
package spark.streaming.sql
import spark.RDD
import spark.streaming.{Time, Duration}
private[streaming]
class SQLBinaryTransformedDStream[T : ClassManifest](
    leftParent: SQLOperatorDStream[T],
    rightParent: SQLOperatorDStream[T],
    func : ((RDD[T], RDD[T]) => RDD[T])
  ) extends SQLOperatorDStream[T](leftParent.ssc) {

  // Both parents must share the same streaming context and cadence,
  // otherwise batches could never be aligned for the binary transform.
  if (leftParent.ssc != rightParent.ssc) {
    throw new IllegalArgumentException("Parents have different StreamingContexts")
  }

  if (leftParent.slideDuration != rightParent.slideDuration) {
    throw new IllegalArgumentException("Parents have different slide times")
  }

  override def dependencies = List(leftParent,rightParent)

  override def slideDuration: Duration = leftParent.slideDuration

  /** Applies `func` to the two parents' RDDs for `validTime`.
   *  Returns None unless BOTH parents produced an RDD for this batch.
   *  NOTE(review): the `child` and `amILeftParent` parameters are unused here —
   *  confirm this is intentional for the binary case.
   */
  override def compute(validTime: Time, child : SQLOperatorDStream[T], amILeftParent : Boolean): Option[RDD[T]] = {
    val leftRDD = leftParent.getOrCompute(validTime, this, true)
    val rightRDD = rightParent.getOrCompute(validTime, this, false)
    // for-comprehension replaces the unidiomatic `== None` comparisons:
    // yields Some(func(l, r)) only when both options are defined.
    for (left <- leftRDD; right <- rightRDD) yield func(left, right)
  }
}
| pxgao/spark-0.7.3 | streaming/src/main/scala/spark/streaming/sql/SQLBinaryTransformedDStream.scala | Scala | bsd-3-clause | 1,096 |
package authentication.deadbolt.config
import authentication.models.SecurityUser
import authentication.oauth2.PlayWithFoodAuthorizationHandler
import be.objectify.deadbolt.scala.models.Subject
import be.objectify.deadbolt.scala.{AuthenticatedRequest, DeadboltHandler, DynamicResourceHandler}
import javax.inject.Inject
import play.api.mvc.{Request, Result, Results}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scalaoauth2.provider.{AuthInfo, OAuth2ProtectedResourceProvider}
import be.objectify.deadbolt.scala.models.{Permission, Role}
/** Deadbolt handler that resolves the request subject from an OAuth2 protected
 *  resource (delegating token validation to `dataHandler`).
 */
private[authentication] class OAuthDeadboltHandler(dataHandler: PlayWithFoodAuthorizationHandler)
  extends DeadboltHandler
  with OAuth2ProtectedResourceProvider {

  // This implementation provides no dynamic resource handler.
  val dynamicHandler: Option[DynamicResourceHandler] = Option.empty

  // Future.successful instead of Future.apply: the values below are already
  // computed, so there is no need to schedule a task on the execution context.
  override def beforeAuthCheck[A](request: Request[A]): Future[Option[Result]] = Future.successful(None)

  override def getDynamicResourceHandler[A](request: Request[A]): Future[Option[DynamicResourceHandler]] = Future.successful(dynamicHandler)

  /** Returns the already-attached subject if present; otherwise validates the
   *  OAuth2 token on the request and wraps the auth info as the subject.
   *  A failed token validation yields None (no subject).
   */
  override def getSubject[A](request: AuthenticatedRequest[A]): Future[Option[Subject]] =
    request.subject match {
      case Some(_) => Future.successful(request.subject)
      case _ => protectedResource.handleRequest(request, dataHandler).map {
        case Left(_) => None
        case Right(authInfo: AuthInfo[AccountInfo]) => Some(new OAuthSubject(authInfo))
      }
    }

  override def onAuthFailure[A](request: AuthenticatedRequest[A]): Future[Result] = Future.successful(Results.Unauthorized)
}
/** Resource-owner information carried in the OAuth2 `AuthInfo`; only the username is used here. */
private[authentication] case class AccountInfo(username: String)
/** Adapts OAuth2 auth info to a Deadbolt `Subject`: the token's scopes become
 *  both the subject's roles and its permissions.
 */
private[authentication] class OAuthSubject(authInfo: AuthInfo[AccountInfo]) extends Subject {
  // Safe unwrap: a token without a scope now yields no roles/permissions
  // instead of throwing NoSuchElementException (previous `.get` behaviour).
  val scopes: List[OAuthScope] = authInfo.scope.map(OAuthScope(_)).getOrElse(Nil)

  override def identifier: String = authInfo.user.username

  override def permissions: List[be.objectify.deadbolt.scala.models.Permission] = scopes

  override def roles: List[be.objectify.deadbolt.scala.models.Role] = scopes
}
/** A single OAuth scope, exposed to Deadbolt as both a `Role` and a `Permission`;
 *  name and value are the raw scope string.
 */
private[authentication] class OAuthScope(scope: String) extends Role with Permission {
  override def name: String = scope
  override def value: String = scope
}
/** Factory: splits a comma-separated scope string into one trimmed `OAuthScope` per entry. */
private[authentication] object OAuthScope {
  def apply(scopeString: String): List[OAuthScope] = scopeString.split(",").toList.map(s => new OAuthScope(s.trim))
} | Dasiu/play-framework-test-project | app/authentication/deadbolt/config/OAuthDeadboltHandler.scala | Scala | mit | 2,331 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream
import java.math.BigDecimal
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.{TableEnvironment, TableException}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.stream.TimeAttributesITCase.TimestampWithEqualWatermark
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
/** Validation tests: each case registers a table with an illegal field or
 *  time-attribute definition and expects the environment to reject it with a
 *  `TableException`.
 */
class StreamTableEnvironmentValidationTest extends TableTestBase {

  // ---- time-attribute definition errors -----------------------------------

  @Test(expected = classOf[TableException])
  def testInvalidTimeAttributes(): Unit = {
    val util = streamTestUtil()
    // table definition makes no sense: '.rowtime' applied twice to one field
    util.addTable[(Long, Int, String, Int, Long)]('a.rowtime.rowtime, 'b, 'c, 'd, 'e)
  }

  @Test(expected = classOf[TableException])
  def testInvalidProctimeAttribute(): Unit = {
    val util = streamTestUtil()
    // cannot replace an attribute with proctime
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b.proctime, 'c, 'd, 'e)
  }

  @Test(expected = classOf[TableException])
  def testRowtimeAttributeReplaceFieldOfInvalidType(): Unit = {
    val util = streamTestUtil()
    // cannot replace a non-time attribute with rowtime
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c.rowtime, 'd, 'e)
  }

  @Test(expected = classOf[TableException])
  def testRowtimeAndInvalidProctimeAttribute(): Unit = {
    // proctime must not replace an existing field ('e here)
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('rt.rowtime, 'b, 'c, 'd, 'pt.proctime)
  }

  @Test(expected = classOf[TableException])
  def testOnlyOneRowtimeAttribute1(): Unit = {
    // at most one rowtime attribute may be declared
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a.rowtime, 'b, 'c, 'd, 'e, 'rt.rowtime)
  }

  @Test(expected = classOf[TableException])
  def testOnlyOneProctimeAttribute1(): Unit = {
    // at most one proctime attribute may be declared
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'pt1.proctime, 'pt2.proctime)
  }

  @Test(expected = classOf[TableException])
  def testRowtimeAttributeUsedName(): Unit = {
    // rowtime attribute name clashes with an existing field name
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'a.rowtime)
  }

  @Test(expected = classOf[TableException])
  def testProctimeAttributeUsedName(): Unit = {
    // proctime attribute name clashes with an existing field name
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'b.proctime)
  }

  // ---- field definition errors --------------------------------------------

  @Test(expected = classOf[TableException])
  def testAsWithToManyFields(): Unit = {
    // more aliases than tuple arity
    val util = streamTestUtil()
    util.addTable[(Int, Long, String)]('a, 'b, 'c, 'd)
  }

  @Test(expected = classOf[TableException])
  def testAsWithAmbiguousFields(): Unit = {
    // duplicate alias 'b
    val util = streamTestUtil()
    util.addTable[(Int, Long, String)]('a, 'b, 'b)
  }

  @Test(expected = classOf[TableException])
  def testOnlyFieldRefInAs(): Unit = {
    // only plain field references are allowed when aliasing
    val util = streamTestUtil()
    util.addTable[(Int, Long, String)]('a, 'b as 'c, 'd)
  }

  @Test(expected = classOf[TableException])
  def testInvalidTimeCharacteristic(): Unit = {
    // rowtime requires event-time characteristic on the stream environment
    val data = List((1L, 1, 1d, 1f, new BigDecimal("1"), "Hi"))
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = TableEnvironment.getTableEnvironment(env)
    val stream = env
      .fromCollection(data)
      .assignTimestampsAndWatermarks(new TimestampWithEqualWatermark())
    stream.toTable(tEnv, 'rowtime.rowtime, 'int, 'double, 'float, 'bigdec, 'string)
  }
}
| zimmermatt/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/stream/StreamTableEnvironmentValidationTest.scala | Scala | apache-2.0 | 4,233 |
package org.fusesource.scalate.jruby
import org.fusesource.scalate.util.Log
import java.io.{ StringWriter, File }
import org.jruby.RubyInstanceConfig
import org.jruby.embed.{ LocalContextScope, ScriptingContainer }
import collection.JavaConverters._
/**
 * A simple interface to the jruby interpreter.
 * Scriptlets are executed on a singleton scripting container configured for
 * JIT compilation; execution is serialized via `run`'s synchronization.
 */
class JRuby(loadPaths: List[File]) extends Log {

  // NOTE(review): public `var` kept for source compatibility; nothing in this
  // class reassigns it — could become a `val` if no external caller assigns it.
  var container = new ScriptingContainer(LocalContextScope.SINGLETON)

  container.getProvider.setLoadPaths(loadPaths.asJava)
  container.setCompileMode(RubyInstanceConfig.CompileMode.JIT)

  // Global JRuby performance flags.
  RubyInstanceConfig.FASTEST_COMPILE_ENABLED = true
  RubyInstanceConfig.FASTSEND_COMPILE_ENABLED = true
  RubyInstanceConfig.INLINE_DYNCALL_ENABLED = true

  /**
   * Runs the given scriptlets joined into a single script.
   * @return Right(result) on success; Left((throwable, error output)) on failure,
   *         where the error output is whatever the interpreter wrote to stderr.
   */
  def run(scriptlet: String*): Either[(Throwable, String), AnyRef] = this.synchronized {
    val errors = new StringWriter   // was a needless `var`
    try {
      container.setErrorWriter(errors)
      Right(container.runScriptlet(scriptlet.mkString("\\n")))
    } catch {
      // NOTE(review): deliberately broad catch? Consider scala.util.control.NonFatal
      // so fatal JVM errors propagate — confirm before changing.
      case e: Throwable =>
        Left((e, errors.toString))
    }
  }

  /** Binds `value` to `name` in the interpreter's variable map. */
  def put(name: String, value: AnyRef) = container.put(name, value)
} | maslovalex/scalate | scalate-jruby/src/main/scala/org/fusesource/scalate/jruby/JRuby.scala | Scala | apache-2.0 | 1,108 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sun Sep 16 14:09:25 EDT 2012
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra
import scala.collection.{breakOut, Traversable}
import scala.collection.mutable.{IndexedSeq, WrappedArray}
import scala.util.Sorting.quickSort
import scalation.math.Complex.{abs => ABS, max => MAX, _}
import scalation.math.Complex
import scalation.math.ExtremeD.TOL
import scalation.util.Error
import scalation.util.SortingC
import scalation.util.SortingC.{iqsort, qsort2}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorC` class stores and operates on Numeric Vectors of base type `Complex`.
 *  It follows the framework of `gen.VectorN [T]` and is provided for performance.
 *  @param dim  the dimension/size of the vector
 *  @param v    the 1D array used to store vector elements
 */
class VectorC (val dim: Int,
     protected var v: Array [Complex] = null)
      extends VectoC
//    extends Traversable [Complex] with PartiallyOrdered [VectorC] with Vec with Error with Serializable
{
    // Allocate backing storage when none is supplied; otherwise the supplied
    // array's length must agree with 'dim' (mismatch is reported, not thrown).
    if (v == null) {
        v = Array.ofDim [Complex] (dim)
    } else if (dim != v.length) {
        flaw ("constructor", "vector dimension is wrong: dim " + dim + " != v.length " + v.length)
    } // if
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a vector and assign values from vector 'u' (element-wise copy,
     *  works for any `VectoC` implementation).
     *  @param u  the other vector
     */
    def this (u: VectoC) { this (u.dim); for (i <- range) v(i) = u(i) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a zero vector of dimension 'dm' and assign 'value' at 'index' position.
     *  @param iv  the tuple containing (index, value)
     *  @param dm  the dimension for the new vector
     */
    def this (iv: Tuple2 [Int, Complex], dm: Int) { this (dm); v(iv._1) = iv._2 }
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create an exact copy of 'this' vector (deep copy of the backing array).
     */
    def copy: VectorC = new VectorC (this)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a zero vector (all elements are zero) of length 'size'.
     *  @param size  the number of elements in the vector
     */
    def zero (size: Int = dim): VectorC = new VectorC (size)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a one vector (all elements are one) of length 'size'.
     *  @param size  the number of elements in the vector
     */
    def one (size: Int = dim): VectorC = new VectorC (size, Array.fill (size)(_1))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a vector of the form (0, ... 1, ... 0) where the 1 is at position j.
     *  @param j     the position to place the 1
     *  @param size  the size of the vector (upper bound = size - 1)
     */
    def oneAt (j: Int, size: Int = dim): VectorC = new VectorC ((j, _1), size)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a vector of the form (0, ... -1, ... 0) where the -1 is at position j.
     *  @param j     the position to place the -1
     *  @param size  the size of the vector (upper bound = size - 1)
     */
    def _oneAt (j: Int, size: Int = dim): VectorC = new VectorC ((j, -_1), size)
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get 'this' vector's element at the 'i'-th index position.
     *  @param i  the given index
     */
    def apply (i: Int): Complex = v(i)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get 'this' vector's elements within the given range (vector slicing).
     *  @param r  the given range
     */
    def apply (r: Range): VectorC = slice (r.start, r.end)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get 'this' vector's entire array.
     *  NOTE: exposes the internal storage — mutations through the returned
     *  wrapper are visible in this vector.
     */
    def apply (): WrappedArray [Complex] = v

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set 'this' vector's element at the 'i'-th index position.
     *  @param i  the given index
     *  @param x  the value to assign
     */
    def update (i: Int, x: Complex) { v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set 'this' vector's elements over the given range to 'x' (vector slicing).
     *  @param r  the given range
     *  @param x  the value to assign
     */
    def update (r: Range, x: Complex) { for (i <- r) v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set 'this' vector's elements over the given range (vector slicing);
     *  u is read starting at its position 0.
     *  @param r  the given range
     *  @param u  the vector to assign
     */
    def update (r: Range, u: VectoC) { for (i <- r) v(i) = u(i - r.start) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set each value in 'this' vector to 'x'.
     *  @param x  the value to be assigned
     */
    def set (x: Complex) { for (i <- range) v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set the values in 'this' vector to the values in sequence/array 'u'.
     *  @param u  the sequence/array of values to be assigned
     */
    def set (u: Seq [Complex]) { for (i <- range) v(i) = u(i) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Iterate over 'this' vector element by element.
     *  Implemented with a while loop (not a for) for performance.
     *  @param f  the function to apply
     */
    def foreach [U] (f: Complex => U) { var i = 0; while (i < dim) { f (v(i)); i += 1 } }
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert 'this' `VectorC` into a `VectorI` (element-wise 'toInt').
     */
    def toInt: VectorI = VectorI (v.map (_.toInt))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert 'this' `VectorC` into a `VectorL` (element-wise 'toLong').
     */
    def toLong: VectorL = VectorL (v.map (_.toLong))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert 'this' `VectorC` into a `VectorD` (element-wise 'toDouble').
     */
    def toDouble: VectorD = VectorD (v.map (_.toDouble))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert 'this' `VectorC` into a dense `VectorC` (already dense — identity).
     */
    def toDense: VectorC = this
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Expand the size (dim) of 'this' vector by 'more' elements.
* @param more the number of new elements to add
*/
def expand (more: Int = dim): VectorC =
{
if (more < 1) this // no change
else new VectorC (dim + more, Array.concat (v, new Array [Complex] (more)))
} // expand
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Filter the elements of 'this' vector based on the predicate 'p', returning
     *  a new vector.
     *  @param p  the predicate (`Boolean` function) to apply
     */
    override def filter (p: Complex => Boolean): VectorC = VectorC (v.filter (p))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Filter the elements of 'this' vector based on the predicate 'p', returning
     *  the index positions.
     *  @param p  the predicate (`Boolean` function) to apply
     */
    def filterPos (p: Complex => Boolean): IndexedSeq [Int] =
    {
        // breakOut builds the target IndexedSeq directly, with no intermediate collection
        (for (i <- range if p (v(i))) yield i)(breakOut)
    } // filterPos
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Filter the elements of 'this' vector based on the binary predicate 'p',
* returning the index positions.
* @param v2 the other vector to compare with
* @param p the binary predicate (Boolean function, between two elements) to apply
*/
def filterPos2 (v2: VectoC, p: (Complex, Complex) => Boolean): IndexedSeq [(Int, Int)] =
{
var result = IndexedSeq [(Int, Int)] ()
for (i <- range; j <- v2.range if p(v(i), v2(j))) result = result :+ (i, j)
result
} // filterPos2
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Map the elements of 'this' vector by applying the mapping function 'f'.
     *  @param f  the function to apply
     */
    def map (f: Complex => Complex): VectorC = VectorC (v.map (f))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slice 'this' vector 'from' to 'till' (copies the selected elements).
     *  @param from  the start of the slice (included)
     *  @param till  the end of the slice (excluded)
     */
    override def slice (from: Int, till: Int = dim): VectorC = new VectorC (till - from, v.slice (from, till))
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select a subset of elements of 'this' vector corresponding to a 'basis'.
* @param basis the set/array of index positions (e.g., 0, 2, 5)
*/
def select (basis: Array [Int]): VectorC =
{
val c = new VectorC (basis.length)
for (i <- c.range) c.v(i) = v(basis(i))
c
} // select
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select a subset of elements of 'this' vector corresponding to a 'basis'.
* @param basis the set/vector of index positions (e.g., 0, 2, 5)
*/
def select (basis: VectoI): VectorC =
{
val c = new VectorC (basis.dim)
for (i <- c.range) c.v(i) = v(basis(i))
c
} // select
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' vector and vector' b'.
* @param b the vector to be concatenated (any kind)
*/
def ++ (b: VectoC): VectorC =
{
val c = new VectorC (dim + b.dim)
for (i <- c.range) c.v(i) = if (i < dim) v(i) else b(i - dim)
c
} // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' vector and vector' b'.
* @param b the vector to be concatenated (same kind, more efficient)
*/
def ++ (b: VectorC): VectorC =
{
val c = new VectorC (dim + b.dim)
for (i <- c.range) c.v(i) = if (i < dim) v(i) else b.v(i - dim)
c
} // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' vector and scalar 's'.
* @param s the scalar to be concatenated
*/
def ++ (s: Complex): VectorC =
{
val c = new VectorC (dim + 1)
for (i <- c.range) c.v(i) = if (i < dim) v(i) else s
c
} // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' vector and vector 'b'.
* @param b the vector to add (any kind)
*/
def + (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) + b(i)
c
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' vector and vector 'b'.
* @param b the vector to add (same kind, more efficient)
*/
def + (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) + b.v(i)
c
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' vector and scalar 's'.
* @param s the scalar to add
*/
def + (s: Complex): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) + s
c
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' vector and scalar 's._2' only at position 's._1'.
* @param s the (position, scalar) to add
*/
def + (s: Tuple2 [Int, Complex]): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (i == s._1) v(i) + s._2 else v(i)
c
} // +
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place 'this' vector and vector 'b' (mutates 'this'; returns it for chaining).
     *  @param b  the vector to add (any kind)
     */
    def += (b: VectoC): VectorC = { for (i <- range) v(i) += b(i); this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place 'this' vector and vector 'b' (mutates 'this'; returns it for chaining).
     *  @param b  the vector to add (same kind, more efficient)
     */
    def += (b: VectorC): VectorC = { for (i <- range) v(i) += b.v(i); this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place 'this' vector and scalar 's' (mutates 'this'; returns it for chaining).
     *  @param s  the scalar to add
     */
    def += (s: Complex): VectorC = { for (i <- range) v(i) += s; this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the negative of 'this' vector (unary minus).
*/
def unary_- (): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = -v(i)
c
} // unary_-
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract vector 'b'.
* @param b the vector to subtract (any kind)
*/
def - (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) - b(i)
c
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract vector 'b'.
* @param b the vector to subtract (same kind, more efficient)
*/
def - (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) - b.v(i)
c
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract scalar 's'.
* @param s the scalar to subtract
*/
def - (s: Complex): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) - s
c
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract scalar 's._2' only at position 's._1'.
* @param s the (position, scalar) to subtract
*/
def - (s: Tuple2 [Int, Complex]): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (i == s._1) v(i) - s._2 else v(i)
c
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract in-place vector 'b'.
 * @param b the vector to subtract (any kind)
*/
def -= (b: VectoC): VectorC = { for (i <- range) v(i) -= b(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract in-place vector 'b'.
 * @param b the vector to subtract (same kind, more efficient)
*/
def -= (b: VectorC): VectorC = { for (i <- range) v(i) -= b.v(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' vector subtract in-place scalar 's'.
 * @param s the scalar to subtract
*/
def -= (s: Complex): VectorC = { for (i <- range) v(i) -= s; this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' vector by vector 'b'.
* @param b the vector to multiply by (any kind)
*/
def * (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) * b(i)
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' vector by vector 'b'.
* @param b the vector to multiply by (same kind, more efficient)
*/
def * (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) * b.v(i)
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' vector by scalar 's'.
* @param s the scalar to multiply by
*/
def * (s: Complex): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) * s
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' vector and vector 'b'.
 * @param b the vector to multiply by (any kind)
*/
def *= (b: VectoC): VectorC = { for (i <- range) v(i) *= b(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' vector and vector 'b'.
 * @param b the vector to multiply by (same kind, more efficient)
*/
def *= (b: VectorC): VectorC = { for (i <- range) v(i) *= b.v(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' vector and scalar 's'.
 * @param s the scalar to multiply by
*/
def *= (s: Complex): VectorC = { for (i <- range) v(i) *= s; this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' vector by vector 'b' (element-by-element).
* @param b the vector to divide by (any kind)
*/
def / (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) / b(i)
c
} // /
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' vector by vector 'b' (element-by-element).
* @param b the vector to divide by (same kind, more efficient)
*/
def / (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) / b.v(i)
c
} // /
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' vector by scalar 's'.
* @param s the scalar to divide by
*/
def / (s: Complex): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) / s
c
} // /
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide in-place 'this' vector and vector 'b'.
 * @param b the vector to divide by (any kind)
*/
def /= (b: VectoC): VectorC = { for (i <- range) v(i) /= b(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide in-place 'this' vector and vector 'b'.
 * @param b the vector to divide by (same kind, more efficient)
*/
def /= (b: VectorC): VectorC = { for (i <- range) v(i) /= b.v(i); this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide in-place 'this' vector and scalar 's'.
 * @param s the scalar to divide by
*/
def /= (s: Complex): VectorC = { for (i <- range) v(i) /= s; this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the vector containing each element of 'this' vector raised to the
* s-th power.
* @param s the scalar exponent
*/
def ~^ (s: Double): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = v(i) ~^ s
c
} // ~^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Raise in-place each element of 'this' vector to the 's'-th power.
* @param s the scalar exponent
*/
def ~^= (s: Double): VectorC = { for (i <- range) v(i) = v(i) ~^ s; this }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the vector containing the reciprocal of each element of 'this' vector.
*/
def recip: VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = _1 / v(i)
c
} // recip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the vector that is the element-wise absolute value of 'this' vector.
*/
def abs: VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = ABS (v(i))
c
} // abs
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sum the elements of 'this' vector.
*/
def sum: Complex = v.foldLeft (_0)((s, x) => s + x)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sum the absolute value of the elements of 'this' vector.
*/
def sumAbs: Complex = v.foldLeft (_0)((s, x) => s + ABS (x))
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sum the elements of 'this' vector skipping the 'i'-th element (Not Equal 'i').
* @param i the index of the element to skip
*/
    // computed as the full 'sum' minus the skipped element v(i)
    def sumNE (i: Int): Complex = sum - v(i)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sum the positive (> 0) elements of 'this' vector.
*/
def sumPos: Complex = v.foldLeft (_0)((s, x) => s + MAX (x, _0))
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Establish the rank order of the elements in 'this' vector, e.g.,
* (8.0, 2.0, 4.0, 6.0) is (3, 0, 1, 2).
*/
    // 'iqsort' presumably returns the index permutation that sorts 'v' (indirect quicksort) -- confirm
    def rank: VectorI = VectorI (iqsort (v))
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Cumulate the values of 'this' vector from left to right (e.g., create a
* CDF from a pmf). Example: (4, 2, 3, 1) --> (4, 6, 9, 10)
*/
def cumulate: VectorC =
{
val c = new VectorC (dim)
var sum = _0
for (i <- range) { sum += v(i); c.v(i) = sum }
c
} // cumulate
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Normalize 'this' vector so that it sums to one (like a probability vector).
*/
    // NOTE(review): divides by 'sum' -- presumably requires a nonzero sum; confirm for zero vectors
    def normalize: VectorC = this * (_1 / sum)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Normalize 'this' vector so its length is one (unit vector).
*/
    // NOTE(review): divides by 'norm' -- presumably requires a nonzero norm; confirm for zero vectors
    def normalizeU: VectorC = this * (_1 / norm)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Normalize 'this' vector to have a maximum of one.
*/
    // NOTE(review): divides by the maximum element -- presumably requires it to be nonzero; confirm
    def normalize1: VectorC = this * (_1 / max ())
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the dot product (or inner product) of 'this' vector with vector 'b'.
* @param b the other vector (any kind)
*/
def dot (b: VectoC): Complex =
{
var sum = _0
for (i <- range) sum += v(i) * b(i)
sum
} // dot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the dot product (or inner product) of 'this' vector with vector 'b'.
* @param b the other vector (same kind, more efficient)
*/
def dot (b: VectorC): Complex =
{
var sum = _0
for (i <- range) sum += v(i) * b.v(i)
sum
} // dot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the Manhattan norm (1-norm) of 'this' vector.
*/
def norm1: Complex =
{
var sum = _0
for (i <- range) sum += ABS (v(i))
sum
} // norm1
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the maximum element in 'this' vector.
* @param e the ending index (exclusive) for the search
*/
    def max (e: Int = dim): Complex =
    {
        var x = v(0)                       // precondition: dim > 0 and e >= 1 (v(0) is read unconditionally)
        for (i <- 1 until e if v(i) > x) x = v(i)
        x
    } // max
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take the maximum of 'this' vector with vector 'b' (element-by element).
* @param b the other vector (any kind)
*/
def max (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (b(i) > v(i)) b(i) else v(i)
c
} // max
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take the maximum of 'this' vector with vector 'b' (element-by element).
* @param b the other vector (same kind, more efficient)
*/
def max (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (b.v(i) > v(i)) b.v(i) else v(i)
c
} // max
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the minimum element in 'this' vector.
* @param e the ending index (exclusive) for the search
*/
    def min (e: Int = dim): Complex =
    {
        var x = v(0)                       // precondition: dim > 0 and e >= 1 (v(0) is read unconditionally)
        for (i <- 1 until e if v(i) < x) x = v(i)
        x
    } // min
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take the minimum of 'this' vector with vector 'b' (element-by element).
* @param b the other vector (any kind)
*/
def min (b: VectoC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (b(i) < v(i)) b(i) else v(i)
c
} // min
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take the minimum of 'this' vector with vector 'b' (element-by element).
* @param b the other vector (same kind, more efficient)
*/
def min (b: VectorC): VectorC =
{
val c = new VectorC (dim)
for (i <- range) c.v(i) = if (b.v(i) < v(i)) b.v(i) else v(i)
c
} // min
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the argument maximum of 'this' vector (index of maximum element).
* @param e the ending index (exclusive) for the search
*/
    def argmax (e: Int = dim): Int =
    {
        var j = 0                          // first index of the largest value wins ties; requires e >= 1
        for (i <- 1 until e if v(i) > v(j)) j = i
        j
    } // argmax
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the argument maximum of 'this' vector (index of maximum element).
* @param s the starting index (inclusive) for the search
* @param e the ending index (exclusive) for the search
*/
def argmax (s: Int, e: Int): Int =
{
var j = s
for (i <- s + 1 until e if v(i) > v(j)) j = i
j
} // argmax
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the argument minimum of 'this' vector (index of minimum element).
* @param e the ending index (exclusive) for the search
*/
    def argmin (e: Int = dim): Int =
    {
        var j = 0                          // first index of the smallest value wins ties; requires e >= 1
        for (i <- 1 until e if v(i) < v(j)) j = i
        j
    } // argmin
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the argument minimum of 'this' vector (index of minimum element).
* @param s the starting index (inclusive) for the search
* @param e the ending index (exclusive) for the search
*/
def argmin (s: Int, e: Int): Int =
{
var j = s
for (i <- s + 1 until e if v(i) < v(j)) j = i
j
} // argmin
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the argument minimum of 'this' vector (-1 if it's not negative).
* @param e the ending index (exclusive) for the search
*/
    def argminNeg (e: Int = dim): Int =
    {
        val j = argmin (e); if (v(j) < _0) j else -1   // -1 when the minimum is not strictly negative
    } // argminNeg
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the argument maximum of 'this' vector (-1 if it's not positive).
* @param e the ending index (exclusive) for the search
*/
    def argmaxPos (e: Int = dim): Int =
    {
        val j = argmax (e); if (v(j) > _0) j else -1   // -1 when the maximum is not strictly positive
    } // argmaxPos
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the index of the first negative element in 'this' vector (-1 otherwise).
* @param e the ending index (exclusive) for the search
*/
def firstNeg (e: Int = dim): Int =
{
for (i <- 0 until e if v(i) < _0) return i; -1
} // firstNeg
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the index of the first positive element in 'this' vector (-1 otherwise).
* @param e the ending index (exclusive) for the search
*/
def firstPos (e: Int = dim): Int =
{
for (i <- 0 until e if v(i) > _0) return i; -1
} // firstPos
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the index of the first occurrence of element 'x' in 'this' vector,
* or -1 if not found.
* @param x the given element
* @param e the ending index (exclusive) for the search
*/
def indexOf (x: Complex, e: Int = dim): Int =
{
for (i <- 0 until e if v(i) == x) return i; -1
} // indexOf
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find and return index of first element satisfying predicate 'p', or
* -1 if not found.
* @param p the predicate to check
*/
    // delegates to the array's indexWhere; searches the whole backing array (no 'e' bound, unlike indexOf)
    def indexWhere (p: (Complex) => Boolean): Int = v.indexWhere (p)
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Count the number of strictly negative elements in 'this' vector.
*/
def countNeg: Int =
{
var count = 0
for (i <- 0 until dim if v(i) < _0) count += 1
count
} // countNeg
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Count the number of strictly positive elements in 'this' vector.
*/
def countPos: Int =
{
var count = 0
for (i <- 0 until dim if v(i) > _0) count += 1
count
} // countPos
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clean values in 'this' vector at or below the threshold 'thres' by setting
* them to zero. Iterative algorithms give approximate values and if very close
* to zero, may throw off other calculations, e.g., in computing eigenvectors.
* @param thres the cutoff threshold (a small value)
* @param relative whether to use relative or absolute cutoff
*/
    def clean (thres: Double = TOL, relative: Boolean = true): VectorC =
    {
        // NOTE: mutates 'this' in place (and returns 'this'), despite the non-destructive-looking name
        val s = if (relative) mag else _1                    // use vector magnitude or 1
        for (i <- range) if (ABS (v(i)) <= thres * s) v(i) = _0
        this
    } // clean
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Count the number of distinct elements in 'this' vector.
*/
// def distinct: Int =
// {
// var count = 1
// val us = new VectorC (this); us.sort () // sorted vector
// for (i <- 1 until dim if us(i) != us(i-1)) count += 1
// count
// } // distinct
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return a new vector consisting of the distinct elements from 'this' vector.
*/
    // order of first occurrence is preserved by Array.distinct
    def distinct: VectorC = VectorC (v.distinct)
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Count the number of distinct elements in 'this' vector.
*/
    // name appears to be a typo (of e.g. 'countDistinct'); kept as-is for source compatibility
    def countinct: Int = v.distinct.length
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Determine whether 'x' is contained in 'this' vector.
* @param x the element to be checked
*/
    // linear membership test over the backing array
    def contains (x: Complex): Boolean = v contains x
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Determine whether any of the elements in 'x' are contained in 'this' vector.
* @param x the vector of elements to be checked
*/
def containsAny (x: VectorC): Boolean = (v intersect x.v).length != 0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the intersection of vectors 'this' and 'x'.
* @param x the other vector
*/
    // multiset intersection semantics of Array.intersect (duplicates limited by occurrence counts)
    def intersect (x: VectorC): VectorC = VectorC (v intersect x.v)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Reverse the order of the elements in 'this' vector.
*/
    // returns a new reversed vector; 'this' is left unchanged
    def reverse (): VectorC = new VectorC (dim, v.reverse)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Determine whether 'this' vector is in sorted (ascending) order.
*/
    // delegates the ascending-order check to SortingC
    def isSorted: Boolean = (new SortingC (v)).isSorted
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sort 'this' vector in-place in ascending (non-decreasing) order.
*/
    // in-place side effect on the backing array (hence the parentheses)
    def sort () { quickSort (v) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Sort 'this' vector in-place in descending (non-increasing) order.
*/
    // in-place descending sort of the backing array via 'qsort2'
    def sort2 () { qsort2 (v) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Swap elements 'i' and 'j' in 'this' vector.
* @param i the first element in the swap
* @param j the second element in the swap
*/
    // in-place swap; no bounds checking beyond the underlying array's
    def swap (i: Int, j: Int) { val t = v(j); v(j) = v(i); v(i) = t }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' vector is nonnegative (has no negative elements).
*/
    // early-returns on the first strictly negative element; an empty vector is vacuously nonnegative
    def isNonnegative: Boolean = { for (i <- range if v(i) < _0) return false; true }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compare 'this' vector with vector 'b'.
* @param b the other vector
*/
    def tryCompareTo [B >: VectorC] (b: B)
        (implicit view_1: (B) => PartiallyOrdered [B]): Option [Int] =
    {
        var le = true                                        // 'this' <= 'b' element-wise, so far
        var ge = true                                        // 'this' >= 'b' element-wise, so far
        for (i <- range) {
            val b_i = b.asInstanceOf [VectorC] (i)           // assumes 'b' is a VectorC -- ClassCastException otherwise
            if (ge && (v(i) compare b_i) < 0) ge = false
            else if (le && (v(i) compare b_i) > 0) le = false
        } // for
        // partial order: equal, less, greater, or incomparable (mixed directions)
        if (ge && le) Some (0) else if (le) Some (-1) else if (ge) Some (1) else None
    } // tryCompareTo
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Override equals to determine whether 'this' vector equals vector 'b'.
* @param b the vector to compare with this
*/
    override def equals (b: Any): Boolean =
    {
        // approximate, magnitude-relative comparison; note this weakens the usual equals/hashCode contract
        // b.isInstanceOf [VectorC] && (v.deep equals b.asInstanceOf [VectorC].v.deep)
        if (! b.isInstanceOf [VectoC]) return false
        val bb = b.asInstanceOf [VectoC]
        if (dim != bb.dim) return false
        val vm = if (dim > 0) mag else _1                    // maximum magnitude element in vector
        for (i <- range) {
            // if (v(i) !=~ bb(i)) return false              // stricter
            // the second clause tolerates differences tiny relative to the vector's magnitude
            if (v(i) !=~ bb(i) && v(i) + vm !=~ bb(i) + vm) return false
        } // for
        true
    } // equals
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Must also override hashCode for 'this' vector to be compatible with equals.
*/
    // deep hash over the backing array; NOTE(review): 'equals' above is approximate, so
    // approximately-equal (but not identical) vectors may hash differently -- confirm acceptable
    override def hashCode (): Int = v.deep.hashCode
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert 'this' vector to a String.
*/
    override def toString: String =
    {
        var sb = new StringBuilder ("VectorC(")
        if (dim == 0) return sb.append (")").mkString        // empty vector prints as "VectorC()"
        for (i <- range) {
            sb.append (fString.format (v(i)))
            // assumes fString ends with a one-character separator -- TODO confirm fString's definition
            if (i == dim-1) sb = sb.dropRight (1)
        } // for
        sb.replace (sb.length-1, sb.length, ")").mkString    // overwrite the final separator char with ')'
    } // toString
} // VectorC class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorC` object is the companion object for the `VectorC` class.
*/
object VectorC
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` from one or more values (repeated values `Complex`*).
     * @param x the first `Complex` number
     * @param xs the rest of the `Complex` numbers
     */
    def apply (x: Complex, xs: Complex*): VectorC =
    {
        val c = new VectorC (1 + xs.length)
        c(0) = x
        for (i <- 0 until c.dim-1) c.v(i+1) = xs(i)          // copy the varargs after position 0
        c
    } // apply
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` from a sequence/array of `Complex`s.
     * @param xs the sequence/array of the `Complex` numbers
     */
    def apply (xs: Seq [Complex]): VectorC =
    {
        val c = new VectorC (xs.length)
        for (i <- 0 until c.dim) c.v(i) = xs(i)
        c
    } // apply
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` from one or more values (repeated values String*).
     * @param x the first String
     * @param xs the rest of the Strings
     */
    def apply (x: String, xs: String*): VectorC =
    {
        val c = new VectorC (1 + xs.length)
        c(0) = Complex (x)                                   // each String is parsed by Complex.apply
        for (i <- 0 until c.dim-1) c.v(i+1) = Complex (xs(i))
        c
    } // apply
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` from an array of Strings.
     * @param xs the array of the Strings
     */
    def apply (xs: Array [String]): VectorC =
    {
        val c = new VectorC (xs.length)
        for (i <- c.range) c.v(i) = Complex (xs(i))
        c
    } // apply
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` from an array of Strings, skipping the first 'skip'
     * elements. If an element is non-numeric, use its hashcode.
     * FIX: Might be better to map non-numeric Strings to ordinal values.
     * @param xs the array of the Strings
     * @param skip the number of elements at the beginning to skip (e.g., id column)
     */
    def apply (xs: Array [String], skip: Int): VectorC =
    {
        val c = new VectorC (xs.length - skip)
        for (i <- skip until xs.length) {
            // regex: optional real part, optional '+imaginary' part, ending in 'i'
            // NOTE(review): confirm it agrees with the format accepted by Complex.apply(String)
            c.v(i - skip) = if (xs(i) matches "[\\\\-\\\\+]?\\\\d*(\\\\.\\\\d+)?(\\\\+[\\\\-\\\\+]?\\\\d*(\\\\.\\\\d+)?)?i") Complex (xs(i)) else xs(i).hashCode ()
        } // for
        c
    } // apply
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorC` with 'n' elements and fill it with the value 'x'.
     * @param n the number of elements
     * @param x the value to assign to all elements
     */
    def fill (n: Int)(x: Complex): VectorC =
    {
        val c = new VectorC (n)
        c.set (x)
        c
    } // fill
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a one vector (all elements are one) of length 'size'.
     * @param size the size of the new vector
     */
    def one (size: Int): VectorC = new VectorC (size, Array.fill (size)(_1))
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate scalar 'b' and vector 'u'.
     * @param b the scalar to be concatenated - first part
     * @param u the vector to be concatenated - second part (any kind)
     */
    def ++ (b: Complex, u: VectoC): VectorC =
    {
        val c = new VectorC (u.dim + 1)
        for (i <- c.range) c(i) = if (i == 0) b else u(i-1)  // scalar goes first
        c
    } // ++
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate scalar 'b' and vector 'u'.
     * @param b the scalar to be concatenated - first part
     * @param u the vector to be concatenated - second part (same kind, more efficient)
     */
    def ++ (b: Complex, u: VectorC): VectorC =
    {
        val c = new VectorC (u.dim + 1)
        for (i <- c.range) c(i) = if (i == 0) b else u.v(i-1)
        c
    } // ++
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return a `VectorC` containing a sequence of increasing integers in a range.
     * @param start the start value of the vector, inclusive
     * @param end the end value of the vector, exclusive (i.e., the first value not returned)
     */
    def range (start: Int, end: Int): VectorC =
    {
        val c = new VectorC (end - start)
        for (i <- c.range) c.v(i) = (start + i).toComplex    // integers widened to Complex
        c
    } // range
} // VectorC object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorCTest` object tests the operations provided by `VectorC`.
* > run-main scalation.linalgebra.VectorCTest
*/
object VectorCTest extends App
{
    // smoke test exercising VectorC operators on vectors of increasing dimension
    var x: VectorC = null
    var y: VectorC = null
    for (l <- 1 to 4) {
        println ("\\n\\tTest VectorC on vectors of dim " + l)
        x = new VectorC (l)
        y = new VectorC (l)
        x.set (2)                              // presumably relies on an implicit Int -> Complex widening
        y.set (3)
        // test vector op scalar
        println ("x + 4 = " + (x + 4))
        println ("x - 4 = " + (x - 4))
        println ("x * 4 = " + (x * 4))
        println ("x / 4 = " + (x / 4))
        println ("x ~^ 4 = " + (x ~^ 4))
        // test vector op vector
        println ("x + y = " + (x + y))
        println ("x - y = " + (x - y))
        println ("x * y = " + (x * y))
        println ("x / y = " + (x / y))
        println ("x.min = " + x.min ())
        println ("x.max = " + x.max ())
        println ("x.sum = " + x.sum)
        println ("x.sumNE = " + x.sumNE (0))
        println ("x dot y = " + (x dot y))
        println ("x ∙ y = " + (x ∙ y))
        println ("x.normSq = " + x.normSq)
        println ("x.norm = " + x.norm)
        println ("x < y = " + (x < y))
    } // for
    println ("hashCode (" + x + ") = " + x.hashCode ())
    println ("hashCode (" + y + ") = " + y.hashCode ())
    // construction from Strings exercises the Complex parser
    val z = VectorC ("1", "2", "3", "4")
    println ("z = " + z)
    println ("z.map (_ * 2) = " + z.map ((e: Complex) => e * 2))
    println ("z.filter (_ > 2) = " + z.filter (_ > 2))
} // VectorCTest
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/linalgebra/VectorC.scala | Scala | mit | 42,650 |
package org.sisioh.aws4s.s3.model
import com.amazonaws.services.s3.model.BucketVersioningConfiguration
import org.sisioh.aws4s.PimpedType
object BucketVersioningConfigurationFactory {

  /** Create a configuration with no versioning status set. */
  def create(): BucketVersioningConfiguration = new BucketVersioningConfiguration()

  /** Create a configuration with the given versioning status (valid values per the AWS SDK). */
  def creates(status: String): BucketVersioningConfiguration = new BucketVersioningConfiguration(status)

}
class RichBucketVersioningConfiguration(val underlying: BucketVersioningConfiguration)
    extends AnyVal with PimpedType[BucketVersioningConfiguration] {

  /** Current versioning status of the underlying configuration. */
  def status: String = underlying.getStatus

  /** Set the versioning status on the underlying configuration (mutates it). */
  def status_=(value: String): Unit = {
    underlying.setStatus(value)
  }

  /** Whether MFA delete is enabled on the underlying configuration. */
  def isMfaDeleteEnabled: Boolean = underlying.isMfaDeleteEnabled

  /** Enable/disable MFA delete on the underlying configuration (mutates it). */
  def mfaDeleteEnabled_=(value: Boolean): Unit = {
    underlying.setMfaDeleteEnabled(value)
  }

}
| everpeace/aws4s | aws4s-s3/src/main/scala/org/sisioh/aws4s/s3/model/RichBucketVersioningConfiguration.scala | Scala | mit | 826 |
/*******************************************************************************
* Copyright 2010 Maxime Lévesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.internals
import org.squeryl.dsl.ast.{ExpressionNode}
import collection.mutable.{HashSet, ArrayBuffer}
import org.squeryl.dsl.ast.ConstantTypedExpression
import org.squeryl.dsl.ast.ConstantExpressionNodeList
/** Marker trait for a JDBC statement parameter accumulated while writing SQL. */
trait StatementParam
/** A statement parameter backed by a constant typed expression. */
case class ConstantStatementParam(p: ConstantTypedExpression[_,_]) extends StatementParam
/** A statement parameter backed by a field value 'v' described by its metadata 'fmd'. */
case class FieldStatementParam(v: AnyRef, fmd: FieldMetaData) extends StatementParam
/*
* ParamWithMapper is a workadound to accomodate the ConstantExpressionNodeList, ideally 'in' and 'notIn' would grab the TEF in scope :
*
* def in[A2,T2](t: Traversable[A2])(implicit cc: CanCompare[T1,T2], tef: TypedExpressionFactory[A2,T2]): LogicalBoolean =
* new InclusionOperator(this, new RightHandSideOfIn(new zConstantExpressionNodeList(t, mapper)).toIn)
*
* type inferencer doesn't like it, so I grab the mapper that is available, which is JDBC compatible, so in practive it should work
* all the time...
* */
/** A statement parameter for the elements of a constant 'in' / 'notIn' list (ParamWithMapper workaround). */
case class ConstantExpressionNodeListParam(v: AnyRef, l: ConstantExpressionNodeList[_]) extends StatementParam
/**
* @param isForDisplay: when true, users of StatementWriter should write
* jdbc params as strings in statement,
* otherwise a jdbc param declarations '?' should be written, and
* the param values should be accumulated with addParam(s)
*/
class StatementWriter(val isForDisplay: Boolean, val databaseAdapter: DatabaseAdapter) {
  outer =>

  def this(databaseAdapter: DatabaseAdapter) = this(false, databaseAdapter)

  /** Names currently in scope while the statement is being written. */
  val scope = new HashSet[String]

  protected val _paramList = new ArrayBuffer[StatementParam]

  /**
   * a surrogate writer will accumulate text within itself (not the parent)
   * while param accumulation (addParam) will go to the root writer, this
   * is useful when it is easier to first build a string and to write it
   * afterwards
   */
  def surrogate: StatementWriter = new StatementWriter(isForDisplay, databaseAdapter) {
    indentWidth = outer.indentWidth
    override def surrogate = outer.surrogate
    override def addParam(p: StatementParam) = outer.addParam(p)
  }

  def params: Iterable[StatementParam] = _paramList

  private val _stringBuilder = new StringBuilder(256)

  /** The SQL text accumulated so far. */
  def statement = _stringBuilder.toString

  /** Accumulate a JDBC parameter (always routed to the root writer, even from a surrogate). */
  def addParam(p: StatementParam) = _paramList.append(p)

  override def toString =
    if(_paramList.isEmpty)
      statement
    else
      _paramList.mkString(statement+"\\njdbcParams:[",",","]")

  private val INDENT_INCREMENT = 2

  private var indentWidth = 0

  def indent(width: Int) = indentWidth += width
  def unindent(width: Int) = indentWidth -= width

  def indent: Unit = indent(INDENT_INCREMENT)
  def unindent: Unit = unindent(INDENT_INCREMENT)

  private def _append(s: String) = {
    // any pending (lazy) newline is materialized just before new text is written
    _flushPendingNextLine
    _stringBuilder.append(s)
  }

  private def _writeIndentSpaces: Unit =
    _writeIndentSpaces(indentWidth)

  private def _writeIndentSpaces(c: Int) =
    for( i <- 1 to c)
      _append(" ")

  def nextLine = {
    _append("\\n")
    _writeIndentSpaces
  }

  // a newline that is only emitted if some text is appended afterwards
  private var _lazyPendingLine: Option[() => Unit] = None

  def pushPendingNextLine =
    _lazyPendingLine = Some(()=> nextLine)

  private def _flushPendingNextLine =
    if(_lazyPendingLine != None) {
      val pl = _lazyPendingLine
      _lazyPendingLine = None  // cleared before invoking, since nextLine re-enters _append
      val lpl = pl.get
      lpl()
    }

  /** Write the given lines, separated (but not terminated) by newlines. */
  def writeLines(s: String*) = {
    val size = s.size
    var c = 1                  // FIX: was 'val c = 1' and never incremented, so the guard below
    for(l <- s) {              // was constant and a newline was also emitted after the LAST line
      _append(l)
      if(c < size)
        nextLine
      c += 1
    }
  }

  /** Write each line followed by 'separator' (between lines) and a newline after every line. */
  def writeLinesWithSeparator(s: Iterable[String], separator: String) = {
    val size = s.size
    var c = 1
    for(l <- s) {
      _append(l)
      if(c < size)
        _append(separator)
      nextLine
      c += 1
    }
  }

  /** Write each node, separated by 'separator' (optionally followed by a newline). */
  def writeNodesWithSeparator(s: Iterable[ExpressionNode], separator: String, newLineAfterSeparator: Boolean) = {
    val size = s.size
    var c = 1
    for(n <- s) {
      n.write(this)
      if(c < size) {
        _append(separator)
        if(newLineAfterSeparator)
          nextLine
      }
      c += 1
    }
  }

  def write(s: String*) =
    for(s0 <- s)
      _append(s0)

  def writeIndented(u: =>Unit): Unit =
    writeIndented(INDENT_INCREMENT, u)

  /** Run 'u' with the indent level temporarily increased by 'width'. */
  def writeIndented(width: Int, u: =>Unit) = {
    indent(width)
    _writeIndentSpaces(width)
    u
    unindent(width)
  }

  def quoteName(s: String) = databaseAdapter.quoteName(s)
}
| ccap/Squeryl | src/main/scala/org/squeryl/internals/StatementWriter.scala | Scala | apache-2.0 | 5,318 |
object Test {
  // Scala 3 extension method: adds 'pow2' (the square) to Int
  extension (i: Int)
    def pow2 = i * i
  println(5.pow2)  // prints 25 when the object is initialized
}
| dotty-staging/dotty | tests/pos/i7403.scala | Scala | apache-2.0 | 79 |
package lang.akka.first
import akka.actor.{Actor, Props}
import akka.event.Logging
object FirstActor {
  // companion intentionally empty; a Props factory for the actor could live here
}
/**
 * Minimal example actor: logs every incoming message; when the message is
 * exactly "test" it additionally replies to the sender with an empty string.
 */
class FirstActor extends Actor {
  val log = Logging(context.system, this)

  override def receive: Receive = {
    case msg if msg == "test" =>
      log.info("get test")
      sender() ! ""
    case _ =>
      log.info("get wtf")
  }
}
| congdepeng/scalab | src/main/scala/lang/akka/first/FirstActor.scala | Scala | apache-2.0 | 332 |
package org.jetbrains.plugins.scala.scalai18n.codeInspection.i18n.folding
import com.intellij.lang.ASTNode
import com.intellij.lang.folding.{FoldingBuilderEx, FoldingDescriptor}
import com.intellij.lang.properties.IProperty
import com.intellij.openapi.editor.Document
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.impl.source.SourceTreeToPsiMap
import com.intellij.util.ObjectUtils
import org.jetbrains.annotations.NotNull
import org.jetbrains.plugins.scala.extensions.{IteratorExt, PsiElementExt}
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScArgumentExprList, ScExpression, ScMethodCall, ScReferenceExpression}
import org.jetbrains.plugins.scala.lang.psi.util.ScalaConstantExpressionEvaluator
import org.jetbrains.plugins.scala.scalai18n.codeInspection.i18n.ScalaI18nUtil
/**
 * Editor folding for i18n properties in Scala sources: a string literal
 * that resolves to an i18n property key (and, where possible, the whole
 * surrounding message-formatting call) is collapsed to the resolved
 * property text.
 *
 * @author Ksenia.Sautina
 * @since 7/17/12
 */
class ScalaPropertyFoldingBuilder extends FoldingBuilderEx {

  /** Walks the file and registers one folding region per i18n literal.
   *  Skipped in `quick` mode and when property folding is disabled. */
  @NotNull def buildFoldRegions(@NotNull element: PsiElement, @NotNull document: Document, quick: Boolean): Array[FoldingDescriptor] = {
    if (!element.isInstanceOf[ScalaFile] || quick || !ScalaI18nUtil.isFoldingsOn) {
      return FoldingDescriptor.EMPTY
    }
    val file: ScalaFile = element.asInstanceOf[ScalaFile]
    val project: Project = file.getProject
    val result = new java.util.ArrayList[FoldingDescriptor]
    file.depthFirst()
      .filterByType[ScLiteral]
      .foreach(checkLiteral(project, _, result))
    result.toArray(new Array[FoldingDescriptor](result.size))
  }

  /** Placeholder shown while folded: the resolved i18n message for a bare
   *  literal, the formatted message for a whole method call; falls back
   *  to the element's own text. */
  def getPlaceholderText(@NotNull node: ASTNode): String = {
    val element: PsiElement = SourceTreeToPsiMap.treeElementToPsi(node)
    element match {
      case literal: ScLiteral =>
        return ScalaI18nUtil.getI18nMessage(element.getProject, literal)
      case methodCall: ScMethodCall =>
        return ScalaI18nUtil.formatMethodCallExpression(element.getProject, methodCall)
      case _ =>
    }
    element.getText
  }

  def isCollapsedByDefault(@NotNull node: ASTNode): Boolean = ScalaI18nUtil.isFoldingsOn

  /**
   * Registers the folding region for one literal. When the literal is the
   * first argument of a call whose remaining arguments are all references
   * or compile-time constants, and the argument count equals 1 + the
   * property's placeholder count, the whole call is folded; otherwise
   * only the literal itself is.
   */
  private def checkLiteral(project: Project, literal: ScLiteral, result: java.util.ArrayList[FoldingDescriptor]) {
    if (ScalaI18nUtil.isI18nProperty(project, literal)) {
      val property: IProperty = ScalaI18nUtil.getI18nProperty(project, literal)
      // The resolved property is attached to the descriptor as a dependency.
      val set = new java.util.HashSet[AnyRef]
      if (property != null) set.add(property)
      literal.getParent match {
        case argsList: ScArgumentExprList =>
          // NOTE(review): assumes the argument list is non-empty here —
          // exprs(0) would throw otherwise; confirm PSI guarantees this.
          val exprs = argsList.exprsArray
          val msg: String = ScalaI18nUtil.formatI18nProperty(literal, property)
          if (msg != literal.getText && (exprs(0) == literal)) {
            val count: Int = ScalaI18nUtil.getPropertyValueParamsMaxCount(literal)
            val args: Array[ScExpression] = argsList.exprsArray
            if (args.length == 1 + count && argsList.getParent.isInstanceOf[ScMethodCall]) {
              val evaluator = new ScalaConstantExpressionEvaluator
              val refOrValue = args.drop(1).forall { arg =>
                arg.isInstanceOf[ScReferenceExpression] ||
                  evaluator.computeConstantExpression(arg, throwExceptionOnOverflow = false) != null
              }
              if (refOrValue) {
                result.add(new FoldingDescriptor(ObjectUtils.assertNotNull(argsList.getParent.getNode), argsList.getParent.getTextRange, null, set))
                return
              }
            }
          }
        case _ =>
      }
      result.add(new FoldingDescriptor(ObjectUtils.assertNotNull(literal.getNode), literal.getTextRange, null, set))
    }
  }
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/folding/ScalaPropertyFoldingBuilder.scala | Scala | apache-2.0 | 3,741 |
package com.mattwittmann
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
/**
 * Application entry point: starts the actor system, creates the single
 * EtymologyResource handler actor and asks the spray-can HTTP extension
 * to bind it to localhost:8080.
 */
object Boot extends App {
  implicit val system = ActorSystem("etymonline_rest_api")
  // The HTTP handler actor, named after its class.
  val etymologyResource = system.actorOf(Props[EtymologyResource], classOf[EtymologyResource].getSimpleName)
  // Asynchronous bind request; the server lives for the process lifetime.
  IO(Http) ! Http.Bind(etymologyResource, "localhost", port = 8080)
}
| matt-wittmann/etymonline_rest_api | src/main/scala/com/mattwittmann/Boot.scala | Scala | apache-2.0 | 370 |
/** Multi-dimensional array creation with `new` was removed in 2.10.
* The replacement Array.ofDim[Int](10,10) makes the original mistake
* which was tested here impossible.
* This test will fail now because the constructor doesn't exist anymore.
*/
class Foo {
  // Deliberately ill-formed: the (Int, Int) Array constructor was removed
  // in 2.10 (see the header comment above), so this `neg` compiler test
  // must keep failing on this line. Do NOT "fix" it.
  val a: Array[Int] = new Array(10, 10)
}
//Before removal of constructor non-unary Array constructors:
/** Check that a multi-dimensional array can't be created
* when the wrong number of arguments w.r.t. to the array's
* type is given.
*/
| folone/dotty | tests/untried/neg/multi-array.scala | Scala | bsd-3-clause | 516 |
/*
* Copyright 2015 – 2016 Martin Seeler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.oanda
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.testkit.TestFrameworkInterface.Scalatest
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Keep, Sink, Source}
import org.scalatest._
import rx.oanda.OandaEnvironment._
/**
 * Verifies that each OandaEnvironment wires its API and stream connection
 * pools to the expected OANDA hosts, always on port 443.
 */
class ConnectionFlowSpec extends FlatSpec with Matchers with Scalatest {

  behavior of "The ConnectionFlow"

  // Shared actor system / materializer for running the (empty) test streams.
  implicit val sys = ActorSystem()
  implicit val mat = ActorMaterializer()
  import sys.dispatcher

  it must "provide the correct api connection for the trade practice environment" in {
    val apiFlow = OandaEnvironment.TradePracticeEnvironment("token").apiFlow[Long]
    // Materializing with an empty source exposes the pool setup without traffic.
    val pool = Source.empty.viaMat(apiFlow)(Keep.right).toMat(Sink.ignore)(Keep.left).run()
    pool.setup.host should be ("api-fxpractice.oanda.com")
    pool.setup.port shouldBe 443
  }

  it must "provide the correct stream connection for the trade practice environment" in {
    val streamFlow = OandaEnvironment.TradePracticeEnvironment("token").streamFlow[Long]
    val pool = Source.empty.viaMat(streamFlow)(Keep.right).toMat(Sink.ignore)(Keep.left).run()
    pool.setup.host should be ("stream-fxpractice.oanda.com")
    pool.setup.port shouldBe 443
  }

  it must "provide the correct api connection for the trade environment" in {
    val apiFlow = OandaEnvironment.TradeEnvironment("token").apiFlow[Long]
    val pool = Source.empty.viaMat(apiFlow)(Keep.right).toMat(Sink.ignore)(Keep.left).run()
    pool.setup.host should be ("api-fxtrade.oanda.com")
    pool.setup.port shouldBe 443
  }

  it must "provide the correct stream connection for the trade environment" in {
    val streamFlow = OandaEnvironment.TradeEnvironment("token").streamFlow[Long]
    val pool = Source.empty.viaMat(streamFlow)(Keep.right).toMat(Sink.ignore)(Keep.left).run()
    pool.setup.host should be ("stream-fxtrade.oanda.com")
    pool.setup.port shouldBe 443
  }

  // Shuts down all connection pools, then terminates the actor system.
  def cleanUp(): Unit = Http().shutdownAllConnectionPools().onComplete(_ ⇒ sys.terminate())
}
| MartinSeeler/rx-oanda | src/test/scala/rx/oanda/ConnectionFlowSpec.scala | Scala | apache-2.0 | 2,632 |
/* NSC -- new scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package reflect
package internal
import scala.reflect.internal.util.StringOps.{ countAsString, countElementsAsString }
trait Kinds {
self: SymbolTable =>
import definitions._
private type SymPair = ((Symbol, Symbol)) // ((Argument, Parameter))

/**
 * Accumulates the three families of kind-check failures for
 * (type argument, type parameter) pairs — arity mismatches, variance
 * mismatches and bounds ("strictness") violations — and renders them
 * into an error message.
 */
case class KindErrors(
  arity: List[SymPair] = Nil,
  variance: List[SymPair] = Nil,
  strictness: List[SymPair] = Nil
) {
  def isEmpty = arity.isEmpty && variance.isEmpty && strictness.isEmpty

  def arityError(syms: SymPair)      = copy(arity = arity :+ syms)
  def varianceError(syms: SymPair)   = copy(variance = variance :+ syms)
  def strictnessError(syms: SymPair) = copy(strictness = strictness :+ syms)

  def ++(errs: KindErrors) = KindErrors(
    arity ++ errs.arity,
    variance ++ errs.variance,
    strictness ++ errs.strictness
  )
  // @M TODO this method is duplicated all over the place (varianceString)
  private def varStr(s: Symbol): String =
    if (s.isCovariant) "covariant"
    else if (s.isContravariant) "contravariant"
    else "invariant"

  /** Disambiguating suffix for two same-named symbols: walks up the owner
   *  chains until the owner names differ and renders that location. */
  private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
    if((a0 eq b0) || (a0.owner eq b0.owner)) ""
    else {
      var a = a0; var b = b0
      while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
      // Bug fix: this previously tested `a.locationString ne ""`, a
      // *reference* comparison that is true even for a freshly built empty
      // string, which could render a spurious " ()" suffix. Compare the
      // trimmed contents instead.
      val loc = a.locationString.trim
      if (loc.nonEmpty) " (" + loc + ")" else ""
    }
  }
  private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
    f(a+qualify(a,p), p+qualify(p,a))

  // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over
  // the place, but here we need it for the message to make sense.
  private def strictnessMessage(a: Symbol, p: Symbol) =
    kindMessage(a, p)("%s's bounds%s are stricter than %s's declared bounds%s".format(
      _, a.info, _, p.info match {
        case tb @ TypeBounds(_, _) if tb.isEmptyBounds => " >: Nothing <: Any"
        case tb => "" + tb
      })
    )

  private def varianceMessage(a: Symbol, p: Symbol) =
    kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))

  private def arityMessage(a: Symbol, p: Symbol) =
    kindMessage(a, p)("%s has %s, but %s has %s".format(
      _, countElementsAsString(a.typeParams.length, "type parameter"),
      _, countAsString(p.typeParams.length))
    )

  private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = (
    if (xs.isEmpty) ""
    else xs map f.tupled mkString ("\\n", ", ", "")
  )

  /** Full multi-line report for a failed (targ, tparam) kind check. */
  def errorMessage(targ: Type, tparam: Symbol): String = (
    (targ+"'s type parameters do not match "+tparam+"'s expected parameters:")
    + buildMessage(arity, arityMessage)
    + buildMessage(variance, varianceMessage)
    + buildMessage(strictness, strictnessMessage)
  )
}
/** The canonical "no errors" value. */
val NoKindErrors = KindErrors(Nil, Nil, Nil)
// TODO: this desperately needs to be cleaned up
// plan: split into kind inference and subkinding
// every Type has a (cached) Kind

/** True when every type argument's kind conforms to the corresponding
 *  parameter's expected kind; errors are only detected, not collected. */
def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
  checkKindBounds0(tparams, targs, pre, owner, explainErrors = false).isEmpty
/** Check whether `sym1`'s variance conforms to `sym2`'s variance.
 *
 * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
 * (An invariant parameter accepts an argument of any variance.)
 */
private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
  sym2.variance.isInvariant
  || sym1.variance == sym2.variance
)
/** Check well-kindedness of type application (assumes arities are already checked) -- @M
 *
 * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
 * (checked one type member at a time -- in that case, prefix is the name of the type alias)
 *
 * Type application is just like value application: it's "contravariant" in the sense that
 * the type parameters of the supplied type arguments must conform to the type parameters of
 * the required type parameters:
 *   - their bounds must be less strict
 *   - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
 *   - @M TODO: are these conditions correct,sufficient&necessary?
 *
 *  e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
 *       List's type parameter is also covariant and its bounds are weaker than <: Int
 */
def checkKindBounds0(
  tparams: List[Symbol],
  targs: List[Type],
  pre: Type,
  owner: Symbol,
  explainErrors: Boolean
): List[(Type, Symbol, KindErrors)] = {
  // instantiate type params that come from outside the abstract type we're currently checking
  def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz)

  // check that the type parameters hkargs to a higher-kinded type conform to the
  // expected params hkparams
  def checkKindBoundsHK(
    hkargs: List[Symbol],
    arg: Symbol,
    param: Symbol,
    paramowner: Symbol,
    underHKParams: List[Symbol],
    withHKArgs: List[Symbol]
  ): KindErrors = {
    var kindErrors: KindErrors = NoKindErrors
    def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
    // @M sometimes hkargs != arg.typeParams, the symbol and the type may
    // have very different type parameters
    val hkparams = param.typeParams
    // Folds a failure into the local `kindErrors` accumulator when `cond` fails.
    def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
      if (!cond)
        kindErrors = f(kindErrors)
    }
    if (settings.debug) {
      log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
      log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
      log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
    }
    if (!sameLength(hkargs, hkparams)) {
      // Any and Nothing are kind-overloaded
      if (arg == AnyClass || arg == NothingClass) NoKindErrors
      // shortcut: always set error, whether explainTypesOrNot
      else return kindErrors.arityError(arg -> param)
    }
    else foreach2(hkargs, hkparams) { (hkarg, hkparam) =>
      if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
        kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam))
        // instantiateTypeParams(tparams, targs)
        //   higher-order bounds, may contain references to type arguments
        // substSym(hkparams, hkargs)
        //   these types are going to be compared as types of kind *
        //
        // Their arguments use different symbols, but are
        // conceptually the same. Could also replace the types by
        // polytypes, but can't just strip the symbols, as ordering
        // is lost then.
        val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner)
        val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner)
        val argumentBounds = transform(hkarg.info.bounds, owner)

        kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam))

        debuglog(
          "checkKindBoundsHK base case: " + hkparam +
          " declared bounds: " + declaredBounds +
          " after instantiating earlier hkparams: " + declaredBoundsInst + "\\n" +
          "checkKindBoundsHK base case: "+ hkarg +
          " has bounds: " + argumentBounds
        )
      }
      else {
        hkarg.initialize // SI-7902 otherwise hkarg.typeParams yields List(NoSymbol)!
        debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
        kindErrors ++= checkKindBoundsHK(
          hkarg.typeParams,
          hkarg,
          hkparam,
          paramowner,
          underHKParams ++ hkparam.typeParams,
          withHKArgs ++ hkarg.typeParams
        )
      }
      // Fail fast (nonlocal return) when the caller only needs a yes/no answer.
      if (!explainErrors && !kindErrors.isEmpty)
        return kindErrors
    }
    if (explainErrors) kindErrors
    else NoKindErrors
  }

  if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log(
    "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
    + owner + ", " + explainErrors + ")"
  )

  flatMap2(tparams, targs) { (tparam, targ) =>
    // Prevent WildcardType from causing kind errors, as typevars may be higher-order
    if (targ == WildcardType) Nil else {
      // force symbol load for #4205
      targ.typeSymbolDirect.info
      // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
      val tparamsHO = targ.typeParams
      if (targ.isHigherKinded || tparam.typeParams.nonEmpty) {
        // NOTE: *not* targ.typeSymbol, which normalizes
        val kindErrors = checkKindBoundsHK(
          tparamsHO, targ.typeSymbolDirect, tparam,
          tparam.owner, tparam.typeParams, tparamsHO
        )
        if (kindErrors.isEmpty) Nil else {
          if (explainErrors) List((targ, tparam, kindErrors))
          // Return as soon as an error is seen if there's nothing to explain.
          else return List((NoType, NoSymbol, NoKindErrors))
        }
      }
      else Nil
    }
  }
}
/**
 * The data structure describing the kind of a given type.
 *
 * Proper types are represented using ProperTypeKind.
 *
 * Type constructors are represented using TypeConKind.
 */
abstract class Kind {
  import Kind.StringState

  /** Human-readable one-line description of this kind. */
  def description: String
  /** 0 for a proper type; 1 + the maximum argument order for a type constructor. */
  def order: Int
  def bounds: TypeBounds

  /** Scala syntax notation of this kind.
   * Proper types are expresses as A.
   * Type constructors are expressed as F[k1 >: lo <: hi, k2, ...] where k1, k2, ... are parameter kinds.
   * If the bounds exists at any level, it preserves the type variable names. Otherwise,
   * it uses prescribed letters for each level: A, F, X, Y, Z.
   */
  def scalaNotation: String

  /** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
   * Proper types are expressed as *.
   * Type constructors are expressed * -> *(lo, hi) -(+)-> *.
   */
  def starNotation: String

  /** Contains bounds either as part of itself or its arguments.
   */
  def hasBounds: Boolean = !bounds.isEmptyBounds

  private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
}
object Kind {
  private[internal] sealed trait ScalaNotation

  /** One pretty-printed type-variable token: `order` selects the generated
   *  alias letter (A/F/X/Y/Z/...), `n` an optional numeric suffix, and
   *  `alias` an explicit type-variable name that overrides the letter. */
  private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
    override def toString: String = {
      alias getOrElse {
        typeAlias(order) + n.map(_.toString).getOrElse("")
      }
    }
    private def typeAlias(x: Int): String =
      x match {
        case 0 => "A"
        case 1 => "F"
        case 2 => "X"
        case 3 => "Y"
        case 4 => "Z"
        case n if n < 12 => ('O'.toInt - 5 + n).toChar.toString
        case _ => "V"
      }
  }

  /** Literal text between Head tokens: brackets, commas, bounds, variance signs. */
  private[internal] sealed case class Text(value: String) extends ScalaNotation {
    override def toString: String = value
  }

  private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
    override def toString: String = tokens.mkString
    def append(value: String): StringState = StringState(tokens :+ Text(value))
    def appendHead(order: Int, sym: Symbol): StringState = {
      val n = countByOrder(order) + 1
      val alias = if (sym eq NoSymbol) None
                  else Some(sym.nameString)
      StringState(tokens :+ Head(order, Some(n), alias))
    }
    def countByOrder(o: Int): Int = tokens count {
      case Head(`o`, _, _) => true
      case _ => false // was `case t => false`; the binding was unused
    }
    // Replace Head(o, Some(1), a) with Head(o, None, a) if countByOrder(o) <= 1, so F1[A] becomes F[A]
    def removeOnes: StringState = {
      // Fold with 0 so an empty token sequence yields order 0 instead of
      // throwing (`.max` fails on an empty collection).
      val maxOrder = (tokens map {
        case Head(o, _, _) => o
        case _ => 0
      }).foldLeft(0)(_ max _)
      // foldLeft replaces the deprecated `/:` operator; same traversal.
      StringState((0 to maxOrder).foldLeft(tokens) { (ts: Seq[ScalaNotation], o: Int) =>
        if (countByOrder(o) <= 1)
          ts map {
            case Head(`o`, _, a) => Head(o, None, a)
            case t => t
          }
        else ts
      })
    }
    // Replace Head(o, n, Some(_)) with Head(o, n, None), so F[F] becomes F[A].
    def removeAlias: StringState = {
      StringState(tokens map {
        case Head(o, n, Some(_)) => Head(o, n, None)
        case t => t
      })
    }
  }
  private[internal] object StringState {
    def empty: StringState = StringState(Seq())
  }

  /** A type carrying the given type parameters (poly type over Any). */
  def FromParams(tparams: List[Symbol]): Type = GenPolyType(tparams, AnyTpe)
  def Wildcard: Type = WildcardType
}
/** Kind of a proper (fully applied) type: order 0, plus optional bounds. */
class ProperTypeKind(val bounds: TypeBounds) extends Kind {
  import Kind.StringState
  val description: String = "This is a proper type."
  val order = 0
  private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
    s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
  }
  def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
  def starNotation: String = "*" + bounds.starNotation(_.toString)
}
object ProperTypeKind {
  /** Proper type kind with empty bounds. */
  def apply: ProperTypeKind = this(TypeBounds.empty)
  def apply(bounds: TypeBounds): ProperTypeKind = new ProperTypeKind(bounds)
  def unapply(ptk: ProperTypeKind): Some[TypeBounds] = Some(ptk.bounds)
}
/**
 * Kind of a type constructor: bounds plus one Argument (variance + kind)
 * per type parameter; its order is one more than the highest argument order.
 */
class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
  import Kind.StringState
  // NOTE(review): assumes `args` is non-empty (a type constructor has at
  // least one parameter) — `.max` on an empty Seq would throw; confirm
  // callers (e.g. inferKind) uphold this.
  val order = (args map (_.kind.order)).max + 1
  def description: String =
    if (order == 1) "This is a type constructor: a 1st-order-kinded type."
    else "This is a type constructor that takes type constructor(s): a higher-kinded type."
  override def hasBounds: Boolean = super.hasBounds || args.exists(_.kind.hasBounds)

  /** Scala-syntax rendering; numeric suffixes are dropped when unambiguous,
   *  and parameter aliases are dropped when no bounds appear anywhere. */
  def scalaNotation: String = {
    val s = buildState(NoSymbol, Variance.Invariant)(StringState.empty).removeOnes
    val s2 = if (hasBounds) s
             else s.removeAlias
    s2.toString
  }
  private[internal] def buildState(sym: Symbol, v: Variance)(s0: StringState): StringState = {
    var s: StringState = s0
    s = s.append(v.symbolicString).appendHead(order, sym).append("[")
    args.zipWithIndex foreach { case (arg, i) =>
      s = arg.kind.buildState(arg.sym, arg.variance)(s)
      if (i != args.size - 1) {
        s = s.append(",")
      }
    }
    s = s.append("]").append(bounds.scalaNotation(_.toString))
    s
  }

  /** Arrow notation per the abstract Kind.starNotation contract. */
  def starNotation: String = {
    import Variance._
    (args map { arg =>
      (if (arg.kind.order == 0) arg.kind.starNotation
       else "(" + arg.kind.starNotation + ")") +
      (if (arg.variance == Invariant) " -> "
       else " -(" + arg.variance.symbolicString + ")-> ")
    }).mkString + "*" + bounds.starNotation(_.toString)
  }
}
object TypeConKind {
  def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
  def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
  def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some((tck.bounds, tck.args))

  /** One type-constructor parameter: its variance, its kind, and (in a
   *  separate argument list, excluded from equality) its symbol. */
  case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
}
/**
 * Starting from a Symbol (sym) or a Type (tpe), infer the kind that classifies it (sym.tpeHK/tpe).
 */
object inferKind {
  import TypeConKind.Argument

  abstract class InferKind {
    protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
    protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
    def apply(sym: Symbol): Kind = infer(sym, true)
    def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, true)
  }

  /** Kind inference as seen from prefix `pre`: bounds are empty at the top
   *  level and recomputed via asSeenFrom for nested parameters; a
   *  higher-kinded type recurses into its type parameters. */
  def apply(pre: Type): InferKind = new InferKind {
    protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
      val bounds = if (topLevel) TypeBounds.empty
                   else tpe.asSeenFrom(pre, owner).bounds
      if(!tpe.isHigherKinded) ProperTypeKind(bounds)
      else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, false))(p) })
    }
  }
}
}
| felixmulder/scala | src/reflect/scala/reflect/internal/Kinds.scala | Scala | bsd-3-clause | 16,872 |
package changestream.actors
import java.util.UUID
import akka.actor.{ Actor, ActorRef, ActorRefFactory }
import changestream.events.MutationWithInfo
import changestream.events._
import kamon.Kamon
import org.slf4j.LoggerFactory
/**
 * Groups binlog mutation events into transactions: each mutation is held
 * back until the next event arrives, so the final mutation of a
 * transaction can be flagged `lastMutationInTransaction` on commit or
 * rollback; every forwarded mutation carries the transaction id (GTID)
 * and its 1-based starting row offset within the transaction.
 */
class TransactionActor(getNextHop: ActorRefFactory => ActorRef) extends Actor {
  protected val log = LoggerFactory.getLogger(getClass)
  protected val batchSizeMetric = Kamon.histogram("changestream.binlog_event.row_count")
  protected val transactionSizeMetric = Kamon.histogram("changestream.transaction.row_count")

  protected val nextHop = getNextHop(context)

  /** Mutable State! */
  // 1-based row offset the next buffered mutation starts at.
  protected var mutationCount: Long = 1
  // Transaction id: a random UUID at BEGIN, replaced by the real GTID if one arrives.
  protected var currentGtid: Option[String] = None
  // Last mutation seen, held back until we know whether it ends the transaction.
  protected var previousMutation: Option[MutationWithInfo] = None

  def receive = {
    case BeginTransaction =>
      log.debug("Received BeginTransacton")
      mutationCount = 1
      currentGtid = Some(UUID.randomUUID.toString)
      previousMutation = None

    case Gtid(guid) =>
      log.debug("Received GTID for transaction: {}", guid)
      currentGtid = Some(guid)

    case event: MutationWithInfo =>
      log.debug("Received Mutation for tableId: {}", event.mutation.tableId)
      batchSizeMetric.record(event.mutation.rows.length)

      currentGtid match {
        case None =>
          // Not inside a transaction: pass straight through untouched.
          nextHop ! event
        case Some(gtid) =>
          // Flush the previously buffered mutation (it is not the last one).
          previousMutation.foreach { mutation =>
            log.debug("Adding transaction info and forwarding to the {} actor.", nextHop.path.name)
            nextHop ! mutation
          }
          previousMutation = Some(event.copy(
            transaction = Some(TransactionInfo(
              gtid = gtid,
              currentRow = mutationCount
            ))
          ))
          mutationCount += event.mutation.rows.length
      }

    case CommitTransaction(position) =>
      log.debug("Received Commit with position {}", position)
      // Flush the buffered mutation, marked as the transaction's last.
      previousMutation.foreach { mutation =>
        log.debug("Adding transaction info and forwarding to the {} actor.", nextHop.path.name)
        nextHop ! mutation.copy(
          transaction = mutation.transaction.map { txInfo =>
            txInfo.copy(lastMutationInTransaction = true)
          },
          // TODO: this is unfortunate... because we are now essentially saving the "last safe position" we are guaranteed to replay events when we shut down un-gracefully
          nextPosition = mutation.nextPosition.split(":")(0) + ":" + position.toString
        )
      }
      transactionSizeMetric.record(mutationCount)
      mutationCount = 1
      currentGtid = None
      previousMutation = None

    case RollbackTransaction =>
      log.debug("Received Rollback")
      // TODO: this probably doesn't work for mysql configurations that send a rollback event (vs only sending committed events).. consider removing the rollback handling
      previousMutation.foreach { mutation =>
        log.debug("Adding transaction info and forwarding to the {} actor.", nextHop.path.name)
        nextHop ! mutation.copy(
          transaction = mutation.transaction.map { txInfo =>
            txInfo.copy(lastMutationInTransaction = true)
          }
        )
      }
      transactionSizeMetric.record(mutationCount)
      mutationCount = 1
      currentGtid = None
      previousMutation = None
  }
}
| mavenlink/changestream | src/main/scala/changestream/actors/TransactionActor.scala | Scala | mit | 3,322 |
package application.impl
import javax.inject.Inject
import application.MaintenanceService
import domain.user.UserAuthority.ApplicationAdministrator
import domain.user.{ User, UserRepository }
import scalikejdbc.DBSession
/**
 * Implementation of MaintenanceService.
 */
class MaintenanceServiceImpl @Inject() (
  userRepository: UserRepository
) extends MaintenanceService {
  /**
   * @inheritdoc
   *
   * Bootstraps an initial application administrator ("admin@vss-kanban",
   * password "vss-kanban-admin") only when no user exists yet. The
   * account's display name is a Japanese warning telling operators to
   * delete the account or change its password promptly.
   */
  override def createAdminUser()(implicit session: DBSession): Unit = {
    if (!userRepository.existsUser) {
      val adminUser = User(
        userId = None,
        loginId = "admin@vss-kanban",
        name = "このユーザは早めに削除するか、パスワードを変更して下さい",
        authority = ApplicationAdministrator
      )
      userRepository.create(adminUser, "vss-kanban-admin")
    }
  }
}
| nemuzuka/vss-kanban | src/main/scala/application/impl/MaintenanceServiceImpl.scala | Scala | mit | 837 |
package intro
/**
* Created by Randyt on 2017/7/5.
*/
object intro {
  /**
   * Reverses a list.
   *
   * Rewritten from the naive `rev(xs) ++ List(x)` recursion, which is
   * O(n^2) (each `++` re-walks the accumulated result) and not
   * stack-safe for long inputs, to a tail-recursive accumulator that is
   * O(n). The signature is unchanged.
   */
  def rev(arr: List[Int]): List[Int] = {
    import scala.annotation.tailrec
    @tailrec
    def loop(rest: List[Int], acc: List[Int]): List[Int] = rest match {
      case Nil     => acc
      case x :: xs => loop(xs, x :: acc)
    }
    loop(arr, Nil)
  }
}
| hotung1027/HackerRank | Functional Programming/Scala/src/main/scala/intro/intro.scala | Scala | mit | 200 |
package time
import text.{StringNone, StringOption, StringSome}
import scala.util.matching.Regex
import scala.util.matching.Regex.Match
/**
* <pre>
* Created on 11/19/14.
* </pre>
* @author K.Sakamoto
*/
object TimeExtractorInTimeExpression extends TimeExtractor {
// Mutable state: remembers the century base (e.g. 1900) of the most
// recently seen 3+ digit year so that later 1-2 digit years can be
// resolved relative to it (see parse年 below).
private var romanCalendar: Int = 0
/*
//40年代の場合、ひとつ前の年が1801年なら、1840年代と解釈するが、
//ひとつ前の年がわからない場合は、1940年代と解釈する処理
def correctTime(times: Seq[Time]): Seq[Time] = {
val size = times.size
if (1 < size) {
val buffer = ListBuffer[Time]()
buffer += times(0)
{for (i <- 1 until times.size) yield {
var yearBegin = times(i).yearBegin
var yearEnd = times(i).yearEnd
yearBegin match {
case Some(begin) =>
yearEnd match {
case Some(end) =>
if (0 < begin && begin < 100) {
} else {
}
case None =>
}
case None =>
yearEnd match {
case Some(end) =>
case None =>
times(i)
}
}
}}.toList ++= buffer
buffer.result()
} else {
times
}
}
*/
// (begin year, span length in years) — the span is added to begin later.
private type BeginCountOptions = (Option[Int], Option[Int])
// (begin year, end year) — either side may be absent (open-ended range).
private type BeginEndOptions = (Option[Int], Option[Int])
/**
 * Extracts Japanese time expressions (years, decades, centuries,
 * millennia, named eras) from the sentence and converts each regex match
 * into a TimeTmp span via parseTime and the three revise passes.
 *
 * Regex groups: (1) BC prefix, (2) number, (3) unit/era word,
 * (4) sub-period, (5) "around" fuzziness, (6) before/after clue.
 *
 * Idiom fix: the original declared `var time` outside the loop and
 * reassigned it on every match; it is now an immutable per-match local.
 */
override def extract(sentence: StringOption): Seq[TimeTmp] = {
  val timeRegex: Regex = """(紀元前|前)?([0-9]*)?(年間|年分|年にわたる|年に渡る|千年紀|世紀|年代|年|現代|近代|近現代|近世|中世|古代)?(前半|後半|初頭|末|初期|末期|初め|はじめ|終り|終わり|おわり)?(ころ|ごろ|頃|辺り|あたり|らへん|前後)?(以前|以後|から|まで)?""".r
  {for (m <- timeRegex.findAllMatchIn(
    TimeExtractionPreprocessor.convert(sentence).getOrElse("")
  ) if isTime(m)) yield {
    val time = Seq[String](m.group(0))
    val (beginOpt, endOpt): (Option[Int], Option[Int]) =
      reviseThird(
        reviseSecond(
          reviseFirst(
            parseTime(
              toIntOption(
                StringOption(m.group(2))
              ),
              m.group(3),
              StringOption(m.group(1))),
            m.group(4)),
          m.group(5)),
        m.group(6))
    new TimeTmp(
      beginOpt,
      endOpt,
      if (beginOpt.nonEmpty) time else Nil,
      if (endOpt.nonEmpty) time else Nil)
  }}.toSeq
}
/** Millennium ("千年紀"). With a BC clue: years [-1000n .. -1000n+999];
 *  otherwise the n-th millennium CE: begin (n-1)*1000+1, span 999. */
private def parse千年紀(numOpt: Option[Int], clueOpt: StringOption): BeginCountOptions = {
  numOpt match {
    case Some(n) =>
      if (clueOpt.nonEmpty) {
        (Option(n * -1000), Option(999))
      } else {
        (Option((n - 1) * 1000 + 1), Option(999))
      }
    case None =>
      (None, None)
  }
}

/** Century ("世紀"): same scheme as millennia with a 100-year span. */
private def parse世紀(numOpt: Option[Int], clueOpt: StringOption): BeginCountOptions = {
  numOpt match {
    case Some(n) =>
      if (clueOpt.nonEmpty) {
        (Option(n * -100), Option(99))
      } else {
        (Option((n - 1) * 100 + 1), Option(99))
      }
    case None =>
      (None, None)
  }
}

/** Decade ("年代"): the begin year is the decade value itself (e.g. 1940
 *  for "1940年代") with a 10-year span; BC decades count backwards over
 *  the same span. */
private def parse年代(numOpt: Option[Int], clueOpt: StringOption): BeginCountOptions = {
  numOpt match {
    case Some(n) =>
      if (clueOpt.nonEmpty) {
        (Option(n * -1 - 9), Option(10))
      } else {
        (Option(n), Option(10))
      }
    case None =>
      (None, None)
  }
}

/** Single year ("年"), zero span. BC years become negative. For CE years
 *  of fewer than 3 digits the century of the last full year seen is added
 *  (romanCalendar); a full year updates that century; a negative value in
 *  the non-BC branch falls back to 0. */
private def parse年(numOpt: Option[Int], clueOpt: StringOption): BeginCountOptions = {
  numOpt match {
    case Some(n) =>
      if (clueOpt.nonEmpty) {
        (Option(n * -1), Option(0))
      } else {
        val n2: Int = if (-100 < n && n < 100) {
          n + romanCalendar
        } else if (0 <= n) {
          romanCalendar = n / 100 * 100
          n
        } else {
          0
        }
        (Option(n2), Option(0))
      }
    case None =>
      (None, None)
  }
}
/**
 * Dispatches on the unit/era word (regex group 3) to produce a
 * (begin year, span) pair; numeric units delegate to the parsers above,
 * while named eras map to fixed year ranges (negative years are BC).
 */
private def parseTime(numOpt: Option[Int], clue1: String, clue2Opt: StringOption): BeginCountOptions = {
  clue1 match {
    case "千年紀" =>
      parse千年紀(numOpt, clue2Opt)
    case "世紀" =>
      parse世紀(numOpt, clue2Opt)
    case "年代" =>
      parse年代(numOpt, clue2Opt)
    case "年" =>
      parse年(numOpt, clue2Opt)
    case "現代" => // contemporary era: 1918 onward, open-ended
      (Option(1918), None)
    case "近代" => // modern era: 1700 to 1918
      (Option(1700), Option(1918 - 1700))
    case "近現代" => // modern-and-contemporary: 1700 onward, open-ended
      (Option(1700), None)
    case "近世" => // early modern: 1453 to 1700
      (Option(1453), Option(1700 - 1453))
    case "中世" => // medieval: 476 to 1453
      (Option(476), Option(1453 - 476))
    case "古代" => // ancient: 2600 BC to AD 476
      (Option(-2600), Option(476 + 2600))
    case _ =>
      (None, None)
  }
}
/**
 * Converts a (begin, span) pair into a concrete (begin, end) range while
 * applying the sub-period / direction clue (regex groups 4 and 6):
 * "以前/まで" keeps only the end, "以後/から" only the begin; halves take
 * 50% of the span, "beginning" the first 30%, "end" the last 30%; with no
 * clue the end is simply begin + span. Falls through to (None, None) when
 * no begin year is known.
 */
private def reviseFirst(beginCountOpt: BeginCountOptions, clue: String): BeginEndOptions = {
  val (beginOpt, countOpt) = beginCountOpt
  clue match {
    case "以前" | "まで" => beginOpt match {
      case Some(b) =>
        return (None, Option(b + countOpt.getOrElse(0)))
      case None =>
    }
    case "以後" | "から" =>
      return (beginOpt, None)
    case "前半" | "前期" => beginOpt match {
      case Some(b) =>
        return (beginOpt, Option(b + (countOpt.getOrElse(0) * 0.5).toInt))
      case None =>
    }
    case "後半" | "後期" => beginOpt match {
      case Some(b) =>
        val count = countOpt.getOrElse(0)
        return (Option(b + (count * 0.5).toInt), Option(b + count))
      case None =>
    }
    case "初頭" | "初期" | "初め" | "はじめ" => beginOpt match {
      case Some(b) =>
        return (beginOpt, Option(b + (countOpt.getOrElse(0) * 0.3).toInt))
      case None =>
    }
    case "末" | "末期" | "終り" | "終わり" | "おわり" => beginOpt match {
      case Some(b) =>
        val count = countOpt.getOrElse(0)
        return (Option(b + (count * 0.7).toInt), Option(b + count))
      case None =>
    }
    case _ => beginOpt match {
      case Some(b) =>
        return (beginOpt, Option(b + countOpt.getOrElse(0)))
      case None =>
    }
  }
  (None, None)
}
/**
 * Would widen the range for "around" clues (ころ/ごろ/頃/前後 ...), but
 * the widening margin is currently hard-coded to 0 — the 20% formula is
 * left commented out — so the range always passes through unchanged.
 */
private def reviseSecond(beginEndOpt: BeginEndOptions, clue: String): BeginEndOptions = {
  val (beginOpt, endOpt) = beginEndOpt
  clue match {
    case "ころ" | "ごろ" | "頃" | "辺り" | "あたり" | "らへん" | "前後" =>
      beginOpt match {
        case Some(b) =>
          endOpt match {
            case Some(e) =>
              val c = 0 //if (b == e) {5} else {((e - b) * 0.2).toInt}
              return (Option(b - c), Option(e + c))
            case None =>
          }
        case None =>
      }
    case _ =>
  }
  (beginOpt, endOpt)
}
/**
 * Final pass over the before/after clue (regex group 6): "以前/まで"
 * (until) keeps only the end year, "以後/から" (since) keeps only the
 * begin year; any other clue leaves the range untouched.
 */
private def reviseThird(beginEndOpt: BeginEndOptions, clue: String): BeginEndOptions =
  clue match {
    case "以前" | "まで" => (None, beginEndOpt._2)
    case "以後" | "から" => (beginEndOpt._1, None)
    case _              => beginEndOpt
  }
/**
 * A regex match counts as a time expression when it is non-empty, its
 * unit word is not a duration ("年間" etc.), and it either has both a
 * number and a unit, or names an era that needs no number.
 * NOTE(review): "近世" appears in the regex but not in the no-number era
 * set, so a bare "近世" is rejected — confirm whether that is intended.
 */
private def isTime(m: Match): Boolean = {
  StringOption(m.group(0)).nonEmpty &&
  !Set("年間", "年分", "年にわたる", "年に渡る").contains(m.group(3)) &&
  (
    (
      StringOption(m.group(2)).nonEmpty &&
      StringOption(m.group(3)).nonEmpty
    ) ||
    Set("現代", "近代", "近現代", "中世", "古代").contains(m.group(3))
  )
}
/**
 * Safe Int parse of the regex's number group: None when the group is
 * absent; on a malformed number the NumberFormatException's stack trace
 * is printed (kept as a diagnostic) and None is returned.
 */
private def toIntOption(numberCharactersOpt: StringOption): Option[Int] = {
  numberCharactersOpt match {
    case StringSome(n) =>
      try {
        Option(n.toInt)
      } catch {
        case e: NumberFormatException =>
          e.printStackTrace()
          None
      }
    case StringNone =>
      None
  }
}
}
| ktr-skmt/FelisCatusZero | src/main/scala/time/TimeExtractorInTimeExpression.scala | Scala | apache-2.0 | 7,956 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.api.deser
import scala.language.higherKinds
import java.util.UUID
import scala.collection.compat._
import scala.collection.immutable
import scala.collection.immutable.Seq
/**
 * A path param serializer is responsible for serializing and deserializing parameters that are extracted from and
 * formatted into paths.
 *
 * When used in URLs, a path param serializer is used both for path segments as well as query string parameters. It is
 * expected that the serializer will consume and return singleton sequences for path segments, but may return 0 to many
 * values for query string parameters.
 */
trait PathParamSerializer[Param] {
  /**
   * Serialize the given `parameter` into path parameters.
   *
   * @param parameter The parameter to serialize.
   * @return The raw path parameter values (a singleton sequence for path segments).
   */
  def serialize(parameter: Param): immutable.Seq[String]
  /**
   * Deserialize the `parameters` into a deserialized parameter.
   *
   * @param parameters The raw path parameter values to deserialize.
   * @return The deserialized parameter.
   */
  def deserialize(parameters: immutable.Seq[String]): Param
}
/** Companion object that provides the default serializer instances by mixing in [[DefaultPathParamSerializers]]. */
object PathParamSerializer extends DefaultPathParamSerializers
trait DefaultPathParamSerializers extends LowPriorityPathParamSerializers {

  /**
   * Create a PathParamSerializer for required parameters.
   *
   * @param name A human-readable name for the serialized type, used in error messages and toString.
   * @param deserializeFunction Parses one raw value into a Param.
   * @param serializeFunction Formats a Param back into its raw string form.
   */
  def required[Param](name: String)(
      deserializeFunction: String => Param
  )(serializeFunction: Param => String): PathParamSerializer[Param] =
    new NamedPathParamSerializer[Param](name) {
      def serialize(parameter: Param): immutable.Seq[String] =
        immutable.Seq(serializeFunction(parameter))

      def deserialize(parameters: immutable.Seq[String]): Param =
        parameters.headOption.fold[Param](
          throw new IllegalArgumentException(name + " parameter is required")
        )(deserializeFunction)
    }

  /** A String path parameter serializer. */
  implicit val StringPathParamSerializer: PathParamSerializer[String] = required("String")(identity)(identity)

  /** A Long path parameter serializer. */
  implicit val LongPathParamSerializer: PathParamSerializer[Long] = required("Long")(_.toLong)(_.toString)

  /** An Int path parameter serializer. */
  implicit val IntPathParamSerializer: PathParamSerializer[Int] = required("Int")(_.toInt)(_.toString)

  /** A Double path parameter serializer. */
  implicit val DoublePathParamSerializer: PathParamSerializer[Double] = required("Double")(_.toDouble)(_.toString)

  /** A Boolean path parameter serializer. */
  implicit val BooleanPathParamSerializer: PathParamSerializer[Boolean] = required("Boolean")(_.toBoolean)(_.toString)

  /** A UUID path parameter serializer. */
  implicit val UuidPathParamSerializer: PathParamSerializer[UUID] = required("UUID")(UUID.fromString)(_.toString)

  /**
   * An option path param serializer: an absent value serializes to no raw
   * parameters, and no raw parameters deserialize to None.
   */
  implicit def optionPathParamSerializer[Param](
      implicit delegate: PathParamSerializer[Param]
  ): PathParamSerializer[Option[Param]] = {
    val name = delegate match {
      case named: NamedPathParamSerializer[_] => s"Option[${named.name}]"
      case unnamed                            => s"Option($unnamed)"
    }
    new NamedPathParamSerializer[Option[Param]](name) {
      override def serialize(parameter: Option[Param]): Seq[String] =
        parameter.fold[Seq[String]](Nil)(delegate.serialize)

      override def deserialize(parameters: Seq[String]): Option[Param] =
        if (parameters.isEmpty) None else Some(delegate.deserialize(parameters))
    }
  }
}
trait LowPriorityPathParamSerializers {

  /** A PathParamSerializer that also carries a human-readable name for diagnostics. */
  sealed abstract class NamedPathParamSerializer[Param](val name: String) extends PathParamSerializer[Param] {
    override def toString: String = s"PathParamSerializer($name)"
  }

  /**
   * A traversable path param serializer: each element is serialized to its own
   * raw value, and each raw value is deserialized individually via the delegate.
   */
  implicit def traversablePathParamSerializer[CC[X] <: Traversable[X], Param: PathParamSerializer](
      implicit delegate: PathParamSerializer[Param],
      bf: Factory[Param, CC[Param]]
  ): PathParamSerializer[CC[Param]] = {
    val name = delegate match {
      case named: NamedPathParamSerializer[_] => s"Traversable[${named.name}]"
      case unnamed                            => s"Traversable($unnamed)"
    }
    new NamedPathParamSerializer[CC[Param]](name) {
      override def serialize(parameter: CC[Param]): Seq[String] = {
        val raw = parameter.flatMap(delegate.serialize)
        raw.toIndexedSeq
      }

      override def deserialize(parameters: Seq[String]): CC[Param] = {
        val elements = parameters.iterator.map(raw => delegate.deserialize(Seq(raw)))
        bf.fromSpecific(elements)
      }
    }
  }
}
| rcavalcanti/lagom | service/scaladsl/api/src/main/scala/com/lightbend/lagom/scaladsl/api/deser/PathParamSerializer.scala | Scala | apache-2.0 | 4,701 |
/*
* Copyright 2017 helloscala.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package helloscala.common.types
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import com.fasterxml.jackson.annotation.{JsonIgnore, JsonProperty, JsonTypeName}
import helloscala.common.util.StringUtils
import scala.util.{Failure, Try}
/**
* BSON ObjectId value.
*
* +------------------------+------------------------+------------------------+------------------------+
* + timestamp (in seconds) + machine identifier + thread identifier + increment +
* + (4 bytes) + (3 bytes) + (2 bytes) + (3 bytes) +
* +------------------------+------------------------+------------------------+------------------------+
*/
@JsonTypeName("string")
@SerialVersionUID(239421902L) //@ApiModel(parent = classOf[String])
class ObjectId private (private val raw: Array[Byte]) extends Serializable with Equals {

  /** ObjectId hexadecimal String representation (computed once, lazily). */
  @JsonIgnore
  lazy val stringify: String = Converters.hex2Str(raw)

  override def toString() = stringify

  override def canEqual(that: Any): Boolean = that.isInstanceOf[ObjectId]

  /** Two ObjectIds are equal exactly when their 12 raw bytes match. */
  override def equals(that: Any): Boolean = that match {
    case other: ObjectId => java.util.Arrays.equals(raw, other.raw)
    case _               => false
  }

  @JsonIgnore
  override lazy val hashCode: Int = java.util.Arrays.hashCode(raw)

  /** The time of this BSONObjectId, in milliseconds */
  def time: Long = timeSecond * 1000L

  /** The time of this BSONObjectId, in seconds (first 4 bytes, big endian). */
  def timeSecond: Int = ByteBuffer.wrap(raw, 0, 4).getInt

  /** A defensive copy of the 12 raw bytes. */
  def valueAsArray: Array[Byte] = java.util.Arrays.copyOf(raw, 12)
}
object ObjectId {

  /** Length of an ObjectId's hexadecimal string form (12 bytes * 2 characters). */
  val STR_LENGTH = 24

  // The counter occupies the last 3 bytes of an id, so it wraps at 2^24.
  private val maxCounterValue = 16777216
  private val increment = new java.util.concurrent.atomic.AtomicInteger(scala.util.Random.nextInt(maxCounterValue))

  // Next counter value in [0, 2^24); the starting point is randomized per JVM.
  private def counter() =
    (increment.getAndIncrement + maxCounterValue) % maxCounterValue

  /**
   * The following implementation of machineId works around openjdk limitations in
   * version 6 and 7
   *
   * Openjdk fails to parse /proc/net/if_inet6 correctly to determine macaddress
   * resulting in SocketException thrown.
   *
   * Please see:
   * * https://github.com/openjdk-mirror/jdk7u-jdk/blob/feeaec0647609a1e6266f902de426f1201f77c55/src/solaris/native/java/net/NetworkInterface.c#L1130
   * * http://lxr.free-electrons.com/source/net/ipv6/addrconf.c?v=3.11#L3442
   * * http://lxr.free-electrons.com/source/include/linux/netdevice.h?v=3.11#L1130
   * * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7078386
   *
   * and fix in openjdk8:
   * * http://hg.openjdk.java.net/jdk8/tl/jdk/rev/b1814b3ea6d3
   */
  private val machineId = {
    import java.net._
    def p(n: String) = System.getProperty(n)

    // NOTE(review): substring(0, 3) of "java.version" yields e.g. "11." on modern JVMs,
    // whose toFloat throws inside this Try, so validPlatform becomes false and the
    // thread-id fallback below is used on Java 9+ — confirm this is intended.
    val validPlatform = Try {
      val correctVersion = p("java.version").substring(0, 3).toFloat >= 1.8
      val noIpv6 = p("java.net.preferIPv4Stack").toBoolean
      val isLinux = p("os.name") == "Linux"
      !isLinux || correctVersion || noIpv6
    }.getOrElse(false)

    // Check java policies
    // NOTE(review): when no SecurityManager is installed, getSecurityManager returns null,
    // the Try fails with an NPE, and permitted == false — the opposite of the usual
    // "no security manager means everything permitted" convention; verify intent.
    val permitted =
      Try(System.getSecurityManager.checkPermission(new NetPermission("getNetworkInformation"))).toOption.exists(_ =>
        true)

    if (validPlatform && permitted) {
      // First 3 bytes of md5(hardware address), falling back to md5(host name bytes).
      val networkInterfacesEnum = NetworkInterface.getNetworkInterfaces
      val networkInterfaces = scala.collection.JavaConverters
        .enumerationAsScalaIteratorConverter(networkInterfacesEnum)
        .asScala
      val ha = networkInterfaces
        .find(ha =>
          Try(ha.getHardwareAddress).isSuccess && ha.getHardwareAddress != null && ha.getHardwareAddress.length == 6)
        .map(_.getHardwareAddress)
        .getOrElse(InetAddress.getLocalHost.getHostName.getBytes(StandardCharsets.UTF_8))
      Converters.md5(ha).take(3)
    } else {
      // Fallback: derive the 3 machine-id bytes from the current thread id (low endian).
      val threadId = Thread.currentThread.getId.toInt
      val arr = new Array[Byte](3)
      arr(0) = (threadId & 0xFF).toByte
      arr(1) = (threadId >> 8 & 0xFF).toByte
      arr(2) = (threadId >> 16 & 0xFF).toByte
      arr
    }
  }

  // implicit def string2ObjectId(id: String): ObjectId = apply(id)

  // Java API
  def create(id: String): ObjectId = apply(id)

  // Java API
  def create(array: Array[Byte]): ObjectId = apply(array)

  /**
   * Constructs a BSON ObjectId element from a hexadecimal String representation.
   * Throws an exception if the given argument is not a valid ObjectID.
   */
  def apply(id: String): ObjectId = parse(id) match {
    case scala.util.Success(value) => value
    case scala.util.Failure(e)     => throw e
  }

  /** Wraps a defensive copy of the 12 given bytes; throws if the length is wrong. */
  def apply(array: Array[Byte]): ObjectId = {
    if (array.length != 12)
      throw new IllegalArgumentException(s"wrong byte array for an ObjectId (size ${array.length})")
    new ObjectId(java.util.Arrays.copyOf(array, 12))
  }

  def unapply(id: ObjectId): Option[Array[Byte]] = Some(id.valueAsArray)

  /** Tries to make a BSON ObjectId from a hexadecimal string representation. */
  def parse(id: String): Try[ObjectId] = {
    if (isValid(id)) Try(new ObjectId(Converters.str2Hex(id)))
    else
      Failure(new IllegalArgumentException(s"Wrong ObjectId (It is not a valid 16 Decimal 24 bit string): '$id'"))
  }

  /** A valid id is non-blank, exactly 24 characters, all hexadecimal. */
  def isValid(id: String): Boolean = {
    StringUtils.isNoneBlank(id) && id.length == 24 && id.forall(Converters.isHex)
  }

  /** True when every id in the collection is valid (vacuously true when empty). */
  def isValid(ids: Iterable[String]): Boolean = {
    ids.forall(isValid)
  }

  // The Chinese message "格式无效" means "invalid format".
  @inline def validation(id: String, msgPrefix: String = ""): Unit = {
    require(isValid(id), s"$msgPrefix,$id 格式无效")
  }

  @inline def validation(ids: Iterable[String], msgPrefix: String): Unit = {
    if (ids.nonEmpty) {
      ids.foreach(id => validation(id, msgPrefix))
    }
  }

  /**
   * Generates a new BSON ObjectID using the current time.
   *
   * @see [[fromTime]]
   */
  def generate(): ObjectId = get()

  def get(): ObjectId =
    fromTime(System.currentTimeMillis, fillOnlyTimestamp = false)

  def getString(): String = get().toString()

  /**
   * Generates a new BSON ObjectID from the given timestamp in milliseconds.
   *
   * The included timestamp is the number of seconds since epoch, so a ObjectId time part has only
   * a precision up to the second. To get a reasonably unique ID, you _must_ set `onlyTimestamp` to false.
   *
   * Crafting a ObjectId from a timestamp with `fillOnlyTimestamp` set to true is helpful for range queries,
   * eg if you want of find documents an _id field which timestamp part is greater than or lesser than
   * the one of another id.
   *
   * If you do not intend to use the produced ObjectId for range queries, then you'd rather use
   * the `generate` method instead.
   *
   * @param fillOnlyTimestamp if true, the returned ObjectId will only have the timestamp bytes set; the other will be set to zero.
   */
  def fromTime(timeMillis: Long, fillOnlyTimestamp: Boolean = true): ObjectId = {
    // n of seconds since epoch. Big endian
    val timestamp = (timeMillis / 1000).toInt
    val id = new Array[Byte](12)
    id(0) = (timestamp >>> 24).toByte
    id(1) = (timestamp >> 16 & 0xFF).toByte
    id(2) = (timestamp >> 8 & 0xFF).toByte
    id(3) = (timestamp & 0xFF).toByte
    if (!fillOnlyTimestamp) {
      // machine id, 3 first bytes of md5(macadress or hostname)
      id(4) = machineId(0)
      id(5) = machineId(1)
      id(6) = machineId(2)
      // 2 bytes of the pid or thread id. Thread id in our case. Low endian
      val threadId = Thread.currentThread.getId.toInt
      id(7) = (threadId & 0xFF).toByte
      id(8) = (threadId >> 8 & 0xFF).toByte
      // 3 bytes of counter sequence, which start is randomized. Big endian
      val c = counter()
      id(9) = (c >> 16 & 0xFF).toByte
      id(10) = (c >> 8 & 0xFF).toByte
      id(11) = (c & 0xFF).toByte
    }
    ObjectId(id)
  }
}
/** Hexadecimal and MD5 helper functions. */
object Converters {
  private val HEX_CHARS: Array[Char] = "0123456789abcdef".toCharArray
  private val HEX_CHAR_SETS = Set
    .empty[Char] ++ ('0' to '9') ++ ('a' to 'f') ++ ('A' to 'F')

  /** True when `c` is a hexadecimal digit of either case. */
  def isHex(c: Char): Boolean = HEX_CHAR_SETS(c)

  /** Turns an array of Byte into a lowercase hexadecimal String. */
  def hex2Str(bytes: Array[Byte]): String = {
    val out = new StringBuilder(2 * bytes.length)
    for (b <- bytes) {
      out.append(HEX_CHARS((b & 0xF0) >>> 4))
      out.append(HEX_CHARS(b & 0x0F))
    }
    out.toString
  }

  /**
   * Turns a hexadecimal String into an array of Byte.
   * A trailing unpaired character (odd-length input) is ignored, matching
   * the original pairwise parsing.
   */
  def str2Hex(str: String): Array[Byte] =
    Array.tabulate(str.length / 2) { i =>
      Integer.parseInt(str.substring(2 * i, 2 * i + 2), 16).toByte
    }

  /**
   * Returns the MD5 hash for the given `string`,
   * rendered as a lowercase hexadecimal String.
   *
   * @param string the string to be hashed
   * @param encoding the string encoding/charset
   */
  def md5Hex(string: String, encoding: String): String =
    hex2Str(md5(string, encoding))

  /**
   * Returns the MD5 hash of the given `string`.
   *
   * @param string the string to be hashed
   * @param encoding the string encoding/charset
   */
  def md5(string: String, encoding: String): Array[Byte] =
    md5(string.getBytes(encoding))

  /** Computes the MD5 hash of the given `bytes`. */
  def md5(bytes: Array[Byte]): Array[Byte] =
    java.security.MessageDigest.getInstance("MD5").digest(bytes)
}
| helloscala/helloscala | hs-core/src/main/scala/helloscala/common/types/ObjectId.scala | Scala | apache-2.0 | 10,109 |
/** Mixin supplying a constant `x`; the base trait `B` is defined elsewhere. */
trait C extends B {
  def x: Int = 3
}
package com.seanshubin.todo.sample.client
import org.apache.http.entity.AbstractHttpEntity
import java.io.{ByteArrayInputStream, OutputStream, InputStream}
import com.seanshubin.todo.sample.core.http.Content
/**
 * An HTTP entity backed by an in-memory byte array, so the request body can
 * be replayed on retries (hence isRepeatable == true).
 */
class RepeatableEntity(bytes: Array[Byte], theContentType: String) extends AbstractHttpEntity {
  setContentType(theContentType)

  override def isRepeatable: Boolean = true

  override def isStreaming: Boolean = false

  override def getContentLength: Long = bytes.length.toLong

  override def getContent: InputStream = new ByteArrayInputStream(bytes)

  override def writeTo(outputStream: OutputStream): Unit = {
    outputStream.write(bytes)
  }
}
object RepeatableEntity {
  /** Builds a RepeatableEntity from Content, propagating its charset when present. */
  def create(content: Content): RepeatableEntity = {
    val entity = new RepeatableEntity(content.bytes, content.contentType)
    for (charsetName <- content.maybeCharsetName) {
      entity.setContentEncoding(charsetName)
    }
    entity
  }
}
| SeanShubin/javascript-todo-samples | client/src/main/scala/com/seanshubin/todo/sample/client/RepeatableEntity.scala | Scala | unlicense | 951 |
package org.pfcoperez.scalawk.states
import org.pfcoperez.scalawk.AwkCommand
import org.pfcoperez.scalawk.entities.SideEffectStatement
/**
 * A solid command state whose terminal (END) program is replaced by the
 * given statements, inheriting everything else from the previous state.
 *
 * @param lastAction statements to run as the command's final action
 * @param prevSt the previous solid command state this one extends
 */
class SolidCommandWithLastAction(lastAction: Seq[SideEffectStatement])(prevSt: SolidCommand)
  extends SolidCommand(prevSt) {
  // Override only the end-of-program statements; all other state comes from prevSt.
  override val endProgram: Seq[SideEffectStatement] = lastAction
}
| pfcoperez/scalawk | src/main/scala/org/pfcoperez/scalawk/states/SolidCommandWithLastAction.scala | Scala | mit | 332 |
package com.lucidchart.open.nark.models.records
import java.util.UUID
/**
 * A single plotted series on a graph.
 *
 * The summarizer values (avg/sum/max/min/last) suggest `target` is a Graphite
 * target expression — TODO confirm against the query layer.
 *
 * @param id unique identifier of this target record
 * @param graphId the graph this target belongs to
 * @param name display name of the series
 * @param target the target/query expression for this series
 * @param summarizer how datapoints are aggregated (see [[TargetSummarizer]])
 * @param deleted soft-delete flag
 */
case class Target(
  id: UUID,
  graphId: UUID,
  name: String,
  target: String,
  summarizer: TargetSummarizer.Value,
  deleted: Boolean
) extends AppRecord {
  /**
   * Create a new Target record for inserting into the database:
   * generates a random id and starts with deleted = false.
   */
  def this(graphId: UUID, name: String, target: String, summarizer: TargetSummarizer.Value) = this(UUID.randomUUID(), graphId, name, target, summarizer, false)
}
/**
 * Ways a target's datapoints can be summarized; the string forms
 * (avg, sum, max, min, last) are the display/wire names.
 * NOTE(review): the explicit numeric ids suggest they are persisted —
 * keep them stable and do not reorder; confirm against the storage layer.
 */
object TargetSummarizer extends Enumeration {
  val average = Value(0, "avg")
  val sum = Value(1, "sum")
  val max = Value(2, "max")
  val min = Value(3, "min")
  val last = Value(4, "last")
}
| lucidsoftware/nark | app/com/lucidchart/open/nark/models/records/Target.scala | Scala | apache-2.0 | 666 |
package org.somelightprojections.skeres.examples
import com.google.ceres.{NumericDiffMethodType, PredefinedLossFunctions, Solver, ceres}
import org.somelightprojections.skeres._
object HelloWorldNumericDiff {

  /** Residual functor for f(x) = 10 - x: one 1-D parameter block, one residual. */
  object HelloCostFunctor extends NumericDiffCostFunctor(1, 1) {
    override def apply(x: Array[Double]*): Array[Double] = Array(10.0 - x(0)(0))
  }

  def main(args: Array[String]): Unit = {
    ceres.initGoogleLogging("HelloWorld")

    // Single optimization variable, seeded at 0.5.
    val x = RichDoubleArray.ofSize(1)
    x.set(0, 0.5)

    // Minimize 0.5 * (10 - x)^2 with central numeric differentiation
    // and no (trivial) loss function.
    val problem = new Problem
    problem.addResidualBlock(
      HelloCostFunctor.toNumericDiffCostFunction(NumericDiffMethodType.CENTRAL),
      PredefinedLossFunctions.trivialLoss,
      x.toPointer
    )

    val solverOptions = new Solver.Options
    solverOptions.setMinimizerProgressToStdout(true)
    val summary = new Solver.Summary
    ceres.solve(solverOptions, problem, summary)
    println(summary.briefReport)
  }
}
| fgcallari/skeres | examples/src/main/scala/org/somelightprojections/skeres/examples/HelloWorldNumericDiff.scala | Scala | bsd-3-clause | 982 |
/*
* Copyright 2014 Joshua R. Rodgers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================================
*/
package com.theenginerd.ccscada.peripheral
import net.minecraftforge.common.ForgeDirection
import dan200.computer.api.IComputerAccess
import powercrystals.minefactoryreloaded.api.rednet.IRedNetNetworkContainer
import net.minecraft.block.Block
import com.theenginerd.ccscada.util.AsType
import com.theenginerd.ccscada.util.Conversions._
trait RedNetCableSupport extends Peripheral
{
    // One power level per RedNet subnet ("color"); there are 16 subnets per side.
    private val defaultValues = Vector.fill(16)(0)

    // Last known values per side; access is guarded by this.synchronized in the accessors below.
    private var outputValues: Map[ForgeDirection, Vector[Int]] = Map()
    private var inputValues: Map[ForgeDirection, Vector[Int]] = Map()

    //Bundled Input method handlers
    registerMethod("getBundledInput", getBundledInput)
    registerMethod("getBundledOutput", getBundledOutput)
    registerMethod("setBundledOutput", setBundledOutput)
    registerMethod("testBundledInput", testBundledInput)

    //Individual subnet handlers
    registerMethod("setSubnetOutput", setSubnetOutput)
    registerMethod("getSubnetOutput", getSubnetOutput)
    registerMethod("getSubnetInput", getSubnetInput)

    /** The 16 input values for `side`, or all zeros if that side has never been set. */
    def getInputValues(side: ForgeDirection) =
        this.synchronized
        {
            inputValues.getOrElse(side, defaultValues)
        }

    /** The 16 output values for `side`, or all zeros if that side has never been set. */
    def getOutputValues(side: ForgeDirection) =
        this.synchronized
        {
            outputValues.getOrElse(side, defaultValues)
        }

    /** Records new input values for `side`. */
    def setInputValues(side: ForgeDirection, values: Vector[Int]) =
        this.synchronized
        {
            inputValues += (side -> values)
        }

    /**
     * Records new output values for `side`, then notifies the neighboring RedNet
     * cable on that side (outside the lock, via the peripheral's execute hook).
     */
    def setOutputValues(side: ForgeDirection, values: Vector[Int]) =
    {
        this.synchronized
        {
            outputValues += (side -> values)
        }

        execute
        {
            notifyNeighborOnSideOfUpdate(side)
        }
    }

    // Asks the RedNet network container adjacent on outputSide (if any) to refresh itself.
    private def notifyNeighborOnSideOfUpdate(outputSide: ForgeDirection) =
    {
        def updateNeighborCable(x: Int, y: Int, z: Int) =
        {
            val blockId = getWorld.getBlockId(x, y, z)
            val block = Option(Block.blocksList(blockId))

            // Only blocks implementing IRedNetNetworkContainer are updated.
            for(AsType(cable: IRedNetNetworkContainer) <- block)
                cable.updateNetwork(getWorld, x, y, z)
        }

        updateNeighborCable(xCoordinate + outputSide.offsetX, yCoordinate + outputSide.offsetY, zCoordinate + outputSide.offsetZ)
    }

    /**
     * Peripheral method: getBundledInput(side) -> bitmask with one bit per powered subnet.
     * The side name String is implicitly converted to a ForgeDirection (see Conversions).
     */
    private def getBundledInput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, _*) =>
                Array(convertToBundleState(getInputValues(sideName)).asInstanceOf[AnyRef])

            case _ =>
                throw new Exception("Invalid argument (side).")
        }
    }

    /** Peripheral method: getBundledOutput(side) -> bitmask with one bit per powered subnet. */
    private def getBundledOutput(computer: IComputerAccess, arguments: Array[AnyRef]) =
    {
        arguments match
        {
            case Array(sideName: String, _*) =>
                Array(convertToBundleState(getOutputValues(sideName)).asInstanceOf[AnyRef])

            case _ =>
                throw new Exception("Invalid argument (side).")
        }
    }

    // Folds the 16 subnet levels into a bitmask: bit i is set iff subnet i is powered (> 0).
    private def convertToBundleState(array: Vector[Int]) =
    {
        array.view
            .zipWithIndex
            .foldLeft(0)
            {
                case (accumulator, (value, index)) =>
                    accumulator | (if (value > 0) 0x1 << index else 0)
            }
    }

    /**
     * Peripheral method: setBundledOutput(side, colors). Every subnet whose bit is
     * set in `colors` is driven at full strength (15); all other subnets are reset to 0.
     */
    private def setBundledOutput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, colors: java.lang.Double, _*) =>
                val value = colors.toInt
                var result = defaultValues

                for (index <- 0 to 15)
                {
                    if (((value >> index) & 0x1) == 0x1)
                    {
                        result = result.updated(index, 15)
                    }
                }

                setOutputValues(sideName, result)
                null

            case _ =>
                throw new Exception("Invalid arguments (side, colors).")
        }
    }

    /**
     * Peripheral method: testBundledInput(side, color) -> whether the single subnet
     * selected by the power-of-two `color` mask currently carries power.
     */
    private def testBundledInput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, color: java.lang.Double, _*) =>
                // The exponent of the power-of-two color mask is the subnet index.
                val index = Math.getExponent(color.toDouble)
                val inputValues = getInputValues(sideName)

                // Fixed off-by-one: the guard previously used `index > inputValues.length`,
                // which let index == length through to throw IndexOutOfBoundsException below.
                if(index < 0 || index >= inputValues.length)
                    throw new Exception("Invalid argument (color).")

                Array((inputValues(index) > 0).asInstanceOf[AnyRef])

            case _ =>
                throw new Exception("Invalid arguments (side, color).")
        }
    }

    /**
     * Peripheral method: setSubnetOutput(side, subnet, value) — drives one subnet directly.
     * NOTE: an out-of-range subnet index surfaces as IndexOutOfBoundsException from
     * Vector.updated (pre-existing behavior).
     */
    private def setSubnetOutput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, subnet: java.lang.Double, value: java.lang.Double, _*) =>
                val outputs = getOutputValues(sideName)
                setOutputValues(sideName, outputs.updated(subnet.toInt, value.toInt))
                null

            case _ =>
                throw new Exception("Invalid arguments (side, subnet)")
        }
    }

    /** Peripheral method: getSubnetOutput(side, subnet) -> the value driven on one subnet. */
    private def getSubnetOutput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, subnet: java.lang.Double, _*) =>
                val outputs = getOutputValues(sideName)
                Array(outputs(subnet.toInt).asInstanceOf[AnyRef])

            case _ =>
                throw new Exception("Invalid arguments (side, subnet)")
        }
    }

    /** Peripheral method: getSubnetInput(side, subnet) -> the value received on one subnet. */
    private def getSubnetInput(computer: IComputerAccess, arguments: Array[AnyRef]): Array[AnyRef] =
    {
        arguments match
        {
            case Array(sideName: String, subnet: java.lang.Double, _*) =>
                val outputs = getInputValues(sideName)
                Array(outputs(subnet.toInt).asInstanceOf[AnyRef])

            case _ =>
                throw new Exception("Invalid arguments (side, subnet)")
        }
    }
}
| Mr-Byte/ccSCADA | src/main/scala/com/theenginerd/ccscada/peripheral/RedNetCableSupport.scala | Scala | apache-2.0 | 6,808 |
Subsets and Splits
Filtered Scala Code Snippets
	The query filters the dataset down to a sample of code snippets matching specific criteria, giving a quick overview of the dataset's contents without deeper analysis.