| code (string, lengths 5–1M) | repo_name (string, lengths 5–109) | path (string, lengths 6–208) | language (1 class) | license (15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
// IntelliJ type-inference testdata: the expression between /*start*/ and /*end*/
// must infer to the type named on the trailing //-comment line (here: Unit).
val x: () = ()
/*start*/x/*end*/
//Unit | ilinum/intellij-scala | testdata/typeInference/literals/UnitType.scala | Scala | apache-2.0 | 39 |
/**
* Copyright 2011 Green Energy Corp.
*
* Licensed to Green Energy Corp (www.greenenergycorp.com) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. Green Energy
* Corp licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.totalgrid.reef.protocol.dnp3
import org.totalgrid.reef.protocol.dnp3.mock._
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.FunSuite
@RunWith(classOf[JUnitRunner])
@RunWith(classOf[JUnitRunner])
class DNP3BindingTest extends FunSuite with ShouldMatchers {

  // First TCP port used by the servers/clients these tests create.
  val startPort = 32323
  val lev = FilterLevel.LEV_INFO

  /** Collects the StackStates callbacks delivered to each named stack observer. */
  class MockStateObserver {
    import scala.collection.mutable.{ Queue, Map }

    // stack name -> states received for that stack, in arrival order
    val states = Map.empty[String, Queue[StackStates]]
    // Strong references to every observer handed out; without these the JVM
    // could garbage-collect the observers while the native layer still uses them.
    var observers = List.empty[IStackObserver]

    /** Returns (and retains) an observer that records state changes under `name`. */
    def getObserver(name: String): IStackObserver = {
      val obs = new IStackObserver {
        override def OnStateChange(state: StackStates) {
          // Fix: previously `case Some(l: Queue[StackStates])`, an unchecked
          // pattern match on an erased generic type. The map is already typed,
          // so fetch-or-create the queue and append directly (same behavior).
          states.getOrElseUpdate(name, Queue.empty[StackStates]).enqueue(state)
        }
      }
      // need to keep a reference to the observer so it doesn't get GCed
      observers ::= obs
      obs
    }

    /** Asserts that every stack in `names` saw exactly the `expected` state sequence. */
    def checkStates(names: List[String], expected: List[StackStates]) {
      names.foreach { name =>
        states.get(name).map(_.toList).getOrElse(List.empty[StackStates]) should equal(expected)
      }
    }
  }

  /** Runs `testFun` against a fresh StackManager, always shutting it down afterwards. */
  def fixture(testFun: StackManager => Unit) = {
    val sm = new StackManager
    try {
      testFun(sm)
    } finally {
      sm.Shutdown()
    }
  }

  /// This test shows that the startup/teardown behavior is working without crashing
  test("StartupTeardownOnJVM") {
    val num_port = 100
    val num_stack = 10
    fixture { sm =>
      val stateObserver = new MockStateObserver
      var names = List.empty[String]
      // startup <num_stack> masters on <num_port> ports
      (1 to num_port).foreach { port =>
        val s = new PhysLayerSettings(FilterLevel.LEV_WARNING, 1000)
        sm.AddTCPv4Client(port.toString, s, "127.0.0.1", startPort)
        (1 to num_stack).foreach { stack =>
          val name = "port-" + port + "-stack" + stack
          names ::= name
          val cfg = new MasterStackConfig
          cfg.getLink.setLocalAddr(stack)
          cfg.getMaster.setMpObserver(stateObserver.getObserver(name))
          // the masters won't get any data, so setting the IPublisher to null is OK
          sm.AddMaster(port.toString, name, FilterLevel.LEV_WARNING, null, cfg)
        }
      }
      // nothing is listening on startPort, so every stack should only report COMMS_DOWN
      stateObserver.checkStates(names, List(StackStates.SS_COMMS_DOWN))
    }
  }

  test("MasterToSlaveOnJVM") {
    val num_pairs = 100
    val port_start = startPort
    val port_end = port_start + num_pairs - 1
    val counter = new CountingPublisher
    val stateObserver = new MockStateObserver
    var names = List.empty[String]
    fixture { sm =>
      // sm.AddLogHook(adapter)
      val master = new MasterStackConfig
      master.getMaster.setIntegrityRate(60000)
      val slave = new SlaveStackConfig
      slave.setDevice(new DeviceTemplate(100, 100, 100))
      val s = new PhysLayerSettings(lev, 1000)
      // one master/slave pair per port in [port_start, port_end]
      (port_start to port_end).foreach { port =>
        val client = "client-" + port
        val server = "server-" + port
        sm.AddTCPv4Client(client, s, "127.0.0.1", port)
        sm.AddTCPv4Server(server, s, "0.0.0.0", port)
        master.getMaster.setMpObserver(stateObserver.getObserver(server))
        names ::= server
        sm.AddMaster(client, client, lev, counter.newPublisher, master)
        sm.AddSlave(server, server, lev, null, slave)
      }
      counter.waitForMinMessages(300, 10000) should equal(true)
    }
    // make sure we got the down-up-down callbacks we expected
    stateObserver.checkStates(names, List(StackStates.SS_COMMS_DOWN, StackStates.SS_COMMS_UP, StackStates.SS_COMMS_DOWN))
  }
}
| cverges/dnp3 | DNP3Java/src/test/scala/DNP3BindingTest.scala | Scala | apache-2.0 | 4,585 |
package jsky.app.ot.gemini.editor.auxfile
import edu.gemini.auxfile.api.AuxFileException
import jsky.util.gui.Resources
import scala.collection.JavaConverters._
import scala.swing.{Component, Dialog}
/** Action that deletes the currently selected auxiliary file attachments on the server. */
class RemoveAction(c: Component, model: AuxFileModel) extends AuxFileAction("Remove", c, model) {
  icon = Resources.getIcon("eclipse/remove.gif")
  toolTip = "Delete the selected file attachment."

  // Message shown to the user when the remote delete fails.
  override def interpret(ex: AuxFileException) =
    s"Sorry, there was an error while removing files: '${ex.getMessage}'"

  // Enabled only when the base action allows it and the current selection is non-empty.
  override def currentEnabledState: Boolean = super.currentEnabledState &&
    model.currentSelection.exists(!_.isEmpty)

  // Shows a confirmation dialog; "Remove" confirms, "Cancel" (the default) or closing aborts.
  private def confirmed: Boolean = {
    Dialog.showOptions(c, "Remove selected files on the server?", "Remove Files?", Dialog.Options.YesNo, Dialog.Message.Question, null, List("Remove", "Cancel"), 1) match {
      case Dialog.Result.Yes => true
      case Dialog.Result.Ok => true
      case _ => false
    }
  }

  // Deletes the selected files by name on the server, but only if the user confirmed.
  override def apply() {
    exec(model.currentSelection.filter(_ => confirmed)) { (client, pid, selection) =>
      client.delete(pid, selection.map(_.getName).asJavaCollection)
    }
  }
}
| spakzad/ocs | bundle/jsky.app.ot/src/main/scala/jsky/app/ot/gemini/editor/auxfile/RemoveAction.scala | Scala | bsd-3-clause | 1,160 |
package x7c1.wheat.modern.callback
import x7c1.wheat.modern.kinds.CallbackBase
import scala.concurrent.{Future, Promise}
/**
 * Wraps a callback-registering function so it can be composed and bridged to Futures.
 *
 * @param callback a function that, when given a handler, arranges for events to be
 *                 delivered to it (possibly synchronously).
 */
class CallbackTask[EVENT](
  callback: (EVENT => Unit) => Unit) extends CallbackBase[EVENT] {

  override type This[A] = CallbackTask[A]

  /** Runs the underlying callback, delivering each event to `f`. */
  override def apply(f: EVENT => Unit): Unit = callback(f)

  /** Fires the callback, discarding any events it produces. */
  def execute(): Unit = callback(_ => ())

  /**
   * Bridges this callback into a Future completed by the first delivered event.
   * A throw during registration fails the Future instead of propagating.
   */
  def toFuture: Future[EVENT] = {
    val p = Promise[EVENT]()
    try {
      callback(event => p trySuccess event)
    } catch {
      case e: Throwable =>
        p tryFailure e
    }
    p.future
  }
}
object CallbackTask {
  import scala.language.implicitConversions

  /** Implicitly lifts a callback-registering function into a [[CallbackTask]]. */
  implicit def apply[EVENT](execute: (EVENT => Unit) => Unit): CallbackTask[EVENT] = {
    new CallbackTask(execute)
  }

  // Forwards to TaskProvider (defined elsewhere); presumably the entry point
  // for the task-building DSL — confirm against TaskProvider's definition.
  def task = TaskProvider
}
| x7c1/Linen | wheat-modern/src/main/scala/x7c1/wheat/modern/callback/CallbackTask.scala | Scala | mit | 803 |
package com.example.akkaTcpChat
import akka.actor.{Props, Actor}
import akka.io.{IO, Tcp}
import java.net.InetSocketAddress
import com.example.akkaTcpChat.handler.Hub
import akka.event.Logging
object Server {
  /** Builds the Props needed to spawn a [[Server]] actor bound to `host`. */
  def props(host: InetSocketAddress): Props =
    Props(new Server(host))
}
/** TCP chat server actor: binds to `host` and registers each new connection with the hub. */
class Server(host: InetSocketAddress) extends Actor {
  import context.system

  val log = Logging(context.system, this)

  // Ask the TCP manager to bind; Bound/CommandFailed arrives as a message below.
  IO(Tcp) ! Tcp.Bind(self, host)

  // Single hub actor that tracks all connected clients.
  val hub = context.actorOf(Props[Hub])

  def receive = {
    case b @ Tcp.Bound(localAddress) =>
      log.debug("Spawned " + localAddress.toString)
    case Tcp.CommandFailed(_: Tcp.Bind) =>
      // Fix: was println — route through the actor's logger like the other branches.
      log.error("Command failed. Going to stop server.")
      context stop self
    case c @ Tcp.Connected(remote, local) =>
      log.debug("Connected " + remote.toString)
      // sender() here is the per-connection Tcp actor; register it with the hub.
      hub ! Hub.Register(remote.toString, sender())
  }
}
| spoconnor/ElixirMessagingServer | ScalaServer/src/main/scala/Server.scala | Scala | mit | 877 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.propspec
import org.scalatest._
import SharedHelpers._
import events.TestFailed
import org.scalactic.exceptions.NullArgumentException
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestRegistrationClosedException
import org.scalatest
import org.scalatest.propspec
class FixturePropSpecSpec extends scalatest.funspec.AnyFunSpec {
describe("A fixture.PropSpec") {
// testNames must preserve registration order, and an empty suite yields no names.
it("should return the test names in order of registration from testNames") {
  val a = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = Succeeded
    property("that") { fixture =>
      /* ASSERTION_SUCCEED */
    }
    property("this") { fixture =>
      /* ASSERTION_SUCCEED */
    }
  }
  assertResult(List("that", "this")) {
    a.testNames.iterator.toList
  }

  val b = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = Succeeded
  }
  // No registrations => no test names.
  assertResult(List[String]()) {
    b.testNames.iterator.toList
  }

  // Same tests registered in the opposite order => names in the opposite order.
  val c = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = Succeeded
    property("this") { fixture =>
      /* ASSERTION_SUCCEED */
    }
    property("that") { fixture =>
      /* ASSERTION_SUCCEED */
    }
  }
  assertResult(List("this", "that")) {
    c.testNames.iterator.toList
  }
}
// Duplicate names must be rejected for every combination of property/ignore.
// NOTE(review): the description says NotAllowedException but the code expects
// DuplicateTestNameException — confirm which name the spec should use.
it("should throw NotAllowedException if a duplicate test name registration is attempted") {
  // property followed by property with the same name
  intercept[DuplicateTestNameException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      property("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
      property("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
    }
  }
  // property followed by ignore with the same name
  intercept[DuplicateTestNameException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      property("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
      ignore("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
    }
  }
  // ignore followed by ignore with the same name
  intercept[DuplicateTestNameException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      ignore("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
      ignore("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
    }
  }
  // ignore followed by property with the same name
  intercept[DuplicateTestNameException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      ignore("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
      property("test this") { fixture =>
        /* ASSERTION_SUCCEED */
      }
    }
  }
}
// withFixture's argument must be the value each test body receives.
it("should pass in the fixture to every test method") {
  val a = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    val hello = "Hello, world!"
    def withFixture(test: OneArgTest): Outcome = {
      test(hello)
    }
    property("this") { fixture =>
      assert(fixture === hello)
    }
    property("that") { fixture =>
      assert(fixture === hello)
    }
  }
  // Both tests should pass: no TestFailed events expected.
  val rep = new EventRecordingReporter
  a.run(None, Args(rep))
  assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
}
// A null tag must be rejected by every registration entry point, whether it is
// the only tag or appears among valid tags.
it("should throw NullArgumentException if a null test tag is provided") {
  // test
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      property("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  val caught = intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      property("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  assert(caught.getMessage === "a test tag was null")
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      property("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  // ignore
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      ignore("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  val caught2 = intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      ignore("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  assert(caught2.getMessage === "a test tag was null")
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      ignore("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  // registerTest
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerTest("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  val caught3 = intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerTest("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  assert(caught3.getMessage === "a test tag was null")
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  // registerIgnoredTest
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerIgnoredTest("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  val caught4 = intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerIgnoredTest("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
  assert(caught4.getMessage === "a test tag was null")
  intercept[NullArgumentException] {
    new propspec.FixtureAnyPropSpec {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = Succeeded
      registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
    }
  }
}
// Small probe suite used by the next two tests: the boolean flags record which
// of its two property tests actually executed.
class TestWasCalledSuite extends propspec.FixtureAnyPropSpec {
  type FixtureParam = String
  def withFixture(test: OneArgTest): Outcome = { test("hi") }
  var theTestThisCalled = false
  var theTestThatCalled = false
  property("this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
  property("that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
// With testName == None, run must execute every registered test.
it("should execute all tests when run is called with testName None") {
  val suite = new TestWasCalledSuite
  suite.run(None, Args(SilentReporter))
  assert(suite.theTestThisCalled)
  assert(suite.theTestThatCalled)
}
// With a specific testName, run must execute only that test.
it("should execute one test when run is called with a defined testName") {
  val suite = new TestWasCalledSuite
  suite.run(Some("this"), Args(SilentReporter))
  assert(suite.theTestThisCalled)
  assert(!suite.theTestThatCalled)
}
// Ignored tests must fire TestIgnored and must not run; the last TestIgnored
// event corresponds to the last ignored test in registration order.
it("should report as ignored, and not run, tests marked ignored") {
  // No ignores: both tests run, no TestIgnored event.
  class SpecA extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    property("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val a = new SpecA

  import scala.language.reflectiveCalls

  val repA = new TestIgnoredTrackingReporter
  a.run(None, Args(repA))
  assert(!repA.testIgnoredReceived)
  assert(a.theTestThisCalled)
  assert(a.theTestThatCalled)

  // First test ignored: it is reported ignored and does not run.
  class SpecB extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val b = new SpecB
  val repB = new TestIgnoredTrackingReporter
  b.run(None, Args(repB))
  assert(repB.testIgnoredReceived)
  assert(repB.lastEvent.isDefined)
  assert(repB.lastEvent.get.testName endsWith "test this")
  assert(!b.theTestThisCalled)
  assert(b.theTestThatCalled)

  // Second test ignored: same, but for "test that".
  class SpecC extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    property("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val c = new SpecC
  val repC = new TestIgnoredTrackingReporter
  c.run(None, Args(repC))
  assert(repC.testIgnoredReceived)
  assert(repC.lastEvent.isDefined)
  assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
  assert(c.theTestThisCalled)
  assert(!c.theTestThatCalled)

  // The order I want is order of appearance in the file.
  // Will try and implement that tomorrow. Subtypes will be able to change the order.
  class SpecD extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val d = new SpecD
  val repD = new TestIgnoredTrackingReporter
  d.run(None, Args(repD))
  assert(repD.testIgnoredReceived)
  assert(repD.lastEvent.isDefined)
  assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
  assert(!d.theTestThisCalled)
  assert(!d.theTestThatCalled)
}
// Running an ignored test by explicit name still reports it ignored and skips it.
it("should ignore a test marked as ignored if run is invoked with that testName") {
  // If I provide a specific testName to run, then it should ignore an Ignore on that test
  // method and actually invoke it.
  class SpecE extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val e = new SpecE

  import scala.language.reflectiveCalls

  val repE = new TestIgnoredTrackingReporter
  e.run(Some("test this"), Args(repE))
  assert(repE.testIgnoredReceived)
  assert(!e.theTestThisCalled)
  assert(!e.theTestThatCalled)
}
// Exercises tagsToInclude/tagsToExclude filtering across specs A..K, checking
// which tests run and when TestIgnored events are emitted.
// Fixes in this revision: SpecC was run with repB (vacuous assertion on repC),
// and the SpecJ assertion checked repI instead of repJ.
it("should run only those tests selected by the tags to include and exclude sets") {
  // Nothing is excluded
  class SpecA extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    property("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val a = new SpecA

  import scala.language.reflectiveCalls

  val repA = new TestIgnoredTrackingReporter
  a.run(None, Args(repA))
  assert(!repA.testIgnoredReceived)
  assert(a.theTestThisCalled)
  assert(a.theTestThatCalled)

  // SlowAsMolasses is included, one test should be excluded
  class SpecB extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    property("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val b = new SpecB
  val repB = new TestIgnoredTrackingReporter
  b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repB.testIgnoredReceived)
  assert(b.theTestThisCalled)
  assert(!b.theTestThatCalled)

  // SlowAsMolasses is included, and both tests should be included
  class SpecC extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    property("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val c = new SpecC
  val repC = new TestIgnoredTrackingReporter
  // Fix: was Args(repB, ...) — SpecC must report to repC for the check below to mean anything.
  c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repC.testIgnoredReceived)
  assert(c.theTestThisCalled)
  assert(c.theTestThatCalled)

  // SlowAsMolasses is included. both tests should be included but one ignored
  class SpecD extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    ignore("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val d = new SpecD
  val repD = new TestIgnoredTrackingReporter
  d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(repD.testIgnoredReceived)
  assert(!d.theTestThisCalled)
  assert(d.theTestThatCalled)

  // SlowAsMolasses included, FastAsLight excluded
  class SpecE extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    property("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val e = new SpecE
  val repE = new TestIgnoredTrackingReporter
  e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
    ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repE.testIgnoredReceived)
  assert(!e.theTestThisCalled)
  assert(e.theTestThatCalled)
  assert(!e.theTestTheOtherCalled)

  // An Ignored test that was both included and excluded should not generate a TestIgnored event
  class SpecF extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    property("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val f = new SpecF
  val repF = new TestIgnoredTrackingReporter
  f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
    ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repF.testIgnoredReceived)
  assert(!f.theTestThisCalled)
  assert(f.theTestThatCalled)
  assert(!f.theTestTheOtherCalled)

  // An Ignored test that was not included should not generate a TestIgnored event
  class SpecG extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val g = new SpecG
  val repG = new TestIgnoredTrackingReporter
  g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
    ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repG.testIgnoredReceived)
  assert(!g.theTestThisCalled)
  assert(g.theTestThatCalled)
  assert(!g.theTestTheOtherCalled)

  // No tagsToInclude set, FastAsLight excluded
  class SpecH extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    property("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val h = new SpecH
  val repH = new TestIgnoredTrackingReporter
  h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repH.testIgnoredReceived)
  assert(!h.theTestThisCalled)
  assert(h.theTestThatCalled)
  assert(h.theTestTheOtherCalled)

  // No tagsToInclude set, SlowAsMolasses excluded
  class SpecI extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    property("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    property("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val i = new SpecI
  val repI = new TestIgnoredTrackingReporter
  i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(!repI.testIgnoredReceived)
  assert(!i.theTestThisCalled)
  assert(!i.theTestThatCalled)
  assert(i.theTestTheOtherCalled)

  // No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
  class SpecJ extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    property("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val j = new SpecJ
  val repJ = new TestIgnoredTrackingReporter
  j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
  // Fix: was repI — the SpecJ run must be checked through its own reporter, repJ.
  assert(!repJ.testIgnoredReceived)
  assert(!j.theTestThisCalled)
  assert(!j.theTestThatCalled)
  assert(j.theTestTheOtherCalled)

  // Same as previous, except Ignore specifically mentioned in excludes set
  class SpecK extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = { test("hi") }
    var theTestThisCalled = false
    var theTestThatCalled = false
    var theTestTheOtherCalled = false
    ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    ignore("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
  }
  val k = new SpecK
  val repK = new TestIgnoredTrackingReporter
  k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
  assert(repK.testIgnoredReceived)
  assert(!k.theTestThisCalled)
  assert(!k.theTestThatCalled)
  assert(!k.theTestTheOtherCalled)
}
it("should run only those registered tests selected by the tags to include and exclude sets") {
// Nothing is excluded
class SpecA extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val a = new SpecA
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
class SpecB extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val b = new SpecB
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
class SpecC extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val c = new SpecC
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
class SpecD extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val d = new SpecD
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
class SpecE extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val e = new SpecE
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
class SpecF extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val f = new SpecF
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
class SpecG extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val g = new SpecG
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
class SpecH extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val h = new SpecH
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
class SpecI extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val i = new SpecI
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
class SpecJ extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val j = new SpecJ
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
// Bug fix: this case runs SpecJ with repJ, so the ignored-event check must
// inspect repJ (the original asserted on repI, copied from the previous case).
assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
class SpecK extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val k = new SpecK
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
// expectedTestCount must honor ignore annotations and tag-based filtering.
// Baseline: two runnable tests, no tags.
val a = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test this") { fixture => /* ASSERTION_SUCCEED */ }
property("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(a.expectedTestCount(Filter()) === 2)
// An ignored test is not counted.
val b = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
ignore("test this") { fixture => /* ASSERTION_SUCCEED */ }
property("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(b.expectedTestCount(Filter()) === 1)
// A tagged test is counted only when its tag passes the include/exclude filter.
val c = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test this", mytags.FastAsLight) { fixture => /* ASSERTION_SUCCEED */ }
property("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
// Multiple tags over three tests: counts per include/exclude combination.
val d = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
property("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
property("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
// Same as d but the untagged test is ignored.
val e = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
property("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
ignore("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
// Nested Suites aggregate the counts: 2 + 1 + 2 + 3 + 2 = 10.
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") {
// Same expectedTestCount matrix as the property/ignore case above, but built
// through the imperative registerTest/registerIgnoredTest API.
val a = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this") { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(a.expectedTestCount(Filter()) == 2)
// A registered-ignored test is not counted.
val b = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerIgnoredTest("test this") { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(b.expectedTestCount(Filter()) == 1)
// Tag filtering applies to registered tests just as to declared ones.
val c = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight) { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1)
// Multiple tags over three tests: counts per include/exclude combination.
val d = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
assert(d.expectedTestCount(Filter()) == 3)
// Same as d but the untagged test is registered as ignored.
val e = new propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
assert(e.expectedTestCount(Filter()) == 2)
// Nested Suites aggregate the counts: 2 + 1 + 2 + 3 + 2 = 10.
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) == 10)
}
it("should generate a TestPending message when the test body is (pending)") {
  // Suite with one body that is `(pending)`, one that simply passes, and one
  // that does real work before ending in `pending`: exactly two pending events.
  val suite = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    val hello = "Hello, world!"
    def withFixture(test: OneArgTest): Outcome = test(hello)
    property("should do this") (pending)
    property("should do that") { fixture =>
      assert(fixture === hello)
    }
    property("should do something else") { fixture =>
      assert(fixture === hello)
      pending
    }
  }
  val recorder = new EventRecordingReporter
  suite.run(None, Args(recorder))
  val pendings = recorder.testPendingEventsReceived
  assert(pendings.size === 2)
}
it("should allow tests without fixtures to be combined with tests with fixtures") {
// One suite may freely mix fixture-taking bodies ("fixture =>") and
// no-fixture bodies ("() =>"); both styles must register and run.
class SpecA extends propspec.FixtureAnyPropSpec {
var theTestWithFixtureWasRun = false
var theTestWithoutFixtureWasRun = false
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
property("should do this") (pending)
property("should do that") { fixture =>
assert(fixture === hello)
theTestWithFixtureWasRun = true
/* ASSERTION_SUCCEED */
}
property("should do something else") { fixture =>
assert(fixture === hello)
pending
}
property("should do that without a fixture") { () =>
assert(2 + 2 === 4)
theTestWithoutFixtureWasRun = true
/* ASSERTION_SUCCEED */
}
}
val a = new SpecA
import scala.language.reflectiveCalls
val rep = new EventRecordingReporter
a.run(None, Args(rep))
// Two of the four tests end in `pending`; the other two must have executed.
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
assert(a.theTestWithFixtureWasRun)
assert(a.theTestWithoutFixtureWasRun)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes known in JDK 1.5, excluding AssertionError") {
  // Each property throws a different non-suite-aborting throwable; all three
  // should surface as TestFailed events rather than aborting the run.
  val suite = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    val hello = "Hello, world!"
    def withFixture(test: OneArgTest): Outcome = test(hello)
    property("throws AssertionError") { fixture => throw new AssertionError }
    property("throws plain old Error") { fixture => throw new Error }
    property("throws Throwable") { fixture => throw new Throwable }
  }
  val reporter = new EventRecordingReporter
  suite.run(None, Args(reporter))
  assert(reporter.testFailedEventsReceived.size === 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than AssertionError, causing Suites and Runs to abort.") {
  // OutOfMemoryError is treated as fatal: it must escape run() untouched
  // instead of being reported as a test failure.
  val suite = new propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    val hello = "Hello, world!"
    def withFixture(test: OneArgTest): Outcome = test(hello)
    property("throws AssertionError") { fixture => throw new OutOfMemoryError }
  }
  intercept[OutOfMemoryError] {
    suite.run(None, Args(SilentReporter))
  }
}
// SKIP-SCALATESTJS,NATIVE-END
it("should allow both tests that take fixtures and tests that don't") {
  // Registering a "() =>" body and a "fixture =>" body in the same suite:
  // both must appear in testNames and both must execute.
  class MixedSpec extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = test("Hello, world!")
    var takesNoArgsInvoked = false
    property("take no args") { () => takesNoArgsInvoked = true; /* ASSERTION_SUCCEED */ }
    var takesAFixtureInvoked = false
    property("takes a fixture") { s => takesAFixtureInvoked = true; /* ASSERTION_SUCCEED */ }
  }
  val spec = new MixedSpec
  import scala.language.reflectiveCalls
  spec.run(None, Args(SilentReporter))
  assert(spec.testNames.size === 2, spec.testNames)
  assert(spec.takesNoArgsInvoked)
  assert(spec.takesAFixtureInvoked)
}
it("should work with test functions whose inferred result type is not Unit") {
// Bodies whose last expression is a Boolean/String/Int (not Unit) must still
// register and run; the non-Unit result is simply discarded.
class SpecA extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("Hello, world!")
}
var takesNoArgsInvoked = false
property("take no args") { () => takesNoArgsInvoked = true; true; /* ASSERTION_SUCCEED */ }
var takesAFixtureInvoked = false
property("takes a fixture") { s => takesAFixtureInvoked = true; true; /* ASSERTION_SUCCEED */ }
}
val a = new SpecA
import scala.language.reflectiveCalls
assert(!a.takesNoArgsInvoked)
assert(!a.takesAFixtureInvoked)
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 2, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAFixtureInvoked)
}
it("should work with ignored tests whose inferred result type is not Unit") {
// Ignored tests with non-Unit bodies must produce TestIgnored events and
// must never actually execute their bodies.
class SpecA extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("test this") { () => theTestThisCalled = true; "hi"; /* ASSERTION_SUCCEED */ }
ignore("test that") { fixture => theTestThatCalled = true; 42; /* ASSERTION_SUCCEED */ }
}
val a = new SpecA
import scala.language.reflectiveCalls
assert(!a.theTestThisCalled)
assert(!a.theTestThatCalled)
val reporter = new EventRecordingReporter
a.run(None, Args(reporter))
assert(reporter.testIgnoredEventsReceived.size === 2)
// Flags stay false: ignored bodies were never invoked.
assert(!a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should pass a NoArgTest to withFixture for tests that take no fixture") {
  // A test registered with a "() =>" body must be routed to the NoArgTest
  // overload of withFixture, never to the OneArgTest overload.
  class MySuite extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    var aNoArgTestWasPassed = false
    var aOneArgTestWasPassed = false
    override def withFixture(test: NoArgTest): Outcome = {
      aNoArgTestWasPassed = true
      Succeeded
    }
    def withFixture(test: OneArgTest): Outcome = {
      aOneArgTestWasPassed = true
      Succeeded
    }
    property("something") { () => assert(1 + 1 === 2) }
  }
  val suite = new MySuite
  suite.run(None, Args(SilentReporter))
  assert(suite.aNoArgTestWasPassed)
  assert(!suite.aOneArgTestWasPassed)
}
it("should not pass a NoArgTest to withFixture for tests that take a Fixture") {
// A test body that accepts a fixture parameter must be routed to the
// OneArgTest overload of withFixture, not the NoArgTest overload.
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
aNoArgTestWasPassed = true
Succeeded
}
def withFixture(test: OneArgTest): Outcome = {
aOneArgTestWasPassed = true
Succeeded
}
property("something") { fixture =>
assert(1 + 1 === 2)
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(!s.aNoArgTestWasPassed)
assert(s.aOneArgTestWasPassed)
}
it("should pass a NoArgTest that invokes the no-arg test when the NoArgTest's no-arg apply method is invoked") {
  class MySuite extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    var theNoArgTestWasInvoked = false
    // Shouldn't be reached for a no-arg test; never invoke a OneArgTest here.
    def withFixture(test: OneArgTest): Outcome = Succeeded
    property("something") { () =>
      theNoArgTestWasInvoked = true
      /* ASSERTION_SUCCEED */
    }
  }
  val suite = new MySuite
  suite.run(None, Args(SilentReporter))
  assert(suite.theNoArgTestWasInvoked)
}
it("should pass the correct test name in the OneArgTest passed to withFixture") {
// withFixture must see test.name equal to the registered property name.
class SpecA extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
var correctTestNameWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctTestNameWasPassed = test.name == "something"
test("hi")
}
property("something") { fixture => /* ASSERTION_SUCCEED */ }
}
val a = new SpecA
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the OneArgTest passed to withFixture") {
// withFixture must see test.configMap equal to the map passed via Args.
class SpecA extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
var correctConfigMapWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
test("hi")
}
property("something") { fixture => /* ASSERTION_SUCCEED */ }
}
val a = new SpecA
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
describe("(when a nesting rule has been violated)") {
// Each case registers a test from inside a running test body, which is
// illegal after registration has closed. The outer test must fail (checked
// via ensureTestFailedEventReceived) and the nested test must never run.
// property nested inside property.
it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("should blow up") { fixture =>
property("should never run") { fixture =>
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
// Tagged property nested inside property.
it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("should blow up") { fixture =>
property("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
// registerTest nested inside registerTest.
it("should, if they call a nested registerTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
// ignore nested inside property.
it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("should blow up") { fixture =>
ignore("should never run") { fixture =>
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
// Tagged ignore nested inside property.
it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("should blow up") { fixture =>
ignore("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
// registerIgnoredTest nested inside registerTest.
it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySuite extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerIgnoredTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySuite
ensureTestFailedEventReceived(spec, "should blow up")
}
}
it("should throw IllegalArgumentException if passed a testName that doesn't exist") {
  // Asking run() for a test name that was never registered is a
  // precondition violation and must fail fast.
  class TwoTestSuite extends propspec.FixtureAnyPropSpec {
    type FixtureParam = String
    def withFixture(test: OneArgTest): Outcome = test("hi")
    property("one") { s => (); /* ASSERTION_SUCCEED */ }
    property("two") { s => (); /* ASSERTION_SUCCEED */ }
  }
  intercept[IllegalArgumentException] {
    (new TwoTestSuite).run(Some("three"), Args(SilentReporter))
  }
}
it("should allow test registration with registerTest and registerIgnoredTest") {
// Exercises every outcome through the registerTest/registerIgnoredTest API:
// success, failure, pending, canceled, and ignored.
// NOTE: the `thisLineNumber - 4` assertion below measures the distance to
// the failing assert, so no lines may be inserted between those two lines.
class TestSpec extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("hi")
}
val a = 1
registerTest("test 1") { fixture =>
val e = intercept[TestFailedException] {
assert(a == 2)
}
assert(e.message == Some("1 did not equal 2"))
assert(e.failedCodeFileName == Some("FixturePropSpecSpec.scala"))
assert(e.failedCodeLineNumber == Some(thisLineNumber - 4))
}
// Fails: a == 1.
registerTest("test 2") { fixture =>
assert(a == 2)
}
registerTest("test 3") { fixture =>
pending
}
registerTest("test 4") { fixture =>
cancel()
}
registerIgnoredTest("test 5") { fixture =>
assert(a == 2)
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
// Four tests start (the ignored one does not); one of each outcome follows.
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
ignore("should support expectations") { // Unignore after we uncomment the expectation implicits in RegistrationPolicy
// Both a no-arg and a one-arg body should be able to use the
// expectations.Expectations `expect` DSL; each failing expectation surfaces
// as a TestFailed event with accurate stack-depth info.
// NOTE: the `thisLineNumber - 11` / `- 10` assertions measure the distance
// to the `expect` lines; do not insert lines between them.
class TestSpec extends propspec.FixtureAnyPropSpec with expectations.Expectations {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("fail scenario") { () =>
expect(1 === 2); /* ASSERTION_SUCCEED */
}
property("nested fail scenario") { fixture =>
expect(1 === 2); /* ASSERTION_SUCCEED */
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 2)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixturePropSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11)
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixturePropSpecSpec.scala")
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 10)
}
}
describe("when failure happens") {
it("should fire TestFailed event with correct stack depth info when test failed") {
// A plain assertion failure must be reported as TestFailed carrying a
// TestFailedException that points at this file and the failing assert line.
// NOTE: `thisLineNumber - 8` measures the distance to the failing assert;
// do not insert lines between them.
class TestSpec extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("hi")
}
property("fail scenario") { fixture =>
assert(1 === 2)
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 1)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixturePropSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 8)
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a property nested inside a property") {
// Registering a property from inside a running property body is illegal; the
// TestRegistrationClosedException is seen both by withFixture (as an
// Exceptional outcome) and by the reporter as a TestFailed event.
// NOTE: `thisLineNumber - 23` points at the nested registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
var registrationClosedThrown = false
type FixtureParam = String
property("a scenario") { fixture =>
property("nested scenario") { fixture =>
assert(1 == 2)
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FixturePropSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.message == Some("A property clause may not appear inside another property clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside a property") {
// As above, but the illegal nested registration is an `ignore` clause.
// NOTE: `thisLineNumber - 23` points at the nested registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
var registrationClosedThrown = false
type FixtureParam = String
property("a scenario") { fixture =>
ignore("nested scenario") { fixture =>
assert(1 == 2)
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FixturePropSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.message == Some("An ignore clause may not appear inside a property clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") {
// Same rule through the imperative API: registerTest inside a running
// registered test throws TestRegistrationClosedException.
// NOTE: `thisLineNumber - 23` points at the nested registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
var registrationClosedThrown = false
type FixtureParam = String
registerTest("a scenario") { fixture =>
registerTest("nested scenario") { fixture =>
assert(1 == 2)
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FixturePropSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerIgnoredTest nested inside a registerTest") {
// As above, with registerIgnoredTest as the illegal nested registration.
// NOTE: `thisLineNumber - 23` points at the nested registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
var registrationClosedThrown = false
type FixtureParam = String
registerTest("a scenario") { fixture =>
registerIgnoredTest("nested scenario") { fixture =>
assert(1 == 2)
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FixturePropSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
it("should generate a DuplicateTestNameException when duplicate test name is detected") {
// Registering two properties under the same name must fail fast at
// construction time, pointing at the second (duplicate) registration.
// NOTE: `thisLineNumber - 6` points at the duplicate registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test 1") { fixture => }
property("test 1") { fixture => }
}
val e = intercept[DuplicateTestNameException] {
new TestSpec
}
assert("FixturePropSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 6)
assert(!e.cause.isDefined)
}
it("should generate a DuplicateTestNameException when duplicate test name is detected when use ignore") {
// Duplicate detection also fires when the second registration is `ignore`.
// NOTE: `thisLineNumber - 6` points at the duplicate registration line;
// do not insert lines between that line and the assertion.
class TestSpec extends propspec.FixtureAnyPropSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
property("test 1") { fixture => }
ignore("test 1") { fixture => }
}
val e = intercept[DuplicateTestNameException] {
new TestSpec
}
assert("FixturePropSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 6)
assert(!e.cause.isDefined)
}
}
}
| scalatest/scalatest | jvm/propspec-test/src/test/scala/org/scalatest/propspec/FixturePropSpecSpec.scala | Scala | apache-2.0 | 67,116 |
package com.zola
import org.scalatest._
// Minimal smoke-test suite: the single test body is intentionally empty and only
// verifies that the project's test wiring compiles and runs.
class ProgramSpec extends FlatSpec with Matchers {
  "The Hello object" should "say hello" in { }
}
| ValentinRutz/zola | src/test/scala/zola/ProgramSpec.scala | Scala | apache-2.0 | 143 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.expressions.Literal.FalseLiteral
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan, Project}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.types.{IntegerType, MetadataBuilder, StructType}
class PropagateEmptyRelationSuite extends PlanTest {

  // Optimizer under test: PropagateEmptyRelation plus the rewrites it depends on
  // (union combining, set-op replacement, filter pushdown/pruning, project collapsing).
  object Optimize extends RuleExecutor[LogicalPlan] {
    val batches =
      Batch("PropagateEmptyRelation", Once,
        CombineUnions,
        ReplaceDistinctWithAggregate,
        ReplaceExceptWithAntiJoin,
        ReplaceIntersectWithSemiJoin,
        PushPredicateThroughNonJoin,
        PruneFilters,
        PropagateEmptyRelation,
        CollapseProject) :: Nil
  }

  // Same pipeline without PropagateEmptyRelation; used as the expected plan whenever a
  // test case asserts that PropagateEmptyRelation must NOT rewrite the query.
  object OptimizeWithoutPropagateEmptyRelation extends RuleExecutor[LogicalPlan] {
    val batches =
      Batch("OptimizeWithoutPropagateEmptyRelation", Once,
        CombineUnions,
        ReplaceDistinctWithAggregate,
        ReplaceExceptWithAntiJoin,
        ReplaceIntersectWithSemiJoin,
        PushPredicateThroughNonJoin,
        PruneFilters,
        CollapseProject) :: Nil
  }

  // Single-row relations; tests derive empty relations from them via `.where(false)`.
  val testRelation1 = LocalRelation.fromExternalRows(Seq('a.int), data = Seq(Row(1)))
  val testRelation2 = LocalRelation.fromExternalRows(Seq('b.int), data = Seq(Row(1)))

  // Relation with a non-nullable column carrying metadata, used by the
  // nullability- and metadata-preservation checks below.
  val metadata = new MetadataBuilder().putLong("test", 1).build()
  val testRelation3 =
    LocalRelation.fromExternalRows(Seq('c.int.notNull.withMetadata(metadata)), data = Seq(Row(1)))

  test("propagate empty relation through Union") {
    val query = testRelation1
      .where(false)
      .union(testRelation2.where(false))

    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = LocalRelation('a.int)

    comparePlans(optimized, correctAnswer)
  }

  test("SPARK-32241: remove empty relation children from Union") {
    val query = testRelation1.union(testRelation2.where(false))
    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = testRelation1
    comparePlans(optimized, correctAnswer)

    val query2 = testRelation1.where(false).union(testRelation2)
    val optimized2 = Optimize.execute(query2.analyze)
    // Surviving child's output must be re-aliased to the union's output attribute.
    val correctAnswer2 = testRelation2.select('b.as('a)).analyze
    comparePlans(optimized2, correctAnswer2)

    val query3 = testRelation1.union(testRelation2.where(false)).union(testRelation3)
    val optimized3 = Optimize.execute(query3.analyze)
    val correctAnswer3 = testRelation1.union(testRelation3)
    comparePlans(optimized3, correctAnswer3)

    val query4 = testRelation1.where(false).union(testRelation2).union(testRelation3)
    val optimized4 = Optimize.execute(query4.analyze)
    val correctAnswer4 = testRelation2.union(testRelation3).select('b.as('a)).analyze
    comparePlans(optimized4, correctAnswer4)

    // Nullability can change from nullable to non-nullable
    val query5 = testRelation1.where(false).union(testRelation3)
    val optimized5 = Optimize.execute(query5.analyze)
    assert(query5.output.head.nullable, "Original output should be nullable")
    assert(!optimized5.output.head.nullable, "New output should be non-nullable")

    // Keep metadata
    val query6 = testRelation3.where(false).union(testRelation1)
    val optimized6 = Optimize.execute(query6.analyze)
    assert(optimized6.output.head.metadata == metadata, "New output should keep metadata")
  }

  test("propagate empty relation through Join") {
    // Testcases are tuples of (left predicate, right predicate, joinType, correct answer)
    // Note that `None` is used to compare with OptimizeWithoutPropagateEmptyRelation.
    val testcases = Seq(
      (true, true, Inner, None),
      (true, true, Cross, None),
      (true, true, LeftOuter, None),
      (true, true, RightOuter, None),
      (true, true, FullOuter, None),
      (true, true, LeftAnti, None),
      (true, true, LeftSemi, None),

      (true, false, Inner, Some(LocalRelation('a.int, 'b.int))),
      (true, false, Cross, Some(LocalRelation('a.int, 'b.int))),
      (true, false, LeftOuter,
        Some(Project(Seq('a, Literal(null).cast(IntegerType).as('b)), testRelation1).analyze)),
      (true, false, RightOuter, Some(LocalRelation('a.int, 'b.int))),
      (true, false, FullOuter,
        Some(Project(Seq('a, Literal(null).cast(IntegerType).as('b)), testRelation1).analyze)),
      (true, false, LeftAnti, Some(testRelation1)),
      (true, false, LeftSemi, Some(LocalRelation('a.int))),

      (false, true, Inner, Some(LocalRelation('a.int, 'b.int))),
      (false, true, Cross, Some(LocalRelation('a.int, 'b.int))),
      (false, true, LeftOuter, Some(LocalRelation('a.int, 'b.int))),
      (false, true, RightOuter,
        Some(Project(Seq(Literal(null).cast(IntegerType).as('a), 'b), testRelation2).analyze)),
      (false, true, FullOuter,
        Some(Project(Seq(Literal(null).cast(IntegerType).as('a), 'b), testRelation2).analyze)),
      (false, true, LeftAnti, Some(LocalRelation('a.int))),
      (false, true, LeftSemi, Some(LocalRelation('a.int))),

      (false, false, Inner, Some(LocalRelation('a.int, 'b.int))),
      (false, false, Cross, Some(LocalRelation('a.int, 'b.int))),
      (false, false, LeftOuter, Some(LocalRelation('a.int, 'b.int))),
      (false, false, RightOuter, Some(LocalRelation('a.int, 'b.int))),
      (false, false, FullOuter, Some(LocalRelation('a.int, 'b.int))),
      (false, false, LeftAnti, Some(LocalRelation('a.int))),
      (false, false, LeftSemi, Some(LocalRelation('a.int)))
    )

    testcases.foreach { case (left, right, jt, answer) =>
      val query = testRelation1
        .where(left)
        .join(testRelation2.where(right), joinType = jt, condition = Some('a.attr === 'b.attr))
      val optimized = Optimize.execute(query.analyze)
      val correctAnswer =
        answer.getOrElse(OptimizeWithoutPropagateEmptyRelation.execute(query.analyze))
      comparePlans(optimized, correctAnswer)
    }
  }

  test("SPARK-28220: Propagate empty relation through Join if condition is FalseLiteral") {
    // Same structure as above, but the emptiness comes from a constant-false join
    // condition rather than from an empty child relation.
    val testcases = Seq(
      (Inner, Some(LocalRelation('a.int, 'b.int))),
      (Cross, Some(LocalRelation('a.int, 'b.int))),
      (LeftOuter,
        Some(Project(Seq('a, Literal(null).cast(IntegerType).as('b)), testRelation1).analyze)),
      (RightOuter,
        Some(Project(Seq(Literal(null).cast(IntegerType).as('a), 'b), testRelation2).analyze)),
      (FullOuter, None),
      (LeftAnti, Some(testRelation1)),
      (LeftSemi, Some(LocalRelation('a.int)))
    )

    testcases.foreach { case (jt, answer) =>
      val query = testRelation1.join(testRelation2, joinType = jt, condition = Some(FalseLiteral))
      val optimized = Optimize.execute(query.analyze)
      val correctAnswer =
        answer.getOrElse(OptimizeWithoutPropagateEmptyRelation.execute(query.analyze))
      comparePlans(optimized, correctAnswer)
    }
  }

  test("propagate empty relation through UnaryNode") {
    val query = testRelation1
      .where(false)
      .select('a)
      .groupBy('a)('a)
      .where('a > 1)
      .orderBy('a.asc)

    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = LocalRelation('a.int)

    comparePlans(optimized, correctAnswer)
  }

  test("propagate empty streaming relation through multiple UnaryNode") {
    val output = Seq('a.int)
    val data = Seq(Row(1))
    val schema = StructType.fromAttributes(output)
    val converter = CatalystTypeConverters.createToCatalystConverter(schema)
    val relation = LocalRelation(
      output,
      data.map(converter(_).asInstanceOf[InternalRow]),
      isStreaming = true)

    val query = relation
      .where(false)
      .select('a)
      .where('a > 1)
      .where('a =!= 200)
      .orderBy('a.asc)

    val optimized = Optimize.execute(query.analyze)
    // The isStreaming flag must be preserved on the resulting empty relation.
    val correctAnswer = LocalRelation(output, isStreaming = true)

    comparePlans(optimized, correctAnswer)
  }

  test("don't propagate empty streaming relation through agg") {
    // Streaming aggregations can still emit results across micro-batches, so an
    // empty input must not collapse the aggregate.
    val output = Seq('a.int)
    val data = Seq(Row(1))
    val schema = StructType.fromAttributes(output)
    val converter = CatalystTypeConverters.createToCatalystConverter(schema)
    val relation = LocalRelation(
      output,
      data.map(converter(_).asInstanceOf[InternalRow]),
      isStreaming = true)

    val query = relation
      .groupBy('a)('a)

    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = query.analyze

    comparePlans(optimized, correctAnswer)
  }

  test("don't propagate non-empty local relation") {
    val query = testRelation1
      .where(true)
      .groupBy('a)('a)
      .where('a > 1)
      .orderBy('a.asc)
      .select('a)

    val optimized = Optimize.execute(query.analyze)
    // Only filter pushdown/pruning applies; the relation itself is untouched.
    val correctAnswer = testRelation1
      .where('a > 1)
      .groupBy('a)('a)
      .orderBy('a.asc)
      .select('a)

    comparePlans(optimized, correctAnswer.analyze)
  }

  test("propagate empty relation through Aggregate with grouping expressions") {
    val query = testRelation1
      .where(false)
      .groupBy('a)('a, ('a + 1).as('x))

    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = LocalRelation('a.int, 'x.int).analyze

    comparePlans(optimized, correctAnswer)
  }

  test("don't propagate empty relation through Aggregate without grouping expressions") {
    // A global aggregate over an empty input still produces one row (e.g. count() == 0),
    // so the Aggregate node must be kept.
    val query = testRelation1
      .where(false)
      .groupBy()()

    val optimized = Optimize.execute(query.analyze)
    val correctAnswer = LocalRelation('a.int).groupBy()().analyze

    comparePlans(optimized, correctAnswer)
  }

  test("propagate empty relation keeps the plan resolved") {
    val query = testRelation1.join(
      LocalRelation('a.int, 'b.int), UsingJoin(FullOuter, "a" :: Nil), None)
    val optimized = Optimize.execute(query.analyze)
    assert(optimized.resolved)
  }

  test("should not optimize away limit if streaming") {
    val query = LocalRelation(Nil, Nil, isStreaming = true).limit(1).analyze
    val optimized = Optimize.execute(query)
    comparePlans(optimized, query)
  }
}
| wangmiao1981/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PropagateEmptyRelationSuite.scala | Scala | apache-2.0 | 11,155 |
package io
import java.io.{BufferedOutputStream, File, FileOutputStream}
import communication.FileManifest
import scala.collection.mutable.ListBuffer
/**
* Created by marcin on 5/10/17.
*/
/** Builds file from Chunks.
*
* @param fileManifest manifest of file's chunks
* @param homeDirPath path to home directory of given file in filesystem
*/
class FileBuilder(fileManifest: FileManifest)(implicit homeDirPath: String) {

  // Chunks received so far; at most one entry per chunk id.
  private val chunksBuffer: ListBuffer[Chunk] = ListBuffer()

  /** All chunks received so far, ordered by chunk id. */
  def chunks: List[Chunk] = chunksBuffer.toList.sortBy(_.id)

  /** Records a chunk unless one with the same id has already been received. */
  def accept(chunk: Chunk): Unit = {
    if (!chunksBuffer.exists(_.id == chunk.id)) chunksBuffer += chunk
  }

  /** Ids in [0, chunkCount) for which no chunk has been received yet. */
  def missingChunks: List[Int] = {
    val received = chunksBuffer.map(_.id).toSet
    (0 until fileManifest.chunkCount).filterNot(received.contains).toList
  }

  /**
   * Assembles the chunks into the target file under `homeDirPath`, creating parent
   * directories as needed, then applies the permission flags from the manifest.
   *
   * Fails fast with an informative error if any chunk is missing (previously this
   * surfaced as a bare NoSuchElementException mid-write, leaving a partial file).
   * The output stream is now closed even if a write throws (previously it leaked).
   */
  def build(): Unit = {
    val missing = missingChunks
    require(missing.isEmpty, s"Cannot build file: missing chunks ${missing.mkString(", ")}")

    val name = fileManifest.fileIndex.path
    val newFile = new File(homeDirPath + "/" + name)
    newFile.getParentFile.mkdirs()

    // Index once instead of scanning the chunk list for every id (was O(n^2)).
    val chunkById = chunksBuffer.map(c => c.id -> c).toMap
    val writer = new BufferedOutputStream(new FileOutputStream(newFile))
    try {
      (0 until fileManifest.chunkCount).foreach { i => writer.write(chunkById(i).content) }
    } finally {
      writer.close()
    }

    newFile.setWritable(fileManifest.fileIndex.writable)
    newFile.setReadable(fileManifest.fileIndex.readable)
    newFile.setExecutable(fileManifest.fileIndex.executable)
  }
}
| mprzewie/cloudia-utils | src/main/scala/io/FileBuilder.scala | Scala | mit | 1,439 |
package db
import javax.inject.{Inject, Singleton}
import io.flow.dependency.v0.models.UserForm
import io.flow.dependency.actors.UserActor
import io.flow.postgresql.{OrderBy, Query}
import io.flow.common.v0.models.{User, UserReference}
import io.flow.util.IdGenerator
import anorm._
import com.google.inject.ImplementedBy
import play.api.db._
// Provides references to the two well-known built-in users. Bound to UsersDao by
// default via Guice's @ImplementedBy.
@ImplementedBy(classOf[UsersDao])
trait StaticUserProvider {
  // The "system" user, used as the actor for internal/automated operations.
  def systemUser: UserReference
  // The "anonymous" user, used as the actor when no authenticated user exists.
  def anonymousUser: UserReference
}
@Singleton
class UsersDao @Inject()(
  db: Database,
  @javax.inject.Named("user-actor") userActor: akka.actor.ActorRef
) extends StaticUserProvider {

  // Well-known e-mail addresses identifying the seeded system and anonymous users.
  private[db] val SystemEmailAddress = "system@bryzek.com"
  private[db] val AnonymousEmailAddress = "anonymous@bryzek.com"

  // Resolved lazily on first access; fails fast if the seed users are missing from the DB.
  override lazy val systemUser = UserReference(
    id = findAll(email = Some(SystemEmailAddress), limit = 1).headOption.map(_.id).getOrElse {
      sys.error(s"Could not find system user[$SystemEmailAddress]")
    }
  )

  override lazy val anonymousUser = UserReference(
    id = findAll(email = Some(AnonymousEmailAddress), limit = 1).headOption.map(_.id).getOrElse {
      sys.error(s"Could not find anonymous user[$AnonymousEmailAddress]")
    }
  )

  // Base select shared by all finders; columns aliased to match the anorm User parser.
  private[this] val BaseQuery = Query(s"""
    select users.id,
           users.email,
           users.first_name as name_first,
           users.last_name as name_last,
           users.avatar_url,
           users.status
      from users
  """)

  private[this] val InsertQuery = """
    insert into users
    (id, email, first_name, last_name, updated_by_user_id, status)
    values
    ({id}, {email}, {first_name}, {last_name}, {updated_by_user_id}, {status})
  """

  /**
   * Validates a user form prior to creation. Returns an empty list when valid,
   * otherwise a list of human-readable error messages. Email is optional; when
   * present it must be non-empty, well-formed, and not already registered.
   */
  def validate(form: UserForm): Seq[String] = {
    form.email match {
      case None => {
        Nil
      }
      case Some(email) => {
        if (email.trim.isEmpty) {
          Seq("Email address cannot be empty")
        } else if (!isValidEmail(email)) {
          Seq("Please enter a valid email address")
        } else {
          findByEmail(email) match {
            case None => Nil
            case Some(_) => Seq("Email is already registered")
          }
        }
      }
    }
  }

  // Intentionally minimal check: just requires an '@' somewhere in the address.
  private def isValidEmail(email: String): Boolean = {
    email.indexOf("@") >= 0
  }

  /**
   * Creates a new user (status "inactive") and notifies the user actor.
   * Returns Right(user) on success or Left(errors) when validation fails.
   */
  def create(createdBy: Option[UserReference], form: UserForm): Either[Seq[String], User] = {
    validate(form) match {
      case Nil => {
        val id = IdGenerator("usr").randomId()

        db.withConnection { implicit c =>
          SQL(InsertQuery).on(
            Symbol("id") -> id,
            Symbol("email") -> form.email.map(_.trim),
            Symbol("first_name") -> Util.trimmedString(form.name.flatMap(_.first)),
            Symbol("last_name") -> Util.trimmedString(form.name.flatMap(_.last)),
            Symbol("updated_by_user_id") -> createdBy.getOrElse(anonymousUser).id,
            Symbol("status") -> Option("inactive")
          ).execute()
        }

        userActor ! UserActor.Messages.Created(id.toString)

        Right(
          findById(id).getOrElse {
            sys.error("Failed to create user")
          }
        )
      }
      case errors => Left(errors)
    }
  }

  // Convenience single-result finders; each delegates to findAll with limit = 1.
  def findByGithubUserId(githubUserId: Long): Option[User] = {
    findAll(githubUserId = Some(githubUserId), limit = 1).headOption
  }

  def findByEmail(email: String): Option[User] = {
    findAll(email = Some(email), limit = 1).headOption
  }

  def findByToken(token: String): Option[User] = {
    findAll(token = Some(token), limit = 1).headOption
  }

  def findById(id: String): Option[User] = {
    findAll(id = Some(id), limit = 1).headOption
  }

  def findByIdentifier(identifier: String): Option[User] = {
    findAll(identifier = Some(identifier), limit = 1).headOption
  }

  /**
   * General-purpose user finder. All filters are optional and combined with AND.
   * Email matching is case-insensitive and trimmed; token/identifier/github filters
   * are applied via subselects against their respective tables.
   */
  def findAll(
    id: Option[String] = None,
    ids: Option[Seq[String]] = None,
    email: Option[String] = None,
    token: Option[String] = None,
    identifier: Option[String] = None,
    githubUserId: Option[Long] = None,
    orderBy: OrderBy = OrderBy("users.created_at"),
    limit: Long = 25,
    offset: Long = 0
  ): Seq[User] = {
    db.withConnection { implicit c =>
      Standards.query(
        BaseQuery,
        tableName = "users",
        auth = Clause.True, // TODO
        id = id,
        ids = ids,
        orderBy = orderBy.sql,
        limit = limit,
        offset = offset
      ).
        optionalText(
          "users.email",
          email,
          columnFunctions = Seq(Query.Function.Lower),
          valueFunctions = Seq(Query.Function.Lower, Query.Function.Trim)
        ).
        and(
          identifier.map { _ =>
            "users.id in (select user_id from user_identifiers where value = trim({identifier}))"
          }
        ).bind("identifier", identifier).
        and(
          token.map { _ =>
            "users.id in (select user_id from tokens where token = trim({token}))"
          }
        ).bind("token", token).
        and(
          githubUserId.map { _ =>
            "users.id in (select user_id from github_users where github_user_id = {github_user_id}::numeric)"
          }
        ).bind("github_user_id", githubUserId).
        as(
          io.flow.common.v0.anorm.parsers.User.parser().*
        )
    }
  }
}
| flowcommerce/dependency | api/app/db/UsersDao.scala | Scala | mit | 5,270 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.index
import java.util.concurrent.ConcurrentHashMap
import com.typesafe.scalalogging.StrictLogging
import com.vividsolutions.jts.geom.Envelope
import org.locationtech.geomesa.utils.geotools.GridSnap
import scala.annotation.tailrec
/**
* Creates an index suitable for geometries with extents. The index is broken up into tiers, where each
* geometry is assigned a tier based on its envelope size. When querying, each tier must be evaluated, so
* if possible tiers should be matched closely to the envelopes of the entries.
*
* Values are indexed by the centroid of their envelope. When querying, the bounding box is expanded based
* on the max envelope size of the tier, to ensure that all potential results are found. Thus, the false
* positive rate tends to go up with larger tiers, and post-filtering is recommended.
*
* @param sizes (x, y) max envelope size for each tier
* @param xBucketMultiplier multiplier for number of x buckets to create per tier
* @param yBucketMultiplier multiplier for number of y buckets to create per tier
* @param extents total area being indexed
* @tparam T item type
*/
class SizeSeparatedBucketIndex[T](sizes: Seq[(Double, Double)] = SizeSeparatedBucketIndex.DefaultTiers,
                                  xBucketMultiplier: Double = 1,
                                  yBucketMultiplier: Double = 1,
                                  extents: Envelope = new Envelope(-180.0, 180.0, -90.0, 90.0))
    extends SpatialIndex[T] with StrictLogging {

  require(sizes.nonEmpty, "No valid tier sizes specified")
  require(sizes.lengthCompare(1) == 0 ||
      sizes.sliding(2).forall { case Seq((x1, y1), (x2, y2)) => x1 <= x2 && y1 <= y2 },
    "Tiers must be ordered by increasing size")

  // note: for point ops, we always use the first (smallest) tier
  // One Tier per configured size; each tier has its own bucket grid sized so that a
  // bucket roughly matches the tier's max envelope dimensions.
  private val tiers = sizes.map { case (width, height) =>
    val xSize = math.ceil(extents.getWidth * xBucketMultiplier / width).toInt
    val ySize = math.ceil(extents.getHeight * yBucketMultiplier / height).toInt
    // create the buckets up front to avoid having to synchronize the whole array
    // we use a ConcurrentHashMap, which gives us iterators that aren't affected by modifications to the backing map
    val buckets = Array.fill(xSize, ySize)(new ConcurrentHashMap[String, T]())
    logger.debug(s"Creating tier for size ($width $height) with buckets [${xSize}x$ySize]")
    new Tier(width, height, buckets, new GridSnap(extents, xSize, ySize))
  }

  override def insert(x: Double, y: Double, key: String, item: T): Unit = {
    val tier = tiers.head
    // volatile reads should be cheaper than writes, so only update the variable if necessary
    if (tier.empty) {
      tier.empty = false
    }
    tier.bucket(x, y).put(key, item)
  }

  override def insert(envelope: Envelope, key: String, item: T): Unit = {
    val tier = selectTier(envelope)
    // volatile reads should be cheaper than writes, so only update the variable if necessary
    if (tier.empty) {
      tier.empty = false
    }
    tier.bucket(envelope).put(key, item)
  }

  override def remove(x: Double, y: Double, key: String): T = tiers.head.bucket(x, y).remove(key)

  override def remove(envelope: Envelope, key: String): T = selectTier(envelope).bucket(envelope).remove(key)

  // use first (smallest) tier for point operations
  override def get(x: Double, y: Double, key: String): T = tiers.head.bucket(x, y).get(key)

  override def get(envelope: Envelope, key: String): T = selectTier(envelope).bucket(envelope).get(key)

  // Every tier may contain matches, so a query has to visit them all.
  override def query(xmin: Double, ymin: Double, xmax: Double, ymax: Double): Iterator[T] =
    tiers.iterator.flatMap(_.iterator(xmin, ymin, xmax, ymax))

  override def query(): Iterator[T] = query(extents.getMinX, extents.getMinY, extents.getMaxX, extents.getMaxY)

  override def size(): Int = {
    var size = 0
    tiers.foreach(tier => size += tier.size())
    size
  }

  override def clear(): Unit = tiers.foreach(_.clear())

  // Picks the smallest tier whose max envelope size can contain the given envelope.
  private def selectTier(envelope: Envelope): Tier = {
    val width = envelope.getWidth
    val height = envelope.getHeight
    tiers.find(t => t.maxSizeX >= width && t.maxSizeY >= height).getOrElse {
      throw new IllegalArgumentException(s"Envelope $envelope exceeds the max tier size ${sizes.last}")
    }
  }

  private class Tier(val maxSizeX: Double,
                     val maxSizeY: Double,
                     buckets: Array[Array[ConcurrentHashMap[String, T]]],
                     gridSnap: GridSnap) {

    // we can safely use volatile instead of synchronized here, as this is a primitive boolean whose
    // state doesn't depend on its own value
    @volatile
    var empty: Boolean = true

    private val maxX = buckets.length - 1
    private val maxY = buckets(0).length - 1

    def bucket(x: Double, y: Double): ConcurrentHashMap[String, T] = buckets(snapX(x))(snapY(y))

    // the bucket is selected based on the envelope centroid
    def bucket(envelope: Envelope): ConcurrentHashMap[String, T] =
      buckets(snapX((envelope.getMinX + envelope.getMaxX) / 2.0))(snapY((envelope.getMinY + envelope.getMaxY) / 2.0))

    def iterator(xmin: Double, ymin: Double, xmax: Double, ymax: Double): Iterator[T] =
      if (empty) { Iterator.empty } else { new TierIterator(xmin, ymin, xmax, ymax) }

    def size(): Int = {
      if (empty) { 0 } else {
        var size = 0
        var i = 0
        while (i <= maxX) {
          var j = 0
          while (j <= maxY) {
            size += buckets(i)(j).size()
            j += 1
          }
          i += 1
        }
        size
      }
    }

    def clear(): Unit = {
      var i = 0
      while (i <= maxX) {
        var j = 0
        while (j <= maxY) {
          buckets(i)(j).clear()
          j += 1
        }
        i += 1
      }
    }

    // Snap a coordinate to a bucket index, clamping out-of-extent values to the edge buckets.
    private def snapX(x: Double): Int = {
      val i = gridSnap.i(x)
      if (i != -1) { i } else if (x < extents.getMinX) { 0 } else { maxX }
    }

    private def snapY(y: Double): Int = {
      val j = gridSnap.j(y)
      if (j != -1) { j } else if (y < extents.getMinY) { 0 } else { maxY }
    }

    /**
     * Iterator over a range of buckets
     *
     * The query window is expanded by the tier's max envelope size, since entries are
     * indexed by centroid and may extend up to half an envelope beyond their bucket.
     */
    class TierIterator (xmin: Double, ymin: Double, xmax: Double, ymax: Double) extends Iterator[T] {

      private val maxi = snapX(xmax + maxSizeX)
      private val minj = snapY(ymin - maxSizeY)
      private val maxj = snapY(ymax + maxSizeY)

      private var i = snapX(xmin - maxSizeX)
      private var j = minj

      private var iter = buckets(i)(j).values.iterator() // note: `.values` is a cached view

      @tailrec
      override final def hasNext: Boolean = iter.hasNext || {
        if (i == maxi && j == maxj) { false } else {
          if (j < maxj) {
            j += 1
          } else {
            j = minj
            i += 1
          }
          iter = buckets(i)(j).values.iterator() // note: `.values` is a cached view
          hasNext
        }
      }

      override def next(): T = iter.next()
    }
  }
}
object SizeSeparatedBucketIndex {
  // TODO https://geomesa.atlassian.net/browse/GEOMESA-2322 these are somewhat arbitrary
  // Default (width, height) tier sizes in degrees, smallest to largest; the final tier
  // covers the whole world so any envelope can be indexed.
  val DefaultTiers: Seq[(Double, Double)] = Seq((1, 1), (4, 4), (32, 32), (360, 180))
}
| ddseapy/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/index/SizeSeparatedBucketIndex.scala | Scala | apache-2.0 | 7,711 |
package edu.rice.habanero.benchmarks.banking
import edu.rice.habanero.actors.{FuncJavaActor, FuncJavaActorState, FuncJavaPool}
import edu.rice.habanero.benchmarks.banking.BankingConfig._
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner, PseudoRandom}
import scala.concurrent.Promise
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
object BankingFuncJavaActorBenchmark {

  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new BankingFuncJavaActorBenchmark)
  }

  // Savina "banking" benchmark wired to the FuncJava actor runtime: a Teller issues
  // transfer requests between Accounts and waits for all of them to complete.
  private final class BankingFuncJavaActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      BankingConfig.parseArgs(args)
    }

    def printArgInfo() {
      BankingConfig.printArgs()
    }

    def runIteration() {
      val master = new Teller(BankingConfig.A, BankingConfig.N)
      master.start()
      master.send(StartMessage.ONLY)

      FuncJavaActorState.awaitTermination()
    }

    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
      if (lastIteration) {
        FuncJavaPool.shutdown()
      }
    }
  }

  // Coordinator actor: owns the accounts, generates numBankings random transfers,
  // and shuts everything down once every transfer has been acknowledged.
  protected class Teller(numAccounts: Int, numBankings: Int) extends FuncJavaActor[AnyRef] {

    private val self = this
    private val accounts = Array.tabulate[Account](numAccounts)((i) => {
      new Account(i, BankingConfig.INITIAL_BALANCE)
    })
    private var numCompletedBankings = 0

    // Fixed seed so every benchmark iteration generates the same workload.
    private val randomGen = new PseudoRandom(123456)

    protected override def onPostStart() {
      accounts.foreach(loopAccount => loopAccount.start())
    }

    override def process(theMsg: AnyRef) {
      theMsg match {

        case sm: BankingConfig.StartMessage =>

          var m = 0
          while (m < numBankings) {
            generateWork()
            m += 1
          }

        case sm: BankingConfig.ReplyMessage =>

          numCompletedBankings += 1
          if (numCompletedBankings == numBankings) {
            accounts.foreach(loopAccount => loopAccount.send(StopMessage.ONLY))
            exit()
          }

        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }

    def generateWork(): Unit = {
      // src is lower than dest id to ensure there is never a deadlock
      val srcAccountId = randomGen.nextInt((accounts.length / 10) * 8)
      var loopId = randomGen.nextInt(accounts.length - srcAccountId)
      if (loopId == 0) {
        loopId += 1
      }

      val destAccountId = srcAccountId + loopId

      val srcAccount = accounts(srcAccountId)
      val destAccount = accounts(destAccountId)
      val amount = Math.abs(randomGen.nextDouble()) * 1000

      val sender = self
      val cm = new CreditMessage(sender, amount, destAccount)
      srcAccount.send(cm)
    }
  }

  // Account actor: applies credits/debits. A credit blocks (via a Promise) until the
  // destination account acknowledges the matching debit, then replies to the teller.
  protected class Account(id: Int, var balance: Double) extends FuncJavaActor[AnyRef] {

    override def process(theMsg: AnyRef) {
      theMsg match {

        case dm: DebitMessage =>

          balance += dm.amount
          val creditor = dm.sender.asInstanceOf[Promise[ReplyMessage]]
          creditor.success(ReplyMessage.ONLY)

        case cm: CreditMessage =>

          balance -= cm.amount
          val teller = cm.sender.asInstanceOf[FuncJavaActor[AnyRef]]
          val sender = Promise[ReplyMessage]()
          val destAccount = cm.recipient.asInstanceOf[Account]
          destAccount.send(new DebitMessage(sender, cm.amount))
          FuncJavaPool.await[ReplyMessage](sender)
          teller.send(ReplyMessage.ONLY)

        case _: StopMessage =>
          exit()

        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }
  }

}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/banking/BankingFuncJavaActorBenchmark.scala | Scala | gpl-2.0 | 3,802 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.version
/**
 * The set of return/attachment types that can form part of an HMRC CT filing,
 * each identified by a stable string key used for persistence.
 */
object HmrcReturns {

  case object Computations extends ReturnType {
    override def key(): String = "Computations"
  }

  case object CT600 extends ReturnType {
    override def key(): String = "CT600"
  }

  case object CT600a extends ReturnType {
    override def key(): String = "CT600a"
  }

  case object CT600j extends ReturnType {
    override def key(): String = "CT600j"
  }

  case object CT600e extends ReturnType {
    override def key(): String = "CT600e"
  }

  case object HmrcUploadedAccounts extends ReturnType with Accounts {
    override def key(): String = "HmrcUploadedAccounts"
  }

  case object HmrcMicroEntityAccounts extends ReturnType with Accounts {
    override def key(): String = "HmrcMicroEntityAccounts"
  }

  case object HmrcStatutoryAccounts extends ReturnType with Accounts {
    override def key(): String = "HmrcStatutoryAccounts"
  }

  /** Every known return type. */
  val returns: Set[ReturnType] = Set(Computations, CT600, CT600a, CT600e, CT600j, HmrcMicroEntityAccounts, HmrcStatutoryAccounts, HmrcUploadedAccounts)

  // Key -> return type index built once, making fromKey a constant-time lookup
  // instead of a linear scan over the set on every call.
  private val returnsByKey: Map[String, ReturnType] = returns.iterator.map(r => r.key() -> r).toMap

  /**
   * Resolves a persisted key back to its return type.
   *
   * @throws IllegalArgumentException if the key does not match any known return type
   */
  def fromKey(key: String): ReturnType = {
    returnsByKey.getOrElse(key, throw new IllegalArgumentException(s"Unknown key for HmrcReturn: $key"))
  }
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/version/HmrcReturns.scala | Scala | apache-2.0 | 1,832 |
package org.tribbloid.spookystuff.session
import java.util.Date
import java.util.concurrent.TimeUnit
import org.openqa.selenium.Dimension
import org.openqa.selenium.remote.SessionNotFoundException
import org.slf4j.LoggerFactory
import org.tribbloid.spookystuff.{Const, SpookyContext}
import org.tribbloid.spookystuff.actions._
import org.tribbloid.spookystuff.utils.Utils
import scala.collection.mutable.ArrayBuffer
//TODO: this should be minimized and delegated to resource pool
// Base class for a scraping session: tracks lifecycle metrics and the actions
// executed so far, and (in subclasses) owns an optional browser driver.
abstract class Session(val spooky: SpookyContext) {

  spooky.metrics.sessionInitialized += 1 // one session constructed

  val startTime: Long = new Date().getTime
  val backtrace: ArrayBuffer[Action] = ArrayBuffer() // actions performed in this session so far

  // Concrete sessions decide whether a real WebDriver backs them (may be null — see NoDriverSession).
  val driver: CleanWebDriver

  def close(): Unit = {
    spooky.metrics.sessionReclaimed += 1
  }

  // Safety net: reclaim the session if it is garbage collected without close().
  // NOTE(review): finalize() is deprecated on modern JVMs — a Cleaner/PhantomReference
  // would be the modern replacement; confirm before changing.
  override def finalize(): Unit = {
    try {
      this.close()
      LoggerFactory.getLogger(this.getClass).info("Session is finalized by GC")
    }
    catch {
      case e: SessionNotFoundException => //already cleaned before
      case e: Throwable =>
        LoggerFactory.getLogger(this.getClass).warn("!!!!!FAIL TO CLEAN UP SESSION!!!!!" + e)
    }
    finally {
      super.finalize()
    }

    // TODO: Runtime.getRuntime.addShutdownHook()
  }
}
// Session backed by a real browser driver. Driver creation is retried and bounded by
// a hard deadline (browser startup can hang); on failure the half-initialized driver
// is torn down so the driver metrics stay balanced.
class DriverSession(override val spooky: SpookyContext) extends Session(spooky){

  override val driver: CleanWebDriver = Utils.retry(Const.localResourceLocalRetry){
    Utils.withDeadline(Const.sessionInitializationTimeout){
      var successful = false
      val driver = spooky.conf.driverFactory.newInstance(null, spooky)
      spooky.metrics.driverInitialized += 1
      try {
        // Apply the configured timeouts and optional window size before handing out the driver.
        driver.manage().timeouts()
          .implicitlyWait(spooky.conf.remoteResourceTimeout.toSeconds, TimeUnit.SECONDS)
          .pageLoadTimeout(spooky.conf.remoteResourceTimeout.toSeconds, TimeUnit.SECONDS)
          .setScriptTimeout(spooky.conf.remoteResourceTimeout.toSeconds, TimeUnit.SECONDS)

        val resolution = spooky.conf.browserResolution
        if (resolution != null) driver.manage().window().setSize(new Dimension(resolution._1, resolution._2))

        successful = true
        driver
      }
      finally {
        // If configuration failed, release the browser immediately so retries don't leak drivers.
        if (!successful){
          driver.close()
          driver.quit()
          spooky.metrics.driverReclaimed += 1
        }
      }
    }
  }

  override def close(): Unit = {
    driver.close()
    driver.quit()
    spooky.metrics.driverReclaimed += 1

    super.close()
  }
}
// Session variant for workloads that never touch a browser: no WebDriver is allocated,
// so `driver` is null by design and callers must not dereference it.
class NoDriverSession(override val spooky: SpookyContext) extends Session(spooky) {
  override val driver: CleanWebDriver = null
} | chenUT/spookystuff | core/src/main/scala/org/tribbloid/spookystuff/session/Session.scala | Scala | apache-2.0 | 2,570 |
package net.fehmicansaglam.tepkin.protocol.command
import net.fehmicansaglam.bson.util.Converters
import net.fehmicansaglam.bson.{BsonDocument, BsonDsl, Implicits}
import BsonDsl._
import Implicits._
import Converters.md5Hex
/**
 * MongoDB `authenticate` command document (challenge-response flow).
 *
 * The key is derived in two MD5 steps: first the password digest
 * `md5(user:mongo:password)`, then `md5(nonce + user + digest)`.
 *
 * @param databaseName database to authenticate against
 * @param username     account name
 * @param password     plain-text password (never sent over the wire)
 * @param nonce        server-issued nonce from the preceding getnonce call
 */
case class Authenticate(databaseName: String,
                        username: String,
                        password: String,
                        nonce: String) extends Command {
  override val command: BsonDocument = {
    val passwordDigest = md5Hex(s"${username}:mongo:${password}")
    val key = md5Hex(nonce + username + passwordDigest)
    ("authenticate" := 1) ~ ("user" := username) ~ ("nonce" := nonce) ~ ("key" := key)
  }
}
| danielwegener/tepkin | tepkin/src/main/scala/net/fehmicansaglam/tepkin/protocol/command/Authenticate.scala | Scala | apache-2.0 | 630 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.api.exceptions
/**
 * An exception for all authenticator related errors.
 *
 * @param msg   human-readable description of the failure
 * @param cause underlying exception, if any (defaults to null, mirroring
 *              the java.lang.Exception convention)
 */
class AuthenticatorException(msg: String, cause: Throwable = null)
extends SilhouetteException(msg, cause)
| mohiva/play-silhouette | silhouette/app/com/mohiva/play/silhouette/api/exceptions/AuthenticatorException.scala | Scala | apache-2.0 | 854 |
/*
* Sonar Scalastyle Plugin
* Copyright (C) 2014 All contributors
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
*/
package com.ncredinburgh.sonar.scalastyle
import org.slf4j.LoggerFactory
import org.sonar.api.profiles.{ProfileDefinition, RulesProfile}
import org.sonar.api.rules.ActiveRule
import org.sonar.api.utils.ValidationMessages
import scala.collection.JavaConversions._
import org.scalastyle.ScalastyleError
import scala.xml.XML
/**
* This class creates the default "Scalastyle" quality profile from Scalastyle's default_config.xml
*/
/**
 * Creates the default "Scalastyle" quality profile from Scalastyle's
 * default_config.xml: every check marked enabled="true" there is activated
 * with its configured parameters.
 */
class ScalastyleQualityProfile(scalastyleRepository: ScalastyleRepository) extends ProfileDefinition {
  // Fixed: logger was previously tagged with ScalastyleRepository's class,
  // which misattributed this class's log lines.
  private val log = LoggerFactory.getLogger(classOf[ScalastyleQualityProfile])
  private val defaultConfigRules = xmlFromClassPath("/default_config.xml") \\ "scalastyle" \ "check"

  override def createProfile(validation: ValidationMessages): RulesProfile = {
    val profile = RulesProfile.create(Constants.ProfileName, Constants.ScalaKey)
    // Only rules flagged enabled="true" in the default configuration count.
    val enabledRules = defaultConfigRules filter (x => (x \ "@enabled").text.equals("true"))
    val defaultKeys = enabledRules map (x => (x \ "@class").text)
    val defaultRules = scalastyleRepository.createRules filter (rule => defaultKeys.contains(rule.getKey))
    val activeRules = defaultRules map (rule => profile.activateRule(rule, rule.getSeverity))
    activeRules.foreach(setParameters)
    profile
  }

  /** Copies the rule's parameters from default_config.xml onto the active rule. */
  def setParameters(activeRule: ActiveRule): Unit = {
    defaultConfigRules.find(x => (x \ "@class").text.equals(activeRule.getRuleKey)) match {
      case Some(rule) =>
        val params = (rule \ "parameters" \ "parameter").map(n => ((n \ "@name").text, n.text)).toMap
        params foreach { case (key, value) => activeRule.setParameter(key, value) }
      case _ =>
        // Fixed grammar of the warning ("could not found" -> "could not be found").
        log.warn("Default rule with key " + activeRule.getRuleKey + " could not be found in default_config.xml")
    }
  }

  private def xmlFromClassPath(s: String) = XML.load(classOf[ScalastyleError].getResourceAsStream(s))
}
| emrehan/sonar-scalastyle | src/main/scala/com/ncredinburgh/sonar/scalastyle/ScalastyleQualityProfile.scala | Scala | lgpl-3.0 | 2,689 |
package breeze.linalg
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.linalg.eig.Eig
import breeze.linalg.eigSym.EigSym
import breeze.linalg.functions.{evdr, svdr}
import breeze.linalg.qr.QR
import breeze.linalg.qrp.QRP
import breeze.linalg.svd.SVD
import breeze.stats.distributions.{RandBasis, MultivariateGaussian}
import org.scalacheck.{Arbitrary,Gen,Prop}
import org.scalatest._
import org.scalatest.junit._
import org.scalatest.prop._
import org.junit.runner.RunWith
import breeze.util.DoubleImplicits
import breeze.numerics._
import breeze.stats.{mean, median, meanAndVariance}
import breeze.{math => bmath}
/**
*
* @author dlwh
*/
@RunWith(classOf[JUnitRunner])
class LinearAlgebraTest extends FunSuite with Checkers with Matchers with DoubleImplicits {
test("kron") {
val a = DenseMatrix((1,2),(3,4))
val b = DenseMatrix((0,5),(6,7))
assert(kron(a,b) === DenseMatrix((0,5,0,10),(6,7,12,14),(0,15,0,20),(18,21,24,28)))
}
test("ranks") {
assert(ranks(DenseVector(1,2,3)).toList === List(1.0,2.0,3.0))
assert(ranks(DenseVector(3,-1,2)).toList === List(3.0,1.0,2.0))
assert(ranks(DenseVector(1,2,3,3)).toList === List(1.0,2.0,3.5,3.5))
assert(ranks(DenseVector(1,2,3,3,3)).toList === List(1.0,2.0,4.0,4.0,4.0))
}
test("cholesky") {
val A = DenseMatrix((1.0,0.0,0.0),(2.0,3.0,0.0),(4.0,5.0,6.0))
val Sigma = A * A.t
assert(cholesky(Sigma) === A)
}
test("eigSym") {
val A = DenseMatrix((9.0,0.0,0.0),(0.0,82.0,0.0),(0.0,0.0,25.0))
val EigSym(lambda, evs) = eigSym(A)
assert(lambda === DenseVector(9.0,25.0,82.0))
assert(evs === DenseMatrix((1.0,0.0,0.0),(0.0,0.0,1.0),(0.0,1.0,0.0)))
}
test("EVDR") {
val A = DenseMatrix((9.0, 0.0, 0.0), (0.0, 82.0, 0.0), (0.0, 0.0, 25.0))
val eigVals = DenseVector(9.0,25.0,82.0)
val eigVect = DenseMatrix((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0))
val EigSym(lambda, evs) = evdr(A, 1)
val idx = argsort(lambda)
idx.zipWithIndex.map{ i =>
lambda(i._1) should be (eigVals(i._2) +- 1E-6)
vectorsNearlyEqual(evs(::, i._1), eigVect(::, i._2), 1E-6)
}
}
test("LUfactorization") {
val (m, _) = LU(DenseMatrix(( 29, 42, -4, 50, 1),
( 20,-31, 32, 21, 2),
(-47,-20, 24,-22, 3),
( 3, 17,-45, 23, 4)))
val aux = DenseMatrix((-47.0000, -20.0000, 24.0000, -22.0000, 3.0000),
( -0.4255, -39.5106, 42.2127, 11.6382, 3.2765),
( -0.6170, -0.7506, 42.4964, 45.1620, 5.3107),
( -0.0638, -0.3979, -0.6275, 54.5694, 8.8282))
matricesNearlyEqual(m, aux, 1E-4)
}
test("det") {
val A = DenseMatrix((9,26,21),(48,3,11),(7,48,26))
det(A) should be (13446.99999999 +- 1e-6)
val B = DenseMatrix((1,2,3),(4,5,-6),(7,8,9))
det(B) should be (-72.0 +- 1e-16)
val C = DenseMatrix((1,2,3),(2,4,6),(0,-1,0)) // 1st and 2nd row linearly dep.
det(C) should be (0.0 +- 1e-6)
val D = DenseMatrix((-1,1,-1),(1,2,3),(3,-10,1))
det(D) should be (-8.0 +- 1E-6)
}
test("logdet") {
val A = DenseMatrix((9,26,21),(48,3,11),(7,48,26))
val (signA, detA) = logdet(A)
detA should be (math.log(13446.99999999) +- 1e-8)
signA should be (1.0 +- 1e-8)
val B = DenseMatrix((1,2,3),(4,5,-6),(7,8,9))
val (signB, detB) = logdet(B)
detB should be (math.log(72.0) +- 1e-15)
assert(signB === -1.0)
val C = DenseMatrix((1,2,3),(2,4,6),(0,-1,0)) // 1st and 2nd row linearly dep.
val (signC, detC) = logdet(C)
detC should be (Double.NegativeInfinity +- 1e-15)
assert(signC === 0.0)
val D = DenseMatrix((-1,1,-1),(1,2,3),(3,-10,1))
val (signD, detD) = logdet(D)
detD should be (math.log(8) +- 1e-8)
assert(signD === -1.0)
}
test("cond") {
val A = DenseMatrix( (1.0, 0.0, -1.0), (0.0, 1.0, 0.0), (1.0, 0.0, 1.0))
assert((cond(A) - math.sqrt(2)).abs < 1E-6, cond(A))
A(0, 0) = -1.0 // row 0 and row 2 are linearly dependent now
assert(cond(A) === Double.PositiveInfinity)
}
test("inv") {
val X = DenseMatrix(( 29.0, 42.0, -4.0, 50.0),
( 20.0,-31.0, 32.0, 21.0),
(-47.0,-20.0, 24.0,-22.0),
( 3.0, 17.0,-45.0, 23.0))
val I = DenseMatrix.eye[Double](4)
matricesNearlyEqual(inv(X) * X, I)
}
test("pinv") {
val X = DenseMatrix((54.0, 95.0), (23.0, 25.0), (70.0, 41.0), (31.0, 19.0))
val I = DenseMatrix.eye[Double](2)
matricesNearlyEqual(pinv(X) * X, I)
}
test("pinv conditioning: #304") {
val m = DenseMatrix((0d,3d,6d), (0d,4d,7d), (0d,5d,9d))
val mi = pinv(m)
val eye: DenseMatrix[Double] = DenseMatrix.eye[Double](3)
eye(0, 0) = 0.0
matricesNearlyEqual(mi * m, eye)
}
test("cross") {
// specific example; with prime elements
val (v1, v2, r) = (DenseVector(13, 3, 7), DenseVector(5, 11, 17), DenseVector(-26, -186, 128))
assert(cross(v1, v2) === r)
assert(cross(v2, v1) === r * -1)
// test using a re-write of the cross-product equation and a scalacheck arbitrary generator
implicit def arb3DVector: Arbitrary[DenseVector[Double]] = Arbitrary {
for {
els <- Gen.containerOfN[Array, Double](3, Gen.chooseNum[Double](-100.0, 100.0))
} yield DenseVector(els(0), els(1), els(2))
}
check {(a: DenseVector[Double], b: DenseVector[Double]) =>
val r = DenseVector(
a(1) * b(2) - a(2) * b(1),
a(2) * b(0) - a(0) * b(2),
a(0) * b(1) - a(1) * b(0))
cross(a, b) == r
cross(b, a) == r * -1.0
}
// test the failure that should occur if a or b does not have 3 components
val v4comp = DenseVector(1,2,3,4)
intercept[IllegalArgumentException] {
cross(v4comp, v4comp)
}
}
test("rank") {
val r1 = DenseMatrix((1.0,2.0,3.0), (1.0,2.0,3.0), (1.0,2.0,3.0)) // rank 1 matrix
val r2 = DenseMatrix((1.0,2.0,3.0), (4.0,5.0,6.0), (7.0,8.0,9.0)) // rank 2 matrix
val r3 = DenseMatrix((1.0,2.0,3.0), (4.0,5.0,6.0), (6.0,8.0,9.0)) // rank 3 matrix
assert(rank(r1) === 1)
assert(rank(r2) === 2)
assert(rank(r3) === 3)
}
test("qr") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0))
val QR(_Q, _R) = qr(A)
assert( trace(_Q.t * _Q).closeTo(_Q.rows) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr A[m, n], m < n") {
val A = DenseMatrix((1.0, 1.0, 1.0, 1.0), (4.0, 2.0, 1.0, 1.0), (16.0, 4.0, 1.0, 1.0))
val QR(_Q, _R) = qr(A)
assert((_Q.rows, _Q.cols) == (A.rows, A.rows))
assert((_R.rows, _R.cols) == (A.rows, A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr A[m, n], m > n") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0), (32.0, 8.0, 1.0))
val QR(_Q, _R) = qr(A)
assert((_Q.rows, _Q.cols) == (A.rows, A.rows))
assert((_R.rows, _R.cols) == (A.rows, A.cols))
assert( trace(_Q.t * _Q).closeTo(max(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr just[QR]") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0))
val QR(_Q, _R) = qr(A)
val _Q2 = qr.justQ(A)
assert (_Q2 === _Q)
assert (_R === qr.justR(A))
}
test("qr float A[m, n], m < n") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f, 1.0f), (16.0f, 4.0f, 1.0f, 1.0f))
val QR(_Q, _R) = qr(A)
assert((_Q.rows, _Q.cols) == (A.rows, A.rows))
assert((_R.rows, _R.cols) == (A.rows, A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0f)
}
val reA: DenseMatrix[Float] = _Q * _R
matricesNearlyEqual_Float(reA, A)
}
test("qr float A[m, n], m > n") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f), (16.0f, 4.0f, 1.0f), (32.0f, 8.0f, 1.0f))
val QR(_Q, _R) = qr(A)
assert((_Q.rows, _Q.cols) == (A.rows, A.rows))
assert((_R.rows, _R.cols) == (A.rows, A.cols))
assert( trace(_Q.t * _Q).closeTo(max(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0f)
}
val reA: DenseMatrix[Float] = _Q * _R
matricesNearlyEqual_Float(reA, A)
}
test("qr float just[QR]") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f), (16.0f, 4.0f, 1.0f))
val QR(_Q, _R) = qr(A)
val _Q2 = qr.justQ(A)
assert (_Q2 === _Q)
assert (_R === qr.justR(A))
}
test("qrp") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0))
val QRP(_QQ, _RR, _P, _) = qrp(A)
val ap = A * convert(_P, Double)
assert(max(abs(_QQ * _RR - ap)) < 1E-8)
}
test("qr reduced A[m, n], m < n") {
val A = DenseMatrix((1.0, 1.0, 1.0, 1.0), (4.0, 2.0, 1.0, 1.0), (16.0, 4.0, 1.0, 1.0))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr reduced A[m, n], m = n") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr reduced A[m, n], m > n") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0), (32.0, 8.0, 1.0))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0)
}
val reA: DenseMatrix[Double] = _Q * _R
matricesNearlyEqual(reA, A)
}
test("qr reduced just[QR]") {
val A = DenseMatrix((1.0, 1.0, 1.0), (4.0, 2.0, 1.0), (16.0, 4.0, 1.0))
val QR(_Q, _R) = qr.reduced(A)
val _Q2 = qr.reduced.justQ(A)
assert (_Q2 === _Q)
assert (_R === qr.reduced.justR(A))
}
test("qr float reduced A[m, n], m < n") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f, 1.0f), (16.0f, 4.0f, 1.0f, 1.0f))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0f)
}
val reA: DenseMatrix[Float] = _Q * _R
matricesNearlyEqual_Float(reA, A)
}
test("qr float reduced A[m, n], m = n") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f), (16.0f, 4.0f, 1.0f))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0f)
}
val reA: DenseMatrix[Float] = _Q * _R
matricesNearlyEqual_Float(reA, A)
}
test("qr float reduced A[m, n], m > n") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f), (16.0f, 4.0f, 1.0f), (32.0f, 8.0f, 1.0f))
val QR(_Q, _R) = qr.reduced(A)
assert((_Q.rows, _Q.cols) == (A.rows, min(A.rows, A.cols)))
assert((_R.rows, _R.cols) == (min(A.rows, A.cols), A.cols))
assert( trace(_Q.t * _Q).closeTo(min(A.rows, A.cols)) )
for(i <- 0 until _R.rows; j <- 0 until i) {
assert(_R(i,j) === 0.0f)
}
val reA: DenseMatrix[Float] = _Q * _R
matricesNearlyEqual_Float(reA, A)
}
test("qr float reduced just[QR]") {
val A = DenseMatrix((1.0f, 1.0f, 1.0f), (4.0f, 2.0f, 1.0f), (16.0f, 4.0f, 1.0f))
val QR(_Q, _R) = qr.reduced(A)
val _Q2 = qr.reduced.justQ(A)
assert (_Q2 === _Q)
assert (_R === qr.reduced.justR(A))
}
test("simple eig test") {
val Eig(w, _, v) = eig(diag(DenseVector(1.0, 2.0, 3.0)))
assert(w === DenseVector(1.0, 2.0, 3.0))
assert(v === diag(DenseVector(1.0, 1.0, 1.0)))
}
test("complex eig test") {
// complex, get it?
val Eig(w, wi, v) = eig(DenseMatrix((1.0, -1.0), (1.0, 1.0)))
assert(w === DenseVector(1.0, 1.0))
assert(wi === DenseVector(1.0, -1.0))
assert(max(abs(v - diag(DenseVector(0.7071067811865475, -0.7071067811865475)))) < 1E-7)
// TODO, we seem to throw out VI... these seems bad...
}
test("eig nans") {
intercept[IllegalArgumentException] {
eig(diag(DenseVector(Double.NaN, 2.0, 3.0)))
}
}
test("svd A(m, n), m > n") {
val m = DenseMatrix((2.0,4.0),(1.0,3.0),(0.0,0.0),(0.0,0.0))
val SVD(u, s, vt) = svd(m)
// u and vt are unitary
trace(u.t * u) should be (u.rows.toDouble +- 1E-5)
trace(vt * vt.t) should be (vt.rows.toDouble +- 1E-5)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Double](m.rows, m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Double] = u * ss * vt
matricesNearlyEqual(reM, m)
}
test("svd A(m, n), m < n") {
val m = DenseMatrix((2.0,4.0),(1.0,3.0),(0.0,0.0),(0.0,0.0)).t
val SVD(u, s, vt) = svd(m)
// u and vt are unitary
trace(u.t * u) should be (u.rows.toDouble +- 1E-5)
trace(vt * vt.t) should be (vt.rows.toDouble +- 1E-5)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Double](m.rows, m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Double] = u * ss * vt
matricesNearlyEqual(reM, m)
}
test("svd float A(m, n), m > n") {
val m: DenseMatrix[Float] = DenseMatrix((2.0f,4.0f),(1.0f,3.0f),(0.0f,0.0f),(0.0f,0.0f))
val SVD(u, s, vt) = svd(m)
// u and vt are unitary
trace(u.t * u) should be (u.rows.toFloat +- 1E-5f)
trace(vt * vt.t) should be (vt.rows.toFloat +- 1E-5f)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Float](m.rows, m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Float] = u * ss * vt
// matricesNearlyEqual(reM, m)
for(i <- 0 until reM.rows; j <- 0 until reM.cols)
reM(i,j) should be (m(i, j) +- 1E-6f)
}
test("svd float A(m, n), m < n") {
val m: DenseMatrix[Float] = DenseMatrix((2.0f,4.0f),(1.0f,3.0f),(0.0f,0.0f),(0.0f,0.0f)).t
val SVD(u, s, vt) = svd(m)
// u and vt are unitary
trace(u.t * u) should be (u.rows.toFloat +- 1E-5f)
trace(vt * vt.t) should be (vt.rows.toFloat +- 1E-5f)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Float](m.rows, m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Float] = u * ss * vt
// matricesNearlyEqual(reM, m)
for(i <- 0 until reM.rows; j <- 0 until reM.cols)
reM(i,j) should be (m(i, j) +- 1E-5f)
}
test("svd reduced A(m, n), m > n") {
val m = DenseMatrix((2.0,4.0),(1.0,3.0),(0.0,0.0),(0.0,0.0))
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toDouble +- 1E-5)
trace(vt * vt.t) should be (vt.rows.toDouble +- 1E-5)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Double](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Double] = u * ss * vt
matricesNearlyEqual(reM, m)
}
test("svd reduced A(m, n), m < n") {
val m = DenseMatrix((2.0,4.0),(1.0,3.0),(0.0,0.0),(0.0,0.0)).t
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toDouble +- 1E-5)
trace(vt * vt.t) should be (vt.rows.toDouble +- 1E-5)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Double](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Double] = u * ss * vt
matricesNearlyEqual(reM, m)
}
test("svd reduced A(m, n), m = n") {
val m = DenseMatrix((2.0,4.0),(1.0,3.0))
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toDouble +- 1E-5)
trace(vt * vt.t) should be (vt.rows.toDouble +- 1E-5)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Double](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Double] = u * ss * vt
matricesNearlyEqual(reM, m)
}
test("svd reduced float A(m, n), m > n") {
val m = DenseMatrix((2.0f,4.0f),(1.0f,3.0f),(0.0f,0.0f),(0.0f,0.0f))
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toFloat +- 1E-5f)
trace(vt * vt.t) should be (vt.rows.toFloat +- 1E-5f)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Float](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Float] = u * ss * vt
// matricesNearlyEqual(reM, m)
for(i <- 0 until reM.rows; j <- 0 until reM.cols)
reM(i,j) should be (m(i, j) +- 1E-6f)
}
test("svd reduced float A(m, n), m = n") {
val m = DenseMatrix((2.0f,4.0f),(1.0f,3.0f))
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toFloat +- 1E-5f)
trace(vt * vt.t) should be (vt.rows.toFloat +- 1E-5f)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Float](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Float] = u * ss * vt
// matricesNearlyEqual(reM, m)
for(i <- 0 until reM.rows; j <- 0 until reM.cols)
reM(i,j) should be (m(i, j) +- 1E-5f)
}
test("svd reduced float A(m, n), m < n") {
val m = DenseMatrix((2.0f,4.0f),(1.0f,3.0f),(0.0f,0.0f),(0.0f,0.0f)).t
val SVD(u, s, vt) = svd.reduced(m)
// u and vt are unitary
trace(u.t * u) should be (u.cols.toFloat +- 1E-5f)
trace(vt * vt.t) should be (vt.rows.toFloat +- 1E-5f)
// s is sorted by size of singular value, and be nonnegative
for(i <- 1 until s.length) {
assert(s(i) <= s(i-1), s"s($i) > s(${i-1}): ${s(i)} > ${s(i-1)}")
assert(s(i) >= 0, s"s($i) < 0: ${s(i)}")
}
val ss = DenseMatrix.zeros[Float](m.rows min m.cols, m.rows min m.cols)
diag(ss(0 until s.length, 0 until s.length)) := s
val reM: DenseMatrix[Float] = u * ss * vt
// matricesNearlyEqual(reM, m)
for(i <- 0 until reM.rows; j <- 0 until reM.cols)
reM(i,j) should be (m(i, j) +- 1E-5f)
}
test("svd and svdr singular values are equal") {
val a = DenseMatrix(
(2.0, 4.0, 0.0),
(1.0, 3.0, 4.0),
(5.0, 0.0, 0.9),
(3.0, 5.0, 0.5),
(7.5, 1.0, 6.0),
(0.0, 7.0, 0.0)
)
for (m <- List(a, a.t)) {
val SVD(u, s, v) = svd.reduced(m)
val SVD(ur, sr, vr) = svdr(m, m.rows min m.cols)
vectorsNearlyEqual(s, sr)
matricesNearlyEqual(abs(u), abs(ur))
matricesNearlyEqual(abs(v), abs(vr))
}
}
test("svdr A[m, n], m < n") {
val m = DenseMatrix(
(2.0, 4.0, 0.0),
(1.0, 3.0, 4.0),
(5.0, 0.0, 0.9),
(3.0, 5.0, 0.5),
(7.5, 1.0, 6.0),
(0.0, 7.0, 0.0)
).t
val SVD(u, sr, vt) = svdr(m, m.rows min m.cols)
val reM = u * diag(sr) * vt
matricesNearlyEqual(reM, m)
}
test("svdr A[m, n], m > n") {
val m = DenseMatrix(
(2.0, 4.0, 0.0),
(1.0, 3.0, 4.0),
(5.0, 0.0, 0.9),
(3.0, 5.0, 0.5),
(7.5, 1.0, 6.0),
(0.0, 7.0, 0.0)
)
val SVD(u, sr, vt) = svdr(m, m.rows min m.cols)
val reM = u * diag(sr) * vt
matricesNearlyEqual(reM, m)
}
test("csc svd"){
val m1 = DenseMatrix((2.0,4.0,0.0,1.0,2.0),(1.0,0.0,2.0,1.0,0.0),
(1.0,3.0,2.0,1.0,9.0),(0.0,0.0,2.0,0.0,5.0),(0.0,1.5,0.0,0.0,5.0),(1.5,0.0,2.0,0.0,5.0))
val m2 = CSCMatrix((2.0,4.0,0.0,1.0,2.0),(1.0,0.0,2.0,1.0,0.0),
(1.0,3.0,2.0,1.0,9.0),(0.0,0.0,2.0,0.0,5.0),(0.0,1.5,0.0,0.0,5.0),(1.5,0.0,2.0,0.0,5.0))
def checkCols(m1 : DenseMatrix[Double], m2 : DenseMatrix[Double]) = {
for (i <- 0 until m1.cols) {
val v1 = if (m1(::,i).valueAt(0) > 0) m1(::,i) else -m1(::,i)
val v2 = if (m2(::,i).valueAt(0) > 0) m2(::,i) else -m2(::,i)
assert(max(abs(v1 - v2)) < 1E-5)
assert(abs(norm(v1) - 1.0) < 1E-5)
assert(abs(norm(v2) - 1.0) < 1E-5)
}
}
val SVD(u1, s1, vt1) = svd(m1)
val SVD(u2, s2, vt2) = svd(m2,2)
assert(max(abs(s1.slice(0,2) - s2)) < 1E-5)
checkCols(u1(::, 0 until 2), u2)
checkCols(vt1(0 until 2, ::).t, vt2.t)
val SVD(u1t, s1t, vt1t) = svd(m1.t)
val SVD(u2t, s2t, vt2t) = svd(m2.t,2)
assert(max(abs(s1t.slice(0,2) - s2t)) < 1E-5)
checkCols(u1t(::, 0 until 2), u2t)
checkCols(vt1t(0 until 2, ::).t, vt2t.t)
}
test("small pow test") {
val X = DenseMatrix(( .7, .2), (.3, .8))
assert(mpow(X, 1) === X)
assert( max(abs(mpow(X, .5) - DenseMatrix((.82426, 0.11716), (.17574, 0.88284)))) < 1E-5, mpow(X, .5))
}
test("diff test") {
val testThreshold = 1.0E-15
val xDouble = DenseVector( .7, .2, .3, .8)
assert( norm( diff(xDouble) - DenseVector(-0.5, 0.1, 0.5) ) < testThreshold)
assert( norm( diff(xDouble, 2) - DenseVector(0.6, 0.4) ) < testThreshold)
val x1 = DenseVector( .7)
assert(diff(x1) == DenseVector[Double]())
val xInt = DenseVector( 7, 2, 3, 8)
assert(diff(xInt, 3) == DenseVector(-2))
}
test("diff slice vector test") {
  val testThreshold = 1.0E-15
  // diff over an index-sliced vector must match diff over the dense equivalent.
  val xDouble = {
    val temp = DenseVector( .7, .2, .3, .8)
    temp(IndexedSeq(0,1,2,3))
  }
  assert( norm( diff(xDouble) - DenseVector(-0.5, 0.1, 0.5) ) < testThreshold)
  val x1 = DenseVector( .7)
  assert(diff(x1) == DenseVector[Double]())
  val vec = DenseVector(1,2,3,4,5,6,7,8,9,10)
  val seq = vec.findAll(_ % 2 == 0) //Even Numbers
  val slice = new SliceVector(vec,seq) //Note: No companion object, requires new
  val difference = diff(slice)
  // Fixed: the result was previously computed but never checked, so this part
  // of the test could not fail. Evens are 2,4,6,8,10 => successive diffs are 2.
  assert(difference == DenseVector(2, 2, 2, 2))
}
test("reverse test") {
val xDouble = DenseVector( .7, .2, .3, .8)
assert( reverse(xDouble) == DenseVector(.8, .3, .2, .7) )
val xEmpty = DenseVector[Long]()
assert( reverse(xEmpty) == DenseVector[Long]() )
val a = SparseVector.tabulate(5)(identity)
val b = SparseVector.zeros[Double](5)
assert(reverse(a) === SparseVector.tabulate(5)((i: Int) => 4 - i))
assert(reverse(b) === b)
b(2) = 2.0
assert(reverse(b) === b)
b(0) = 0.1
assert(reverse(b) === SparseVector[Double](5)((2,2.0),(4,0.1)))
b(4) = 4.0
assert(reverse(b) === SparseVector[Double](5)((0,4.0),(2,2.0),(4,0.1)))
}
test("reshape test") {
val asv = SparseVector.tabulate(6)(identity(_) + 1)
assert(reshape(asv,2,3) === CSCMatrix((1,2,3),(4,5,6)))
assert(reshape(asv,3,2) === CSCMatrix((1,2),(3,4),(5,6)))
val bsv = SparseVector.zeros[Double](6)
assert(reshape(bsv,2,3) === CSCMatrix.zeros[Double](2,3))
val acsc = CSCMatrix.tabulate(2,3)((i,j) => (i+1) * (j+1) + (i+1))
val ad = DenseMatrix.tabulate(2,3)((i,j) => (i+1) * (j+1) + (i+1))
assert(reshape(acsc,3,2).toDense == reshape(ad,3,2))
assert(reshape(acsc,1,6).toDense == reshape(ad,1,6))
assert(reshape(acsc,6,1).toDense == reshape(ad,6,1))
val bcsc = CSCMatrix.zeros[Int](5,3)
val rcsc = CSCMatrix.zeros[Int](3,5)
val colRowGen = for {
r <- Gen.choose(0,4)
c <- Gen.choose(0,2)
} yield (r,c)
val pcLists = Prop.forAll(colRowGen){ case (r: Int,c: Int) =>
val dld = c * 5 + r
bcsc(r, c) = dld
rcsc(dld % 3, dld / 3) = dld
reshape(bcsc, 3, 5) === rcsc &&
reshape(rcsc, 5, 3) === bcsc
}
check(pcLists)
}
test("diag test") {
val testDV = DenseVector(0.1,1.1,2.1,3.1,4.1)
val testDM = DenseMatrix.tabulate[Double](5,5)((r,c) => if (r == c) r.toDouble + 0.1 else 0.0)
val testCSC = CSCMatrix.tabulate[Double](5,5)((r,c) => if (r == c) r.toDouble + 0.1 else 0.0)
val testSV = SparseVector(0.1,1.1,2.1,3.1,4.1)
assert(diag(testDV) === testDM)
assert(diag(testDM) === testDV)
assert(diag(testSV) === testCSC)
assert(diag(testCSC) === testSV)
}
test("accumulate test") {
val xDouble = DenseVector( .7, .2, .3, .8)
assert( norm(accumulate(xDouble) - DenseVector(.7, .9, 1.2, 2.0)) < 1.0E-15 )
val xInt = DenseVector( 7, 2, 3, 8)
assert( norm(accumulate(xInt) - DenseVector(7, 9, 12, 20)) < 1.0E-15 )
val xEmpty = DenseVector[Long]()
assert( accumulate(xEmpty) == DenseVector[Long]() )
}
/**
* Test based on the values in Lindsay Smith's tutorial:
*
* http://www.cs.otago.ac.nz/cosc453/student_tutorials/principal_components.pdf
*/
test("pca") {
// The data
val smithData = DenseMatrix(
(2.5,2.4), (0.5,0.7), (2.2,2.9), (1.9,2.2), (3.1,3.0),
(2.3,2.7), (2.0,1.6), (1.0,1.1), (1.5,1.6), (1.1,0.9))
// The correct answers bundled up.
object smithTruth {
val centeredData = DenseMatrix(
(0.69 , 0.4900000000000002 ),
(-1.31 , -1.2099999999999997 ),
(0.3900000000000001 , 0.9900000000000002 ),
(0.08999999999999986 , 0.2900000000000005 ),
(1.29 , 1.0900000000000003 ),
(0.48999999999999977 , 0.7900000000000005 ),
(0.18999999999999995 , -0.3099999999999996 ),
(-0.81 , -0.8099999999999996 ),
(-0.31000000000000005, -0.3099999999999996 ),
(-0.71 , -1.0099999999999998 ))
val covmat = DenseMatrix(
(0.6165555555555556, 0.6154444444444445),
(0.6154444444444445, 0.7165555555555555))
val eigenvalues =
DenseVector(1.2840277121727839, 0.04908339893832735)
val eigenvectors = DenseMatrix(
(-0.6778733985280118, -0.735178655544408),
(-0.735178655544408, 0.6778733985280118))
val scores = DenseMatrix(
( -0.8279701862010882, -0.17511530704691552 ),
(1.7775803252804288 , 0.14285722654428046 ),
(-0.9921974944148888 , 0.3843749888804126 ),
(-0.27421041597539964, 0.13041720657412714 ),
(-1.6758014186445402 , -0.2094984612567533 ),
(-0.9129491031588082 , 0.17528244362036988 ),
(0.099109437498444 , -0.34982469809712086 ),
(1.1445721637986597 , 0.04641725818328124 ),
(0.43804613676244986 , 0.017764629675083132 ),
(1.2238205550547403 , -0.16267528707676204 ))
}
val pca = princomp(smithData)
matricesNearlyEqual(smithData(*,::) - pca.center, smithTruth.centeredData)
matricesNearlyEqual(pca.covmat, smithTruth.covmat)
matricesNearlyEqual(pca.covmat, smithTruth.covmat)
vectorsNearlyEqual(pca.eigenvalues, smithTruth.eigenvalues)
matricesNearlyEqual(pca.loadings, smithTruth.eigenvectors)
matricesNearlyEqual(pca.scores, smithTruth.scores)
}
/** Element-wise approximate equality for dense double vectors.
  * Fails with a ScalaTest tolerance mismatch on the first differing entry. */
def vectorsNearlyEqual(A: DenseVector[Double], B: DenseVector[Double], threshold: Double = 1E-6): Unit = {
  // Guard added: previously a longer B silently passed (only A's range was checked).
  assert(A.length == B.length, s"vector lengths differ: ${A.length} vs ${B.length}")
  for (i <- 0 until A.length)
    A(i) should be (B(i) +- threshold)
}

/** Element-wise approximate equality for dense double matrices. */
def matricesNearlyEqual(A: DenseMatrix[Double], B: DenseMatrix[Double], threshold: Double = 1E-6): Unit = {
  assert(A.rows == B.rows && A.cols == B.cols,
    s"matrix dimensions differ: ${A.rows}x${A.cols} vs ${B.rows}x${B.cols}")
  for (i <- 0 until A.rows; j <- 0 until A.cols)
    A(i, j) should be (B(i, j) +- threshold)
}

/** Element-wise approximate equality for dense float matrices. */
def matricesNearlyEqual_Float(A: DenseMatrix[Float], B: DenseMatrix[Float], threshold: Float = 1E-6f): Unit = {
  assert(A.rows == B.rows && A.cols == B.cols,
    s"matrix dimensions differ: ${A.rows}x${A.cols} vs ${B.rows}x${B.cols}")
  for (i <- 0 until A.rows; j <- 0 until A.cols)
    A(i, j) should be (B(i, j) +- threshold)
}
test("RangeExtender test") {
  // Exercises breeze's RangeExtender support when slicing a DenseVector:
  // for *inclusive* ranges a negative start/end counts back from the end of
  // the vector (-1 is the last element), and a negative step reverses order.
  val xInt = DenseVector(0, 1, 2, 3, 4, 5)
  val rangeIncl = 3 to 5
  val rangeInclN1 = 3 to -1
  val rangeInclN2 = -3 to 5
  val rangeInclN3 = -1 to 3 by -1
  val rangeInclN4 = 5 to -3 by -1
  val rangeInclN5 = -1 to -3 by -1
  val rangeExcl = 0 until 5
  val rangeExclN1 = 3 until -1
  val rangeExclN2 = -1 until 3 by -1
  assert( xInt( rangeIncl ) == DenseVector(3, 4, 5), "range inclusive" )
  // println( rangeInclN1.start + " " + rangeInclN1.end + " " + rangeInclN1.step)
  // val rangeInclN1r = rangeInclN1.getRangeWithoutNegativeIndexes( xInt.length )
  // println( rangeInclN1r.start + " " + rangeInclN1r.end + " " + rangeInclN1r.step)
  assert( xInt( rangeInclN1 ) == DenseVector(3, 4, 5), "range inclusive, negative end" )
  assert( xInt( rangeInclN2 ) == DenseVector(3, 4, 5), "range inclusive, negative start" )
  assert( xInt( rangeInclN3 ) == DenseVector(5, 4, 3), "range inclusive, negative start/step" )
  assert( xInt( rangeInclN4 ) == DenseVector(5, 4, 3), "range inclusive, negative end/step" )
  assert( xInt( rangeInclN5 ) == DenseVector(5, 4, 3), "range inclusive, negative start/end/step" )
  assert( xInt( rangeExcl ) == DenseVector(0, 1, 2, 3, 4), "range exclusive" )
  // Exclusive ranges with negative indexes are rejected by the slicing code.
  intercept[IllegalArgumentException]{ xInt(rangeExclN1) }
  intercept[IllegalArgumentException]{ xInt(rangeExclN2) }
}
test("#356 symmetric matrix sensitivity") {
  // Regression test for issue #356: A is symmetric positive definite by
  // construction (identity plus the rank-one update q * q.t), but inv(A) is
  // only *numerically* symmetric. Constructing a MultivariateGaussian from it
  // used to throw; the test passes as long as no exception is raised.
  val n = 20
  val q = DenseVector.rand[Double](n, RandBasis.mt0.uniform)
  val A = DenseMatrix.eye[Double](n) + q * q.t
  val B = inv(A)
  val u = DenseVector.zeros[Double](n)
  // This construction is the step that used to throw.
  val D = MultivariateGaussian(u, B)
}
test("#410 sum colls") {
  // Regression test for issue #410: sum must accept a plain collection
  // (here an Iterator) of vectors, not just a single DenseVector.
  val vectors = Iterator.tabulate(100)(n => DenseVector(n))
  val expectedTotal = DenseVector((0 until 100).sum)
  assert(sum(vectors) == expectedTotal)
}
}
| claydonkey/breeze | math/src/test/scala/breeze/linalg/LinearAlgebraTest.scala | Scala | apache-2.0 | 32,536 |
package nozzle.auth
import scala.concurrent.ExecutionContext
import scalaz._
import Scalaz._
import scalaz.EitherT._
import nozzle.webresult._
import nozzle.monadicctrl._
package monadicctrl {
  /**
   * Controller exposing authentication as a monadic control flow.
   *
   * `A` is kept as a type parameter (rather than using `Authenticator` directly)
   * so the result type can refer to the authenticator's own `AuthCredentials`
   * member type.
   */
  class Controller[A <: Authenticator](authenticator: A)(implicit executionContext: ExecutionContext) {
    /**
     * Attempts a login using the authenticator's own credential type.
     *
     * The authenticator's `Option` result is lifted into an `EitherT`: a
     * successful login yields the issued credentials on the right, while `None`
     * becomes `WebError.InvalidCredentials` on the left.
     */
    def login(credentials: authenticator.type#LoginCredentials): FutureCtrlFlow[A#AuthCredentials] = eitherT {
      authenticator.login(credentials).map {
        case Some((_, authCredentials)) => authCredentials.right
        case None => WebError.InvalidCredentials.left
      }
    }
  }
}
| buildo/nozzle | src/main/scala/auth/monadicCtrl.scala | Scala | mit | 628 |
package io.taig.android.system_service
// Aggregates all syntax extensions from `syntax.all`. Either mix the trait
// into a class or `import implicits._` from the companion object for the
// same set of enrichments without inheritance.
trait implicits extends syntax.all
object implicits extends implicits
| Taig/Toolbelt | system-service/src/main/scala/io/taig/android/system_service/implicits.scala | Scala | mit | 111 |
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.codeInsight.daemon.impl.analysis.{FileHighlightingSetting, HighlightLevelUtil}
import com.intellij.openapi.actionSystem.{AnActionEvent, CustomShortcutSet}
import com.intellij.openapi.editor.event.{DocumentEvent, DocumentListener}
import com.intellij.openapi.fileTypes.LanguageFileType
import com.intellij.openapi.project.Project
import com.intellij.openapi.ui.ValidationInfo
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi.{util => _, _}
import com.intellij.refactoring.changeSignature.{CallerChooserBase, ChangeSignatureDialogBase, ParameterTableModelItemBase}
import com.intellij.refactoring.ui.{CodeFragmentTableCellEditorBase, StringTableCellEditor, VisibilityPanelBase}
import com.intellij.refactoring.{BaseRefactoringProcessor, RefactoringBundle}
import com.intellij.ui.table.{JBTable, TableView}
import com.intellij.ui.treeStructure.Tree
import com.intellij.ui.{util => _, _}
import com.intellij.util.Consumer
import com.intellij.util.ui.table.{JBListTable, JBTableRowEditor, JBTableRowRenderer}
import com.intellij.util.ui.{StartupUiUtil, UIUtil}
import org.jetbrains.plugins.scala.icons.Icons
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeFromText
import org.jetbrains.plugins.scala.lang.psi.impl.source.ScalaCodeFragment
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.refactoring.ScalaNamesValidator.isIdentifier
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
import org.jetbrains.plugins.scala.lang.refactoring.extractMethod.ScalaExtractMethodUtils
import org.jetbrains.plugins.scala.lang.refactoring.ui.ScalaComboBoxVisibilityPanel
import org.jetbrains.plugins.scala.settings.annotations._
import org.jetbrains.plugins.scala.util.TypeAnnotationUtil
import java.awt._
import java.awt.event.ActionEvent
import java.util
import javax.swing._
import javax.swing.border.MatteBorder
import javax.swing.event.{ChangeEvent, HyperlinkEvent}
import javax.swing.table.TableCellEditor
import scala.collection.mutable
import scala.jdk.CollectionConverters._
//noinspection ConvertNullInitializerToUnderscore
class ScalaChangeSignatureDialog(val method: ScalaMethodDescriptor,
                                 val needSpecifyTypeChb: Boolean)
                                (implicit val project: Project)
  extends ChangeSignatureDialogBase[ScalaParameterInfo,
    ScFunction,
    String,
    ScalaMethodDescriptor,
    ScalaParameterTableModelItem,
    ScalaParameterTableModel](project, method, false, method.fun) {
  // Must not be initialized with any value!
  // Will be set in createNorthPanel, which is called by super's constructor.
  // Any initializer other than _ will override the values set by that method.
  private var defaultValuesUsagePanel: DefaultValuesUsagePanel = _
  private var mySpecifyTypeChb: JCheckBox = _

  override def getFileType: LanguageFileType = ScalaFileType.INSTANCE

  // Caller propagation is not supported for this dialog (see
  // mayPropagateParameters below), so no chooser is created.
  override def createCallerChooser(title: String, treeToReuse: Tree, callback: Consumer[util.Set[ScFunction]]): CallerChooserBase[ScFunction] = null

  /** Assembles a [[ScalaChangeInfo]] from the current dialog state and wraps it in a processor. */
  override def createRefactoringProcessor(): BaseRefactoringProcessor = {
    val parameters = splittedItems.map(_.map(_.parameter))
    val changeInfo =
      ScalaChangeInfo(getVisibility, method.fun, getMethodName, returnType, parameters, isAddDefaultArgs, Some(mySpecifyTypeChb.isSelected))
    new ScalaChangeSignatureProcessor(changeInfo)
  }

  override def createNorthPanel(): JComponent = {
    val panel = super.createNorthPanel()
    // Special method names (apply/unapply/...) cannot be renamed, so the name
    // field is disabled for them.
    getMethodName match {
      case "apply" | "unapply" | "unapplySeq" | "update" => myNameField.setEnabled(false)
      case _ =>
    }
    val holder = new JPanel(new BorderLayout())
    holder.add(panel, BorderLayout.NORTH)
    holder.add(createDefaultArgumentPanel(), BorderLayout.LINE_START)
    holder
  }

  /** Builds the "default value" options row; also initializes [[defaultValuesUsagePanel]]. */
  protected def createDefaultArgumentPanel(): JPanel = {
    val optionsPanel = new JPanel(new BorderLayout())
    val label = new JLabel(ScalaBundle.message("parameter.label.default.value"))
    defaultValuesUsagePanel = new DefaultValuesUsagePanel("")
    val holder = new JPanel()
    holder.add(label)
    holder.add(defaultValuesUsagePanel)
    optionsPanel.add(holder)
    optionsPanel
  }

  override def createOptionsPanel(): JComponent = {
    val panel = super.createOptionsPanel() //to initialize fields in base class
    val holder: JPanel = new JPanel
    holder.setLayout(new FlowLayout(FlowLayout.LEFT, 5, 0))
    val specifyTypePanel = createTypePanel()
    panel.add(specifyTypePanel)
    // The whole options panel is only shown when the "specify type" checkbox
    // was requested by the caller.
    panel.setVisible(needSpecifyTypeChb)
    myPropagateParamChangesButton.setVisible(false)
    panel
  }

  override def createVisibilityControl(): VisibilityPanelBase[String] = new ScalaComboBoxVisibilityPanel(getVisibility)

  override def createParametersInfoModel(method: ScalaMethodDescriptor): ScalaParameterTableModel = {
    new ScalaParameterTableModel(method.fun, method.fun, method)
  }

  override protected def createParametersPanel(hasTabsInDialog: Boolean): JPanel = {
    myParametersTable = createParametersTable()
    myParametersList = createParametersListTable()
    decorateParameterTable(myParametersList.getTable)
  }

  /**
   * Builds the parameter table. The overrides wire editor document listeners so
   * that edits update the model and the signature preview live, and make sure
   * those listeners are detached again when the cell editor is removed.
   */
  protected def createParametersTable(): TableView[ScalaParameterTableModelItem] = {
    new TableView[ScalaParameterTableModelItem](myParametersTableModel) {
      override def removeEditor(): Unit = {
        // Detach the document listeners added in prepareEditor before the
        // editor goes away, to avoid leaking them.
        clearEditorListeners()
        super.removeEditor()
      }
      override def editingStopped(e: ChangeEvent): Unit = {
        super.editingStopped(e)
        repaint()
      }
      private def clearEditorListeners(): Unit = {
        val editor: TableCellEditor = getCellEditor
        editor match {
          case ed: StringTableCellEditor =>
            ed.clearListeners()
          case _ => editor match {
            case base: CodeFragmentTableCellEditorBase =>
              base.clearListeners()
            case _ =>
          }
        }
      }
      override def prepareEditor(editor: TableCellEditor, row: Int, column: Int): Component = {
        // Push every keystroke in the cell editor into the table model and
        // refresh the signature preview immediately.
        val listener: DocumentListener = new DocumentListener() {
          override def documentChanged(e: DocumentEvent): Unit = {
            val ed: TableCellEditor = parametersTable.getCellEditor
            if (ed != null) {
              val editorValue: AnyRef = ed.getCellEditorValue
              myParametersTableModel.setValueAtWithoutUpdate(editorValue, row, column)
              updateSignature()
            }
          }
        }
        editor match {
          case ed: StringTableCellEditor =>
            ed.addDocumentListener(listener)
          case _ => editor match {
            case base: CodeFragmentTableCellEditorBase =>
              base.addDocumentListener(listener)
            case _ =>
          }
        }
        super.prepareEditor(editor, row, column)
      }
      override def editingCanceled(e: ChangeEvent): Unit = {
        super.editingCanceled(e)
      }
    }
  }

  override def createReturnTypeCodeFragment(): PsiCodeFragment = {
    val text = method.returnTypeText
    val child = method.fun
    val fragment = ScalaCodeFragment(text, child.getParent, child)
    // The fragment is only used as an editable type text; don't highlight it.
    HighlightLevelUtil.forceRootHighlighting(fragment, FileHighlightingSetting.SKIP_HIGHLIGHTING)
    fragment
  }

  override protected def createParametersListTable: ParametersListTable = new ScalaParametersListTable()

  protected override def getTableEditor(t: JTable, item: ParameterTableModelItemBase[ScalaParameterInfo]): JBTableRowEditor = {
    val scalaItem = item match {
      case si: ScalaParameterTableModelItem => si
      case _ => throw new IllegalArgumentException
    }
    new ScalaChangeSignatureRowEditor(scalaItem, this)
  }

  /** True when the project's type-annotation settings require an explicit result type for this method. */
  private def needsTypeAnnotation(method: ScalaMethodDescriptor, visibilityString: String = getVisibility): Boolean = {
    val element = method.fun
    ScalaTypeAnnotationSettings(element.getProject).isTypeAnnotationRequiredFor(
      Declaration(element, Visibility(visibilityString)),
      Location(element),
      Some(Definition(element))
    )
  }

  /** Renders the signature preview text from the current dialog state. */
  override def calculateSignature(): String = {
    def nameAndType(item: ScalaParameterTableModelItem) = {
      if (item.parameter.name == "") ""
      else ScalaExtractMethodUtils.typedName(item.parameter.name, item.typeText)
    }
    def itemText(item: ScalaParameterTableModelItem) = item.keywordsAndAnnotations + nameAndType(item)
    val visibility = getVisibility
    val prefix = method.fun match {
      case fun: ScFunction =>
        val name = if (!fun.isConstructor) getMethodName else "this"
        s"$visibility def $name"
      case pc: ScPrimaryConstructor => s"class ${pc.getClassNameText} $getVisibility"
      case _ => ""
    }
    val paramsText = splittedItems.map(_.map(itemText).mkString("(", ", ", ")")).mkString
    val retTypeText = returnTypeText
    // Whether a result type annotation should appear in the preview: forced on
    // when the checkbox isn't shown, otherwise taken from the checkbox (or the
    // settings default before the checkbox exists).
    val needType =
      if (!needSpecifyTypeChb) true
      else if (mySpecifyTypeChb != null) mySpecifyTypeChb.isSelected
      else needsTypeAnnotation(method, visibility)
    val typeAnnot =
      if (retTypeText.isEmpty || !needType) ""
      else s": $retTypeText"
    s"$prefix$paramsText$typeAnnot"
  }

  /**
   * Collects validation problems for the current input.
   * Returns null when there are no problems (the platform treats null as "valid").
   */
  override def validateAndCommitData(): String = {
    val paramItems = parameterItems
    val problems = mutable.ListBuffer.empty[String]
    if (myReturnTypeCodeFragment != null) {
      if (myReturnTypeCodeFragment.getText.isEmpty)
        problems += RefactoringBundle.message("changeSignature.no.return.type")
      else if (returnTypeText.isEmpty)
        problems += RefactoringBundle.message("changeSignature.wrong.return.type", myReturnTypeCodeFragment.getText)
    }
    val paramNames = paramItems.map(_.parameter.name)
    // The method name only needs validating when it is editable (see createNorthPanel).
    val names = if (myNameField.isEnabled) getMethodName +: paramNames else paramNames
    problems ++= names.collect {
      case name if !isIdentifier(name) => s"$name is not a valid scala identifier"
    }
    // Report each duplicated parameter name once (idx < idx2 avoids the
    // symmetric and reflexive pairs).
    val namesWithIndices = paramNames.zipWithIndex
    for {
      (name, idx) <- namesWithIndices
      (name2, idx2) <- namesWithIndices
      if name == name2 && idx < idx2
    } {
      problems += ScalaBundle.message("change.signature.parameters.same.name.{0}", name)
    }
    paramItems.foreach(_.updateType(problems))
    // A repeated (vararg) parameter is only legal as the last one of its clause.
    paramItems.foreach {
      case item if item.parameter.isRepeatedParameter && !splittedItems.flatMap(_.lastOption).contains(item) =>
        problems += ScalaBundle.message("change.signature.vararg.should.be.last.in.clause")
      case _ =>
    }
    if (problems.isEmpty) null
    else problems.distinct.mkString("\\n")
  }

  protected override def doValidate(): ValidationInfo = {
    if (!getTableComponent.isEditing) {
      // Every newly added parameter (oldIndex < 0) must have a default value.
      for {
        item <- parameterItems
        if item.parameter.oldIndex < 0 && StringUtil.isEmpty(item.defaultValueCodeFragment.getText)
      } {
        return new ValidationInfo(
          if (isAddDefaultArgs) ScalaBundle.message("default.value.is.missing.default.arguments")
          else ScalaBundle.message("default.value.is.missing.method.calls")
        )
      }
    }
    super.doValidate()
  }

  override def updateSignatureAlarmFired(): Unit = {
    super.updateSignatureAlarmFired()
    if (getDefaultValuesPanel != null) {
      // Vararg parameters ("T*") cannot get default arguments, so force the
      // "modify calls" mode while any are present.
      if (parameterItems.exists(_.typeText.endsWith("*"))) getDefaultValuesPanel.forceIsModifyCalls()
      else getDefaultValuesPanel.release()
    }
  }

  override def dispose(): Unit = {
    myParametersTableModel.clear()
    super.dispose()
  }

  override def mayPropagateParameters(): Boolean = false

  override def isListTableViewSupported: Boolean = true

  override protected def postponeValidation: Boolean = false

  def signatureUpdater: ChangeSignatureDialogBase[ScalaParameterInfo, ScFunction, String, ScalaMethodDescriptor, ScalaParameterTableModelItem, ScalaParameterTableModel]#UpdateSignatureListener = mySignatureUpdater

  def getTypesMaxLength: Int =
    parameterItems.map(_.typeText.length).maxOption.getOrElse(0)

  def getNamesMaxLength: Int = {
    parameterItems.map(_.parameter.getName.length).maxOption.getOrElse(0)
  }

  def parametersTable: JBTable = Option(myParametersList).map(_.getTable).orNull

  protected def getDefaultValuesPanel: DefaultValuesUsagePanel = defaultValuesUsagePanel

  protected def isAddDefaultArgs: Boolean = getDefaultValuesPanel.isAddDefaultArgs

  protected def returnTypeText: String = Option(myReturnTypeCodeFragment).fold("")(_.getText)

  protected def returnType: ScType =
    Option(myReturnTypeCodeFragment).flatMap { fragment =>
      createTypeFromText(fragment.getText, fragment.getContext, fragment)
    }.getOrElse(api.Any)

  /**
   * Splits the flat parameter list into parameter clauses: a new clause starts
   * at every item (other than the first) whose startsNewClause flag is set.
   */
  protected def splittedItems: Seq[Seq[ScalaParameterTableModelItem]] = {
    def inner(items: Seq[ScalaParameterTableModelItem]): Seq[Seq[ScalaParameterTableModelItem]] = {
      if (items.isEmpty) return Seq(items)
      val index = items.tail.indexWhere(_.startsNewClause)
      if (index < 0) Seq(items)
      else {
        val (firstClause, rest) = items.splitAt(index + 1)
        firstClause +: inner(rest)
      }
    }
    inner(parameterItems)
  }

  protected def parameterItems: Seq[ScalaParameterTableModelItem] = {
    myParametersTableModel.getItems.asScala.toSeq
  }

  /** Toolbar button that starts a new parameter clause at the selected row. */
  protected def createAddClauseButton(): AnActionButton = {
    val addClauseButton = new AnActionButton(ScalaBundle.message("change.signature.add.parameter.clause"), null, Icons.ADD_CLAUSE) {
      override def actionPerformed(e: AnActionEvent): Unit = {
        val table = parametersTable
        val editedColumn = editingColumn(table)
        TableUtil.stopEditing(table)
        val selected = table.getSelectedRow
        if (selected > 0) {
          val item = myParametersTableModel.getItem(selected)
          item.startsNewClause = true
          myParametersTableModel.fireTableDataChanged()
        }
        finishAndRestoreEditing(editedColumn)
      }
    }
    // Only enabled on rows that do not already start a clause (and never on row 0).
    addClauseButton.addCustomUpdater((e: AnActionEvent) => {
      val selected = parametersTable.getSelectedRow
      selected > 0 && !myParametersTableModel.getItem(selected).startsNewClause
    })
    addClauseButton.setShortcut(CustomShortcutSet.fromString("alt EQUALS"))
    addClauseButton
  }

  /** Toolbar button that merges the selected row's clause into the previous one. */
  protected def createRemoveClauseButton(): AnActionButton = {
    val removeClauseButton = new AnActionButton(ScalaBundle.message("change.signature.remove.parameter.clause"), null, Icons.REMOVE_CLAUSE) {
      override def actionPerformed(e: AnActionEvent): Unit = {
        val table = parametersTable
        val editedColumn = editingColumn(table)
        TableUtil.stopEditing(table)
        val selected = table.getSelectedRow
        if (selected > 0) {
          val item = myParametersTableModel.getItem(selected)
          item.startsNewClause = false
          myParametersTableModel.fireTableDataChanged()
        }
        finishAndRestoreEditing(editedColumn)
      }
    }
    // Only enabled on rows that currently start a clause.
    removeClauseButton.addCustomUpdater((e: AnActionEvent) => {
      val selected = parametersTable.getSelectedRow
      selected > 0 && myParametersTableModel.getItem(selected).startsNewClause
    })
    removeClauseButton.setShortcut(CustomShortcutSet.fromString("alt MINUS"))
    removeClauseButton
  }

  /**
   * "Move down" action. Moving across a clause boundary shifts the boundary
   * (toggles startsNewClause) instead of exchanging rows; within a clause the
   * rows are swapped, carrying the clause-start flag along if needed.
   */
  protected def downAction: AnActionButtonRunnable = new AnActionButtonRunnable {
    override def run(t: AnActionButton): Unit = {
      val table = parametersTable
      val selected = table.getSelectedRow
      if (selected < 0 || selected >= table.getModel.getRowCount - 1) return
      val editedColumn = editingColumn(table)
      TableUtil.stopEditing(table)
      val itemBelow = myParametersTableModel.getItem(selected + 1)
      val item = myParametersTableModel.getItem(selected)
      if (itemBelow.startsNewClause) {
        itemBelow.startsNewClause = false
        if (selected > 0) item.startsNewClause = true
        myParametersTableModel.fireTableDataChanged()
      }
      else {
        if (item.startsNewClause) {
          item.startsNewClause = false
          itemBelow.startsNewClause = true
        }
        myParametersTableModel.exchangeRows(selected, selected + 1)
        table.setRowSelectionInterval(selected + 1, selected + 1)
      }
      finishAndRestoreEditing(editedColumn)
    }
  }

  /** "Move up" action; mirror image of [[downAction]]. */
  protected def upAction: AnActionButtonRunnable = new AnActionButtonRunnable {
    override def run(t: AnActionButton): Unit = {
      val table = parametersTable
      val selected = table.getSelectedRow
      if (selected <= 0 || selected >= table.getModel.getRowCount) return
      val editedColumn = editingColumn(table)
      TableUtil.stopEditing(table)
      val item = myParametersTableModel.getItem(selected)
      if (item.startsNewClause) {
        item.startsNewClause = false
        if (selected != table.getModel.getRowCount - 1) {
          val itemBelow = myParametersTableModel.getItem(selected + 1)
          itemBelow.startsNewClause = true
        }
        myParametersTableModel.fireTableDataChanged()
      }
      else {
        val itemAbove = myParametersTableModel.getItem(selected - 1)
        if (itemAbove.startsNewClause) {
          itemAbove.startsNewClause = false
          item.startsNewClause = true
        }
        myParametersTableModel.exchangeRows(selected, selected - 1)
        table.setRowSelectionInterval(selected - 1, selected - 1)
      }
      finishAndRestoreEditing(editedColumn)
    }
  }

  /** Configures selection behavior and wraps the table in a toolbar with the clause/move actions. */
  protected def decorateParameterTable(table: JBTable): JPanel = {
    table.setCellSelectionEnabled(true)
    table.getSelectionModel.setSelectionMode(ListSelectionModel.SINGLE_SELECTION)
    table.getSelectionModel.setSelectionInterval(0, 0)
    table.setSurrendersFocusOnKeystroke(true)
    val buttonsPanel: JPanel =
      ToolbarDecorator.createDecorator(table)
        .setMoveUpAction(upAction)
        .setMoveDownAction(downAction)
        .addExtraActions(createAddClauseButton(), createRemoveClauseButton())
        .createPanel
    myParametersTableModel.addTableModelListener(mySignatureUpdater)
    buttonsPanel
  }

  /** Restores focus to the table and, if a cell was being edited, resumes editing that column. */
  private def finishAndRestoreEditing(editedColumn: Option[Int]): Unit = {
    val table = parametersTable
    TableUtil.updateScroller(table)
    table.requestFocus()
    editedColumn.foreach { col =>
      val row = table.getSelectedRow
      table.setRowHeight(row, table.getRowHeight)
      table.editCellAt(row, col)
    }
  }

  // Separator color between clauses: a lighter shade under Darcula, darker otherwise.
  def clauseSeparatorColor: Color = {
    val background = getContentPane.getBackground
    if (StartupUiUtil.isUnderDarcula) background.brighter.brighter else background.darker()
  }

  private def editingColumn(table: JTable) = if (table.isEditing) Some(table.getEditingColumn) else None

  /** Builds the "specify result type" checkbox row plus the settings hyperlink. */
  private def createTypePanel(): JPanel = {
    val typePanel = new JPanel
    typePanel.setLayout(new FlowLayout(FlowLayout.LEFT, 0, 0))
    mySpecifyTypeChb = new JCheckBox
    mySpecifyTypeChb.setText(ScalaBundle.message("specify.result.type"))
    mySpecifyTypeChb.setDisplayedMnemonicIndex(15)
    typePanel.add(mySpecifyTypeChb)
    val myLinkContainer = new JPanel
    myLinkContainer.setLayout(new FlowLayout(FlowLayout.LEFT, 0, 0))
    typePanel.add(myLinkContainer)
    myLinkContainer.add(setUpHyperLink())
    setUpSpecifyTypeChb()
    setUpVisibilityListener()
    typePanel
  }

  // Link to the type-annotation settings; following it re-applies the settings
  // default to the checkbox and refreshes the signature preview.
  private def setUpHyperLink(): HyperlinkLabel = {
    val link = TypeAnnotationUtil.createTypeAnnotationsHLink(project, ScalaBundle.message("default.ta.settings"))
    link.addHyperlinkListener((e: HyperlinkEvent) => {
      extensions.invokeLater {
        mySpecifyTypeChb.setSelected(needsTypeAnnotation(method))
        updateSignatureAlarmFired()
      }
    })
    link
  }

  // Changing visibility can change whether a type annotation is required, so
  // re-derive the checkbox state on every visibility change.
  private def setUpVisibilityListener(): Unit = {
    myVisibilityPanel.addListener((e: ChangeEvent) => {
      mySpecifyTypeChb.setSelected(needsTypeAnnotation(method))
      updateSignatureAlarmFired()
    })
  }

  private def setUpSpecifyTypeChb(): Unit ={
    mySpecifyTypeChb.setSelected(needsTypeAnnotation(method))
    mySpecifyTypeChb.addActionListener((e: ActionEvent) => updateSignatureAlarmFired())
  }

  /**
   * Read-only list presentation of the parameter rows. Each row shows
   * "name: Type = default" with names/types padded for column alignment,
   * introduced parameters in bold, and a top border marking clause starts.
   */
  class ScalaParametersListTable extends ParametersListTable {
    override protected def getRowRenderer(row: Int): JBTableRowRenderer = {
      (table: JTable, row: Int, selected: Boolean, focused: Boolean) => {
        val item = getRowItem(row)
        val name = nameText(item)
        val typeTxt = typeText(item)
        val nameAndType =
          if (name == "" && typeTxt == "") ""
          else ScalaExtractMethodUtils.typedName(name, typeTxt)
        val defText = defaultText(item)
        val text = s"$nameAndType $defText"
        val comp = JBListTable.createEditorTextFieldPresentation(project, getFileType, " " + text, selected, focused)
        // Introduced parameters are highlighted in bold.
        if (item.parameter.isIntroducedParameter) {
          val fields = UIUtil.findComponentsOfType(comp, classOf[EditorTextField]).asScala
          fields.foreach { f =>
            f.setFont(f.getFont.deriveFont(Font.BOLD))
          }
        }
        // The 2px top border doubles as a clause separator when visible.
        val color =
          if (item.startsNewClause) clauseSeparatorColor
          else if (selected && focused) parametersTable.getSelectionBackground else parametersTable.getBackground
        comp.setBorder(new MatteBorder(2, 0, 0, 0, color))
        comp
      }
    }
    // Right-pads the name with spaces so all rows align on the longest name.
    protected def nameText(item: ScalaParameterTableModelItem): String = {
      val maxLength = parameterItems.map(_.parameter.getName.length)
        .maxOption.getOrElse(0)
      val name = item.parameter.getName
      name + StringUtil.repeat(" ", maxLength - name.length)
    }
    // Right-pads the type text analogously to nameText.
    protected def typeText(item: ScalaParameterTableModelItem): String = {
      val maxLength =parameterItems.map(_.typeText.length)
        .maxOption.getOrElse(0)
      val typeText = item.typeText
      typeText + StringUtil.repeat(" ", maxLength - typeText.length)
    }
    protected def defaultText(item: ScalaParameterTableModelItem): String = {
      val defaultValue: String = item.defaultValueCodeFragment.getText
      if (StringUtil.isNotEmpty(defaultValue)) " = " + defaultValue else ""
    }
    override protected def isRowEmpty(row: Int): Boolean = false
    override def getRowItem(row: Int): ScalaParameterTableModelItem = myParametersTableModel.getRowValue(row)
    override def getRowEditor(item: ParameterTableModelItemBase[ScalaParameterInfo]): JBTableRowEditor = getTableEditor(getTable, item)
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureDialog.scala | Scala | apache-2.0 | 22,687 |
/** Supertrait contributing a concrete `getFoo` implementation. */
trait Foo {
  def getFoo(): String = "foo"
}
/** Subclass of Foo adding its own method; inherits `getFoo` unchanged. */
class Sub extends Foo {
  def getBar(): String = "bar"
}
object Test {
  def main(args: Array[String]): Unit = {
    // Sub's declared methods must not contain any compiler-generated bridges.
    val declared = classOf[Sub].getDeclaredMethods
    assert(declared.forall(m => !m.isBridge), declared.mkString(" "))
  }
}
| yusuke2255/dotty | tests/run/mixin-bridge-methods.scala | Scala | bsd-3-clause | 253 |
package jp.ne.opt.chronoscala
import java.time.{Clock, Instant}
/**
 * Forwarder exposing `java.time.Instant` factory methods, with the no-argument
 * `now()` wired to `ClockProvider.clock` rather than the system clock
 * (presumably so callers can substitute a fixed clock — confirm against ClockProvider).
 */
trait InstantForwarder {
  // Current instant from the library-configured clock.
  def now(): Instant = Instant.now(ClockProvider.clock)
  // Current instant from an explicitly supplied clock.
  def now(clock: Clock): Instant = Instant.now(clock)
  // Delegates to Instant.parse (ISO-8601 instant text, e.g. "2007-12-03T10:15:30.00Z").
  def parse(str: String): Instant = Instant.parse(str)
  def ofEpochMilli(millis: Long): Instant = Instant.ofEpochMilli(millis)
}
| opt-tech/chronoscala | shared/src/main/scala/jp/ne/opt/chronoscala/InstantForwarder.scala | Scala | mit | 336 |
package com.mesosphere.util
/**
 * An absolute path, represented internally as the [[RelativePath]] from the
 * root. Construct via the companion's `apply`/`validate`; the constructor is
 * private so every instance goes through validation.
 */
final case class AbsolutePath private(private val path: RelativePath) extends Path {

  override type Self = AbsolutePath

  // Appends a single path element.
  def /(last: String): AbsolutePath = AbsolutePath(path / last) // scalastyle:ignore method.name

  override def resolve(tail: RelativePath): AbsolutePath = AbsolutePath(path.resolve(tail))

  /**
   * The relative path that resolves to this path at `base`.
   *
   * @throws IllegalArgumentException if `base` is not a parent of, or identical to, this path.
   */
  def relativize(base: AbsolutePath): RelativePath = RelativePath.relativize(this, base)

  // Renders with a leading separator followed by the relative part.
  override def toString: String = Path.Separator + path.toString

  def elements: Vector[String] = path.elements

}
object AbsolutePath {

  /** The root path, i.e. the absolute path with no elements. */
  val Root: AbsolutePath = AbsolutePath(RelativePath.Empty)

  /** Parses `path`, throwing the validation [[Error]] when it is not a well-formed absolute path. */
  def apply(path: String): AbsolutePath = validate(path) match {
    case Right(parsed) => parsed
    case Left(error) => throw error
  }

  /** Parses `path` into an [[AbsolutePath]], or reports why it cannot be one. */
  def validate(path: String): Either[AbsolutePath.Error, AbsolutePath] = {
    if (path.isEmpty) Left(Empty)
    else if (!path.startsWith(Path.Separator)) Left(Relative)
    else {
      // Strip the single leading separator and validate the remainder as relative.
      RelativePath.validate(path.drop(1)) match {
        case Right(relativePath) => Right(AbsolutePath(relativePath))
        case Left(RelativePath.Absolute) => Left(BadRoot)
      }
    }
  }

  sealed abstract class Error(override val getMessage: String) extends Exception

  case object Empty extends Error("Empty absolute path")

  case object Relative extends Error("Expected absolute path, but found relative path")

  case object BadRoot extends Error("Too many leading separators for absolute path")

}
| dcos/cosmos | cosmos-common/src/main/scala/com/mesosphere/util/AbsolutePath.scala | Scala | apache-2.0 | 1,583 |
/*
* Copyright (c) 2010 e.e d3si9n
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package scalaxb.compiler.xsd
import scalashim._
import scalaxb.compiler.Log
import scala.collection.mutable
import scalaxb.compiler.Module.camelCase
/**
 * How many occurrences a schema particle/attribute may have, derived from
 * minOccurs/maxOccurs: absent-or-one, exactly one, or many.
 */
sealed abstract class Cardinality
// The previous explicit `toString` overrides were redundant: a case object's
// generated toString already returns its name ("Optional", "Single", "Multiple").
case object Optional extends Cardinality
case object Single extends Cardinality
case object Multiple extends Cardinality
trait Params extends Lookup {
private val logger = Log.forName("xsd.Params")
val ATTRS_PARAM = "attributes"
val anyNumbers: mutable.Map[AnyDecl, Int] = mutable.Map()
case class Occurrence(minOccurs: Int, maxOccurs: Int, nillable: Boolean) {
def toSingle: Occurrence = copy(minOccurs = 1, maxOccurs = 1)
}
def toCardinality(minOccurs: Int, maxOccurs: Int): Cardinality =
if (maxOccurs > 1) Multiple
else if (minOccurs == 0) Optional
else Single
def toCardinality(occurrence: Occurrence): Cardinality =
toCardinality(occurrence.minOccurs, occurrence.maxOccurs)
def toCardinality(attr: AttributeDecl): Cardinality = {
val minOccurs = if (attr.use == RequiredUse ||
attr.fixedValue.isDefined || attr.defaultValue.isDefined) 1
else 0
toCardinality(minOccurs, 1)
}
case class Param(namespace: Option[String],
name: String,
typeSymbol: XsTypeSymbol,
cardinality: Cardinality,
nillable: Boolean,
global: Boolean,
qualified: Boolean,
attribute: Boolean) {
def baseTypeName: String = buildTypeName(typeSymbol)
def singleTypeName: String =
if (nillable) "Option[" + baseTypeName + "]"
else baseTypeName
def typeName: String = cardinality match {
case Single => singleTypeName
case Optional => "Option[" + singleTypeName + "]"
case Multiple => "Seq[" + singleTypeName + "]"
}
def toParamName: String = makeParamName(name, typeSymbol match {
case XsLongAttribute | XsAnyAttribute => false
case x if attribute => true
case _ => false
})
def toTraitScalaCode: String = toParamName + ": " + typeName
def toScalaCode: String =
toTraitScalaCode + (cardinality match {
case Single if typeSymbol == XsLongAttribute => " = Map()"
case Optional => " = None"
case Multiple => " = Nil"
case Single if nillable => " = None"
case _ => ""
})
def map(f: String => String): Param = this.copy(name = f(name))
}
def buildParam(decl: Decl): Param = decl match {
case elem: ElemDecl => buildParam(elem)
case attr: AttributeDecl => buildParam(attr)
case any: AnyAttributeDecl => buildParam(any)
case group: AttributeGroupDecl => buildParam(group)
case _ => sys.error("Params#buildParam: unsupported delcaration " + decl.toString)
}
def buildParam(elem: ElemDecl): Param = {
val typeSymbol = if (isSubstitutionGroup(elem)) buildSubstitionGroupSymbol(elem.typeSymbol)
else elem.typeSymbol match {
case ReferenceTypeSymbol(decl: ComplexTypeDecl) =>
if (compositorWrapper.contains(decl)) buildCompositorSymbol(compositorWrapper(decl), elem.typeSymbol)
else elem.typeSymbol
case _ => elem.typeSymbol
}
val nillable = elem.nillable getOrElse { false }
val retval = typeSymbol match {
case AnyType(symbol) if nillable =>
Param(elem.namespace, elem.name, XsNillableAny, toCardinality(elem.minOccurs, elem.maxOccurs), false, false, false, false)
case XsLongAttribute =>
Param(elem.namespace, elem.name, typeSymbol, toCardinality(elem.minOccurs, elem.maxOccurs), nillable, false, false, true)
case _ =>
Param(elem.namespace, elem.name, typeSymbol, toCardinality(elem.minOccurs, elem.maxOccurs), nillable, elem.global, elem.qualified, false)
}
logger.debug("buildParam: " + retval.toString)
retval
}
def buildParam(attr: AttributeDecl): Param = {
val name = if (!attr.global) attr.name
else makePrefix(attr.namespace, context) + attr.name
val retval = Param(attr.namespace, name, attr.typeSymbol, toCardinality(attr), false, false, false, true)
logger.debug("buildParam: " + retval.toString)
retval
}
def buildParam(group: AttributeGroupDecl): Param = {
val retval = Param(group.namespace, group.name,
new AttributeGroupSymbol(group.namespace, group.name), Single, false, false, false, true)
logger.debug("buildParam: " + retval.toString)
retval
}
def buildSubstitionGroupSymbol(typeSymbol: XsTypeSymbol): XsTypeSymbol =
XsDataRecord(typeSymbol)
def buildParam(any: AnyAttributeDecl): Param =
Param(None, ATTRS_PARAM, XsAnyAttribute, Single, false, false, false, true)
def buildCompositorSymbol(compositor: HasParticle, typeSymbol: XsTypeSymbol): XsTypeSymbol =
compositor match {
case ref: GroupRef =>
buildCompositorSymbol(buildGroup(ref), typeSymbol)
case group: GroupDecl =>
val primary = primaryCompositor(group)
val compositorRef = buildCompositorRef(primary, 0)
buildCompositorSymbol(primaryCompositor(group), compositorRef.typeSymbol)
case seq: SequenceDecl => typeSymbol
case _ => XsDataRecord(typeSymbol)
}
/// called by makeGroup
def buildParam(compositor: HasParticle): Param =
Param(None, "arg1", buildCompositorSymbol(compositor, buildCompositorRef(compositor, 0).typeSymbol),
toCardinality(compositor.minOccurs, compositor.maxOccurs), false, false, false, false)
def primaryCompositor(group: GroupDecl): HasParticle =
if (group.particles.size == 1) group.particles(0) match {
case seq: SequenceDecl =>
if (containsSingleChoice(seq)) singleChoice(seq)
else seq
case choice: ChoiceDecl => choice
case all: AllDecl => all
}
else sys.error("Params#primaryCompositor: group must contain one content model: " + group)
// context.compositorNames contains the definition of GroupDecl,
// while particle GroupDecl may differ in cardinality.
def groupTypeName(group: GroupDecl) =
makeTypeName(context.compositorNames(groups(group.namespace, group.name)))
def buildOccurrence(particle: Particle): Occurrence = particle match {
case compositor: HasParticle => buildOccurrence(compositor)
case elem: ElemDecl => Occurrence(elem.minOccurs, elem.maxOccurs, elem.nillable getOrElse {false})
case ref: ElemRef => Occurrence(ref.minOccurs, ref.maxOccurs,
(ref.nillable getOrElse {false}) || (buildElement(ref).nillable getOrElse {false}))
case any: AnyDecl => Occurrence(any.minOccurs, any.maxOccurs, false)
}
  /** Computes the occurrence of a compositor.
   *
   *  Group (ref)s merge their own cardinality with their primary compositor's;
   *  choices take the widest min/max over all particles and are nillable if any
   *  member element is; any other empty compositor is treated as optional.
   */
  def buildOccurrence(compos: HasParticle): Occurrence = compos match {
    case ref: GroupRef =>
      val o = buildOccurrence(buildGroup(ref))
      // nillability of primary compositor does not transfer to group or group refs
      Occurrence(math.min(ref.minOccurs, o.minOccurs), math.max(ref.maxOccurs, o.maxOccurs), false)
    case group: GroupDecl =>
      val o = buildOccurrence(primaryCompositor(group))
      // nillability of primary compositor does not transfer to group or group refs
      Occurrence(math.min(group.minOccurs, o.minOccurs), math.max(group.maxOccurs, o.maxOccurs), false)
    case choice: ChoiceDecl =>
      val particleOccurences = choice.particles map {buildOccurrence}
      val minOccurs = (choice.minOccurs :: particleOccurences.map(_.minOccurs)).min
      val maxOccurs = (choice.maxOccurs :: particleOccurences.map(_.maxOccurs)).max
      // a choice is nillable when any of its element alternatives is nillable
      val nillable = choice.particles exists {
        case elem: ElemDecl => elem.nillable getOrElse {false}
        case ref: ElemRef =>
          if (ref.nillable getOrElse {false}) true
          else buildElement(ref).nillable getOrElse {false}
        case _ => false
      }
      Occurrence(minOccurs, maxOccurs, nillable)
    case _ =>
      // a compositor with no content may legitimately be absent -> minOccurs 0
      val minOccurs = if (isEmptyCompositor(compos)) 0
        else compos.minOccurs
      Occurrence(minOccurs, compos.maxOccurs, false)
  }
  /** Returns true when the compositor (transitively, through group refs and
   *  nested choices) holds no particles at all.
   */
  def isEmptyCompositor(compos: HasParticle): Boolean = compos match {
    case ref: GroupRef => isEmptyCompositor(buildGroup(ref))
    case group: GroupDecl => isEmptyCompositor(primaryCompositor(group))
    case choice: ChoiceDecl =>
      // a choice is empty only if every alternative is itself an empty compositor
      choice.particles forall {
        case compositor2: HasParticle => isEmptyCompositor(compositor2)
        case _ => false
      }
    case _ => compos.particles.isEmpty
  }
def mergeOccurrence(lhs: Occurrence, rhs: Occurrence): Occurrence =
Occurrence(math.min(lhs.minOccurs, rhs.minOccurs),
math.max(lhs.maxOccurs, rhs.maxOccurs),
lhs.nillable || rhs.nillable)
  /** Synthetic element standing in for a long `xs:all` content model.
   *  NOTE(review): the `all` argument is currently unused — confirm intended.
   */
  def buildLongAllRef(all: AllDecl) =
    ElemDecl(Some(INTERNAL_NAMESPACE), "all", XsLongAll, None, None, 1, 1)
  /** Synthetic element standing in for a long attribute group. */
  def buildLongAttributeRef =
    ElemDecl(Some(INTERNAL_NAMESPACE), ATTRS_PARAM, XsLongAttribute, None, None, 1, 1)
  /** Synthetic element for an `xs:any` wildcard; named "any", "any2", "any3", ...
   *  in the order wildcards are first encountered (tracked in `anyNumbers`).
   */
  def buildAnyRef(any: AnyDecl) = {
    val anyNumber = anyNumbers.getOrElseUpdate(any, anyNumbers.size + 1)
    val name = if (anyNumber <= 1) "any"
      else "any" + anyNumber
    ElemDecl(Some(INTERNAL_NAMESPACE), name, XsWildcard(any.namespaceConstraint), None, None, any.minOccurs, any.maxOccurs)
  }
  /** Builds a compositor reference element, resolving group refs to their
   *  definition and computing the occurrence from the original compositor.
   */
  def buildCompositorRef(compositor: HasParticle, index: Int): ElemDecl =
    buildCompositorRef(
      compositor match {
        case ref: GroupRef => buildGroup(ref)
        case _ => compositor
      },
      compositor match {
        // overriding nillable because nillable options are handled elsewhere.
        case choice: ChoiceDecl => buildOccurrence(compositor).copy(nillable = false)
        case _ => buildOccurrence(compositor)
      },
      index)
  /** Builds the element referencing a compositor's generated wrapper type.
   *
   *  Also registers the synthesized `ComplexTypeDecl` in `compositorWrapper` and
   *  `context.typeNames` as a side effect, so later phases can resolve it.
   *  Named groups get a name suffixed with the 1-based particle index; anonymous
   *  compositors reuse their lower-cased type name.
   */
  def buildCompositorRef(compositor: HasParticle, occurrence: Occurrence, index: Int): ElemDecl = {
    val ns = compositor.namespace
    val (typeName, name) = compositor match {
      case group: GroupDecl =>
        val tn = makeTypeName(context.compositorNames(primaryCompositor(group)))
        (groupTypeName(group), camelCase(tn) + (index + 1).toString)
      case _ =>
        val tn = makeTypeName(context.compositorNames(compositor))
        (tn, tn.toLowerCase)
    }
    val symbol = ReferenceTypeSymbol(ns, typeName)
    // placeholder complex type standing in for the compositor's wrapper class
    val decl = ComplexTypeDecl(ns, symbol.localPart, List(symbol.name),
      false, false, ComplexContentDecl.empty, Nil, None)
    compositorWrapper(decl) = compositor
    symbol.decl = decl
    context.typeNames(decl) = typeName
    logger.debug("buildCompositorRef: " + ns + " " + typeName)
    ElemDecl(ns, name, symbol, None, None,
      occurrence.minOccurs, occurrence.maxOccurs, Some(occurrence.nillable), false, false, None, None)
  }
  /** Determines the Scala type name generated for a choice.
   *
   *  If all alternatives share one element type, that type is used; otherwise, if
   *  the choice contains no foreign types and every alternative can descend from
   *  the enclosing declaration's type, that type is used; else it degrades to
   *  `Any`. The result is wrapped in `DataRecord` (with `Option` when nillable).
   */
  def buildChoiceTypeName(decl: ComplexTypeDecl, choice: ChoiceDecl,
      shortLocal: Boolean): String =
    if (choice.particles.size < 1) "scalaxb.DataRecord[Any]"
    else {
      val firstParticle = choice.particles(0)

      // type symbol of an element alternative, if the particle is an element
      def particleType(particle: Particle) = particle match {
        case elem: ElemDecl => Some(elem.typeSymbol)
        case ref: ElemRef => Some(buildElement(ref).typeSymbol)
        case _ => None
      }

      // Some(symbol) when every alternative carries the same element type
      def sameType: Option[XsTypeSymbol] = {
        val firstType = particleType(firstParticle)
        if (firstType.isEmpty) None
        else if (choice.particles forall { particleType(_) == firstType }) firstType
        else None
      }

      // whether the particle's generated class can extend the choice's own type
      def isOptionDescendant(particle: Particle): Boolean = particle match {
        case elem: ElemDecl =>
          elem.typeSymbol match {
            case ReferenceTypeSymbol(decl: ComplexTypeDecl) => true
            case _ => false
          }
        case ref: ElemRef =>
          buildElement(ref).typeSymbol match {
            case ReferenceTypeSymbol(decl: ComplexTypeDecl) => true
            case _ => false
          }
        case c: ChoiceDecl => c.particles forall { isOptionDescendant }
        case seq: SequenceDecl => true
        case _ => false
      }

      val member = sameType match {
        case Some(AnyType(x)) => "Any"
        case Some(x) => buildTypeName(x)
        case None =>
          if (!containsForeignType(choice) &&
            (choice.particles forall { isOptionDescendant }) ) buildTypeName(decl, shortLocal)
          else "Any"
      }
      if (buildOccurrence(choice).nillable) "scalaxb.DataRecord[Option[" + member + "]]"
      else "scalaxb.DataRecord[" + member + "]"
    }
}
| justjoheinz/scalaxb | cli/src/main/scala/scalaxb/compiler/xsd/Params.scala | Scala | mit | 13,579 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.ids.cas
import org.beangle.commons.collection.Collections
import scala.collection.mutable
/** CAS service settings.
 */
class CasSetting {
  /** Whether the login page enables a captcha. */
  var enableCaptcha: Boolean = _
  /** Whether the login page shows the Local/Remote switch. */
  var displayLoginSwitch: Boolean = _
  /** Key guarding against cross-site attacks; used when encrypting generated cookies. */
  var key: String = _
  /** Origin (base address) of this site. */
  var origin: String = _
  /** Whether HTTPS is enforced. */
  var forceHttps: Boolean = _
  /** Whether password strength is checked. */
  var checkPasswordStrength: Boolean = _
  /** Clients that are allowed to use this CAS server. */
  var clients: mutable.Buffer[String] = Collections.newBuffer[String]
  /** Login URL of an alternative (remote) SSO, if any. */
  var remoteLoginUrl: Option[String] = None
  /** Logout URL of an alternative (remote) SSO, if any. */
  var remoteLogoutUrl: Option[String] = None
  /** Whether local passwords are read-only. */
  var passwordReadOnly: Boolean = _
}
| beangle/ids | cas/src/main/scala/org/beangle/ids/cas/CasSetting.scala | Scala | lgpl-3.0 | 1,643 |
// Project: slogging
// Module:
// Description:
// Distributed under the MIT License (see included file LICENSE)
package slogging
import java.io.PrintStream
/** Logger that writes formatted messages to the streams configured on
 *  [[PrintLoggerFactory]], one stream per message level.
 */
object PrintLogger extends LoggerTemplate {
  import PrintLoggerFactory._
  // route the message to the stream configured for its level, then print it
  final override def logMessage(level: MessageLevel, name: String, message: String, cause: Option[Throwable]): Unit = (level match {
    case MessageLevel.`error` => errorStream
    case MessageLevel.`warn` => warnStream
    case MessageLevel.`info` => infoStream
    case MessageLevel.`debug` => debugStream
    case MessageLevel.`trace` => traceStream
  }).println(formatter.formatMessage(level,name,message,cause))
}
/** Factory that always hands out [[PrintLogger]] and holds the (globally shared)
 *  output streams and message formatter it uses. All streams default to stderr.
 */
object PrintLoggerFactory extends UnderlyingLoggerFactory {
  // @volatile: the setters synchronize on this object, but the readers (the hot
  // logging path) do not; without volatile, threads other than the writer had no
  // guarantee of ever observing an updated stream or formatter.
  @volatile private var _errorStream: PrintStream = System.err
  @volatile private var _warnStream: PrintStream = System.err
  @volatile private var _infoStream: PrintStream = System.err
  @volatile private var _debugStream: PrintStream = System.err
  @volatile private var _traceStream: PrintStream = System.err
  @volatile private var _formatter: MessageFormatter = MessageFormatter.default

  /** Stream receiving error-level messages. */
  @inline
  final def errorStream: PrintStream = _errorStream
  final def errorStream_=(s: PrintStream): Unit = this.synchronized{ _errorStream = s }

  /** Stream receiving warn-level messages. */
  @inline
  final def warnStream: PrintStream = _warnStream
  final def warnStream_=(s: PrintStream): Unit = this.synchronized{ _warnStream = s }

  /** Stream receiving info-level messages. */
  @inline
  final def infoStream: PrintStream = _infoStream
  final def infoStream_=(s: PrintStream): Unit = this.synchronized{ _infoStream = s }

  /** Stream receiving debug-level messages. */
  @inline
  final def debugStream: PrintStream = _debugStream
  final def debugStream_=(s: PrintStream): Unit = this.synchronized{ _debugStream = s }

  /** Stream receiving trace-level messages. */
  @inline
  final def traceStream: PrintStream = _traceStream
  final def traceStream_=(s: PrintStream): Unit = this.synchronized{ _traceStream = s }

  /** Formatter used to render every message line. */
  @inline
  final def formatter: MessageFormatter = _formatter
  final def formatter_=(f: MessageFormatter): Unit = this.synchronized{ _formatter = f }

  @inline
  override def getUnderlyingLogger(name: String): UnderlyingLogger = PrintLogger
}
| jokade/slogging | shared/src/main/scala/slogging/PrintLogger.scala | Scala | mit | 2,055 |
package scala.meta.internal.semanticdb.scalac
import scala.{meta => m}
import scala.reflect.internal.{Flags => gf}
import scala.meta.internal.scalacp._
import scala.meta.internal.{semanticdb => s}
import scala.meta.internal.semanticdb.{Language => l}
import scala.meta.internal.semanticdb.Scala.{DisplayNames => dn}
import scala.meta.internal.semanticdb.SymbolInformation.{Property => p}
import scala.meta.internal.semanticdb.SymbolInformation.{Kind => k}
/** Conversion from scalac global [[g.Symbol]]s to SemanticDB [[s.SymbolInformation]]. */
trait SymbolInformationOps { self: SemanticdbOps =>
  import g._

  implicit class XtensionGSymbolMSymbolInformation(gsym0: g.Symbol) {
    // Normalize before converting: Java classes are represented by their companion,
    // module classes by their module, and type skolems are de-skolemized.
    private val gsym: g.Symbol = {
      if (gsym0.isJavaClass) gsym0.companionClass
      else if (gsym0.isModuleClass) gsym0.asClass.module
      else if (gsym0.isTypeSkolem) gsym0.deSkolemize
      else gsym0
    }

    // Definition language: JAVA for Java-flagged symbols (and their parameters),
    // SCALA otherwise; packages are always reported as SCALA.
    private def language: s.Language = {
      if (gsym.hasPackageFlag) l.SCALA
      else if (gsym.hasFlag(gf.JAVA)) l.JAVA
      else if (gsym.isParameter && gsym.owner.hasFlag(gf.JAVA)) l.JAVA
      else l.SCALA
    }

    // Maps the scalac symbol category to the SemanticDB kind enumeration.
    private[meta] def kind: s.SymbolInformation.Kind = {
      gsym match {
        case _ if gsym.isSelfParameter =>
          k.SELF_PARAMETER
        case gsym: MethodSymbol =>
          if (gsym.isConstructor) k.CONSTRUCTOR
          else if (gsym.isMacro) k.MACRO
          // local lazy vals surface as lazy getters in scalac
          else if (gsym.isGetter && gsym.isLazy && gsym.isLocalToBlock) k.LOCAL
          else k.METHOD
        case gsym: ModuleSymbol =>
          if (gsym.hasPackageFlag) k.PACKAGE
          else if (gsym.isPackageObject) k.PACKAGE_OBJECT
          else k.OBJECT
        case gsym: TermSymbol =>
          if (gsym.isParameter) k.PARAMETER
          else if (gsym.isLocalToBlock) k.LOCAL
          else if (gsym.isJavaDefined || gsym.hasJavaEnumFlag) k.FIELD
          else k.METHOD
        case gsym: ClassSymbol =>
          if (gsym.isTrait && gsym.hasFlag(gf.JAVA)) k.INTERFACE
          else if (gsym.isTrait) k.TRAIT
          // Java annotations are modeled as interfaces
          else if (gsym.isClassfileAnnotation) k.INTERFACE
          else k.CLASS
        case gsym: TypeSymbol =>
          if (gsym.isParameter) k.TYPE_PARAMETER
          else k.TYPE
        case NoSymbol =>
          k.UNKNOWN_KIND
        case _ =>
          sys.error(s"unsupported symbol $gsym")
      }
    }

    // Encodes symbol properties (ABSTRACT, FINAL, VAL, VAR, ...) as a bitmask of
    // s.SymbolInformation.Property values; Java and Scala symbols follow
    // different rules.
    private[meta] def properties: Int = {
      val kind = this.kind
      var flags = 0
      def flip(prop: s.SymbolInformation.Property): Unit = flags |= prop.value
      def isAbstractClass =
        gsym.isClass && gsym.isAbstract && !gsym.isTrait && !gsym.hasFlag(gf.JAVA_ENUM)
      def isAbstractMethod = gsym.isMethod && gsym.isDeferred
      def isAbstractType = gsym.isType && !gsym.isParameter && gsym.isDeferred
      def isObject = gsym.isModule && !gsym.hasFlag(gf.PACKAGE)
      if (gsym.hasFlag(gf.PACKAGE)) {
        // packages carry no properties
        ()
      } else if (gsym.hasFlag(gf.JAVA)) {
        if (isAbstractClass || kind.isInterface || isAbstractMethod) flip(p.ABSTRACT)
        if (gsym.hasFlag(gf.FINAL) || gsym.hasFlag(gf.JAVA_ENUM)) flip(p.FINAL)
        if (gsym.hasFlag(gf.JAVA_ENUM)) flip(p.ENUM)
        if (gsym.hasFlag(gf.STATIC) && !gsym.hasFlag(gf.INTERFACE)) flip(p.STATIC)
        if (gsym.isDefaultMethod) flip(p.DEFAULT)
      } else {
        if (isAbstractClass || isAbstractMethod || isAbstractType) flip(p.ABSTRACT)
        if (gsym.hasFlag(gf.FINAL) || isObject) flip(p.FINAL)
        if (gsym.hasFlag(gf.SEALED)) flip(p.SEALED)
        if (gsym.hasFlag(gf.IMPLICIT)) flip(p.IMPLICIT)
        if (gsym.hasFlag(gf.LAZY)) flip(p.LAZY)
        if (gsym.hasFlag(gf.CASE) && (gsym.isClass || gsym.isModule)) flip(p.CASE)
        if (gsym.isType && gsym.hasFlag(gf.CONTRAVARIANT)) flip(p.CONTRAVARIANT)
        if (gsym.isType && gsym.hasFlag(gf.COVARIANT)) flip(p.COVARIANT)
        // locals and underlying fields: VAL vs VAR from mutability
        if (kind.isLocal || gsym.isUsefulField) {
          if (gsym.isMutable) flip(p.VAR)
          else flip(p.VAL)
        }
        // accessors: VAL vs VAR from stability of the getter/setter pair
        if (gsym.isGetter || gsym.isSetter) {
          if (gsym.isStable) flip(p.VAL)
          else flip(p.VAR)
        }
        // primary-constructor parameters: look at the generated accessor (if any)
        if (gsym.isParameter && gsym.owner.isPrimaryConstructor) {
          val gaccessor = gsym.owner.owner.info.decl(gsym.name)
          if (gaccessor != g.NoSymbol && !gaccessor.isStable) flip(p.VAR)
          else if (gaccessor != g.NoSymbol && gaccessor.isMethod) flip(p.VAL)
          else ()
        }
        if (gsym.isPrimaryConstructor) flip(p.PRIMARY)
        if (gsym.isDefaultParameter) flip(p.DEFAULT)
      }
      flags
    }

    // Human-readable name, with special cases for synthetic/structural symbols.
    private def displayName: String = {
      if (gsym.isRootPackage) dn.RootPackage
      else if (gsym.isEmptyPackage) dn.EmptyPackage
      else if (gsym.isConstructor) dn.Constructor
      else if (gsym.name.startsWith("_$")) dn.Anonymous
      // package objects display as the name of the package that owns them
      else if (gsym.isPackageObject || gsym.isPackageObjectClass) gsym.owner.symbolName
      else gsym.symbolName
    }

    // Builds the SemanticDB signature, applying per-kind fixups (constructors,
    // synthetic fields, self params, Java annotations, Java type params).
    private def sig(linkMode: LinkMode): s.Signature = {
      if (gsym.hasPackageFlag) {
        s.NoSignature
      } else {
        val gsig = {
          if (gsym.hasFlag(gf.JAVA_ENUM) && gsym.isStatic) {
            gsym.info.widen
          } else if (gsym.isAliasType) {
            // type aliases surface as bounds [tpe, tpe] under any type params
            def preprocess(info: g.Type): g.Type = {
              info match {
                case g.PolyType(tparams, tpe) => g.PolyType(tparams, preprocess(tpe))
                case tpe => g.TypeBounds(tpe, tpe)
              }
            }
            preprocess(gsym.info)
          } else if (gsym.isModule) {
            gsym.moduleClass.info
          } else {
            gsym.info
          }
        }
        val ssig = gsig.toSemanticSig(linkMode)
        if (gsym.isConstructor) {
          // constructors carry no return type in SemanticDB
          ssig match {
            case m: s.MethodSignature => m.copy(returnType = s.NoType)
            case m => m
          }
        } else if (gsym.isScalacField) {
          // underlying fields are exposed as nullary method signatures
          ssig match {
            case ssig: s.ValueSignature =>
              val stparams = Some(s.Scope())
              val sparamss = Nil
              val sret = ssig.tpe
              s.MethodSignature(stparams, sparamss, sret)
            case _ =>
              sys.error(s"unsupported signature: ${ssig.getClass} $ssig")
          }
        } else if (gsym.isSelfParameter) {
          gsym.owner.self.toSemanticTpe match {
            case s.NoType => s.NoSignature
            case stpe => s.ValueSignature(stpe)
          }
        } else if (gsym.isClassfileAnnotation) {
          // rewrite scala annotation parents to their Java equivalents
          ssig match {
            case ssig: s.ClassSignature =>
              val parents1 = ssig.parents.flatMap {
                case s.TypeRef(s.NoType, "scala/annotation/Annotation#", Nil) =>
                  Some(s.TypeRef(s.NoType, "java/lang/Object#", Nil))
                case s.TypeRef(s.NoType, "scala/annotation/ClassfileAnnotation#", Nil) =>
                  None
                case sother =>
                  Some(sother)
              }
              ssig.copy(parents = parents1)
            case _ =>
              sys.error(s"unsupported signature: ${ssig.getClass} $ssig")
          }
        } else if (gsym.hasFlag(gf.JAVA) && kind == k.TYPE_PARAMETER) {
          // Java type params: only an upper bound, expressed in Java terms
          ssig match {
            case ssig: s.TypeSignature =>
              val upperBound1 = ssig.upperBound match {
                case s.StructuralType(s.WithType(tpes), _) =>
                  s.IntersectionType(tpes)
                case s.TypeRef(s.NoType, "scala/Any#", Nil) =>
                  s.TypeRef(s.NoType, "java/lang/Object#", Nil)
                case sother =>
                  sother
              }
              ssig.copy(lowerBound = s.NoType, upperBound = upperBound1)
            case _ =>
              sys.error(s"unsupported signature: ${ssig.getClass} $ssig")
          }
        } else {
          ssig
        }
      }
    }

    // Annotations attached to the symbol, minus scalac's internal macro-impl marker.
    private def annotations: List[s.Annotation] = {
      val ganns = gsym.annotations.filter { gann =>
        gann.atp.typeSymbol != definitions.MacroImplAnnotation
      }
      ganns.map(_.toSemantic)
    }

    // Access modifier of the symbol; kinds that cannot have one get NoAccess.
    private def access: s.Access = {
      kind match {
        case k.LOCAL | k.PARAMETER | k.SELF_PARAMETER | k.TYPE_PARAMETER | k.PACKAGE |
            k.PACKAGE_OBJECT =>
          s.NoAccess
        case _ =>
          if (gsym.hasFlag(gf.SYNTHETIC) && gsym.hasFlag(gf.ARTIFACT)) {
            // NOTE: some sick artifact vals produced by mkPatDef can be
            // private to method (whatever that means), so here we just ignore them.
            s.PublicAccess()
          } else {
            if (gsym.privateWithin == NoSymbol) {
              if (gsym.isPrivateThis) s.PrivateThisAccess()
              else if (gsym.isPrivate) s.PrivateAccess()
              else if (gsym.isProtectedThis) s.ProtectedThisAccess()
              else if (gsym.isProtected) s.ProtectedAccess()
              else s.PublicAccess()
            } else {
              val ssym = gsym.privateWithin.ssym
              if (gsym.isProtected) s.ProtectedWithinAccess(ssym)
              else s.PrivateWithinAccess(ssym)
            }
          }
      }
    }

    /** Assembles the full SemanticDB SymbolInformation for this symbol. */
    def toSymbolInformation(linkMode: LinkMode): s.SymbolInformation = {
      s.SymbolInformation(
        symbol = gsym.ssym,
        language = language,
        kind = kind,
        properties = properties,
        displayName = displayName,
        signature = sig(linkMode),
        annotations = annotations,
        access = access
      )
    }
  }
}
| olafurpg/scalameta | semanticdb/scalac/library/src/main/scala/scala/meta/internal/semanticdb/scalac/SymbolInformationOps.scala | Scala | bsd-3-clause | 9,301 |
package com.sfxcode.sapphire.core
import com.typesafe.config.{ Config, ConfigFactory }
import com.typesafe.scalalogging.LazyLogging
import scala.jdk.CollectionConverters._
/** Convenience accessors over a Typesafe [[Config]], returning a caller-supplied
 *  default (and logging) when a path is missing or its value cannot be read.
 */
trait ConfigValues extends LazyLogging {

  /** Application configuration loaded from the default Typesafe Config sources. */
  val config: Config = ConfigFactory.load()

  // Reads a single value at `path` via `f`; missing paths and read failures are
  // logged and yield `defaultReturnValue`.
  private def configValue[E <: Any](path: String, defaultReturnValue: E = None, f: String => E): E =
    if (!config.hasPath(path)) {
      logger.warn("config path: %s not exist".format(path))
      defaultReturnValue
    } else
      try f(path)
      catch {
        case e: Exception =>
          logger.error(e.getMessage, e)
          defaultReturnValue
      }

  // Reads a list at `path` via `f`; missing paths and read failures are logged
  // and yield the empty list. The element cast is unchecked (erased at runtime).
  private def configValues[E <: Any](path: String, f: String => java.util.List[_]): List[E] =
    if (!config.hasPath(path)) {
      logger.warn("config path: %s not exist".format(path))
      List()
    } else
      try f(path).asScala.toList.asInstanceOf[List[E]]
      catch {
        case e: Exception =>
          logger.error(e.getMessage, e)
          List()
      }

  def configBooleanValue(path: String, defaultReturnValue: Boolean = false): Boolean =
    configValue[Boolean](path, defaultReturnValue, config.getBoolean)

  def configStringValue(path: String, defaultReturnValue: String = ""): String =
    configValue[String](path, defaultReturnValue, config.getString)

  def configIntValue(path: String, defaultReturnValue: Int = 0): Int =
    configValue[Int](path, defaultReturnValue, config.getInt)

  def configLongValue(path: String, defaultReturnValue: Long = 0): Long =
    configValue[Long](path, defaultReturnValue, config.getLong)

  def configDoubleValue(path: String, defaultReturnValue: Double = 0.0): Double =
    configValue[Double](path, defaultReturnValue, config.getDouble)

  def configBooleanValues(path: String): List[Boolean] =
    configValues[Boolean](path, config.getBooleanList)

  def configStringValues(path: String): List[String] =
    configValues[String](path, config.getStringList)

  def configIntValues(path: String): List[Int] =
    configValues[Int](path, config.getIntList)

  def configLongValues(path: String): List[Long] =
    configValues[Long](path, config.getLongList)

  def configDoubleValues(path: String): List[Double] =
    configValues[Double](path, config.getDoubleList)
}
| sfxcode/sapphire-core | src/main/scala/com/sfxcode/sapphire/core/ConfigValues.scala | Scala | apache-2.0 | 2,348 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.orc
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce.{Job, TaskAttemptContext}
import org.apache.orc.OrcConf.{COMPRESS, MAPRED_OUTPUT_SCHEMA}
import org.apache.orc.mapred.OrcStruct
import org.apache.spark.sql.execution.datasources.{OutputWriter, OutputWriterFactory}
import org.apache.spark.sql.execution.datasources.orc.{OrcFileFormat, OrcOptions, OrcOutputWriter, OrcUtils}
import org.apache.spark.sql.execution.datasources.v2.FileWriteBuilder
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.util.CaseInsensitiveStringMap
/** DataSource V2 write builder for the ORC file format: records the output
 *  schema and compression codec on the Hadoop job configuration and delegates
 *  per-task writing to [[OrcOutputWriter]].
 */
class OrcWriteBuilder(
    options: CaseInsensitiveStringMap,
    paths: Seq[String],
    formatName: String,
    supportsDataType: DataType => Boolean)
  extends FileWriteBuilder(options, paths, formatName, supportsDataType) {

  override def prepareWrite(
      sqlConf: SQLConf,
      job: Job,
      options: Map[String, String],
      dataSchema: StructType): OutputWriterFactory = {
    val orcOptions = new OrcOptions(options, sqlConf)
    val conf = job.getConfiguration
    // publish the output schema and compression codec where the ORC writer reads them
    conf.set(MAPRED_OUTPUT_SCHEMA.getAttribute, OrcFileFormat.getQuotedSchemaString(dataSchema))
    conf.set(COMPRESS.getAttribute, orcOptions.compressionCodec)
    // the old mapred API output format must be set through the JobConf view
    conf.asInstanceOf[JobConf]
      .setOutputFormat(classOf[org.apache.orc.mapred.OrcOutputFormat[OrcStruct]])
    new OutputWriterFactory {
      override def newInstance(
          path: String,
          dataSchema: StructType,
          context: TaskAttemptContext): OutputWriter = {
        new OrcOutputWriter(path, dataSchema, context)
      }

      // e.g. ".snappy.orc"; unknown codec names fall back to a plain ".orc"
      override def getFileExtension(context: TaskAttemptContext): String = {
        val compressionExtension: String = {
          val name = context.getConfiguration.get(COMPRESS.getAttribute)
          OrcUtils.extensionsForCompressionCodecNames.getOrElse(name, "")
        }
        compressionExtension + ".orc"
      }
    }
  }
}
| pgandhi999/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcWriteBuilder.scala | Scala | apache-2.0 | 2,807 |
package com.twitter.finatra.http
import com.twitter.finagle.Filter
import com.twitter.finagle.httpx.Method._
import com.twitter.finagle.httpx.{Request, Response}
import com.twitter.inject.Injector
import scala.collection.mutable.ArrayBuffer
/** DSL for declaring HTTP routes on a controller. Each HTTP-verb method records
 *  a [[RouteBuilder]]; `filter` returns a view of the same DSL whose routes run
 *  behind an additional filter.
 */
private[http] trait RouteDSL { self =>

  private type HttpFilter = Filter[Request, Response, Request, Response]

  private[http] val routeBuilders = ArrayBuffer[RouteBuilder[_, _]]()
  private[http] val annotations = getClass.getDeclaredAnnotations

  /** Filter chain applied in front of every route declared through this DSL view. */
  private[http] def buildFilter(injector: Injector): HttpFilter = Filter.identity

  /** Returns a DSL view whose routes run behind an injected filter of the given type. */
  protected def filter[FilterType <: HttpFilter : Manifest] = new RouteDSL {
    override val routeBuilders = self.routeBuilders
    override val annotations = self.annotations
    override def buildFilter(injector: Injector) = self.buildFilter(injector).andThen(injector.instance[FilterType])
  }

  /** Returns a DSL view whose routes run behind the given filter instance. */
  def filter(next: HttpFilter) = new RouteDSL {
    override val routeBuilders = self.routeBuilders
    // keep the declaring controller's annotations, consistent with the
    // injected-filter overload above (previously this view dropped them)
    override val annotations = self.annotations
    override def buildFilter(injector: Injector) = self.buildFilter(injector).andThen(next)
  }

  // One method per HTTP verb; each registers a callback under a route pattern
  // (and optional name) together with this view's filter chain.
  def get[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Get, route, name, callback, self)

  def post[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Post, route, name, callback, self)

  def put[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Put, route, name, callback, self)

  def delete[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Delete, route, name, callback, self)

  def options[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Options, route, name, callback, self)

  def patch[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Patch, route, name, callback, self)

  def head[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Head, route, name, callback, self)

  def trace[RequestType: Manifest, ResponseType: Manifest](route: String, name: String = "")(callback: RequestType => ResponseType): Unit = routeBuilders += new RouteBuilder(Trace, route, name, callback, self)
}
| deanh/finatra | http/src/main/scala/com/twitter/finatra/http/RouteDSL.scala | Scala | apache-2.0 | 2,749 |
// Use Scala's special syntax for defining *anonymous Functions*
object Foo {
  /** Returns true when `x` is even (works for negative values too). */
  def isGood(x: Int): Boolean = (x & 1) == 0
}
object UseCase {
  /** Predicate over ints, delegating to `Foo.isGood` via placeholder syntax. */
  val predicate: Int => Boolean = Foo.isGood(_)
}
| agconti/scala-school | 04-functions-as-values/slides/slide049.scala | Scala | mit | 221 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js tools **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013-2015, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.core.tools.linker.frontend
import org.scalajs.core.tools.logging.Logger
import org.scalajs.core.tools.io.VirtualScalaJSIRFile
import org.scalajs.core.tools.sem.Semantics
import org.scalajs.core.tools.javascript.ESLevel
import org.scalajs.core.tools.linker.LinkingUnit
import org.scalajs.core.tools.linker.analyzer.SymbolRequirement
import org.scalajs.core.tools.linker.frontend.optimizer.{GenIncOptimizer, IncOptimizer}
/** The frontend of the Scala.js linker. Produces a [[LinkingUnit]]
*
* You probably want to use an instance of [[linker.Linker]], rather than this
* low-level class.
*
* Attention: [[LinkerFrontend]] does not cache the IR input. It is advisable to do
* so, unless all IR is already in memory.
*/
final class LinkerFrontend(
    val semantics: Semantics,
    val esLevel: ESLevel,
    val withSourceMap: Boolean,
    config: LinkerFrontend.Config,
    optimizerFactory: Option[GenIncOptimizer.OptimizerFactory]) {

  private[this] val linker: BaseLinker =
    new BaseLinker(semantics, esLevel, withSourceMap)

  // present only when an optimizer factory was supplied
  private[this] val optOptimizer: Option[GenIncOptimizer] =
    optimizerFactory.map(_(semantics, esLevel, withSourceMap))

  private[this] val refiner: Refiner = new Refiner

  /** Link and optionally optimize the given IR to a [[LinkingUnit]]. */
  def link(irFiles: Seq[VirtualScalaJSIRFile],
      symbolRequirements: SymbolRequirement, logger: Logger): LinkingUnit = {

    // the optimizer may need additional symbols to be kept alive by the linker
    val preOptimizerRequirements = optOptimizer.fold(symbolRequirements) {
      optimizer => symbolRequirements ++ optimizer.symbolRequirements
    }

    val linkResult = logger.time("Basic Linking") {
      linker.linkInternal(irFiles, logger, preOptimizerRequirements,
          config.bypassLinkingErrors, config.checkIR)
    }

    optOptimizer.fold(linkResult) { optimizer =>
      if (linkResult.isComplete) {
        optimize(linkResult, symbolRequirements, optimizer, logger)
      } else {
        // fix: "there where" -> "there were"
        logger.warn("Not running the optimizer because there were linking errors.")
        linkResult
      }
    }
  }

  /** Runs the incremental optimizer, then re-refines the optimized unit. */
  private def optimize(unit: LinkingUnit, symbolRequirements: SymbolRequirement,
      optimizer: GenIncOptimizer, logger: Logger): LinkingUnit = {
    val optimized = logger.time("Inc. optimizer") {
      optimizer.update(unit, logger)
    }

    logger.time("Refiner") {
      refiner.refine(optimized, symbolRequirements, logger)
    }
  }
}
object LinkerFrontend {
  /** Configurations relevant to the frontend */
  final class Config private (
      /** Whether to only warn if the linker has errors. */
      val bypassLinkingErrors: Boolean = false,
      /** If true, performs expensive checks of the IR for the used parts. */
      val checkIR: Boolean = false
  ) {
    @deprecated(
        "Bypassing linking errors will not be possible in the next major version.",
        "0.6.6")
    def withBypassLinkingErrors(bypassLinkingErrors: Boolean): Config =
      copy(bypassLinkingErrors = bypassLinkingErrors)

    // Non-deprecated version to call from the sbt plugin
    private[scalajs] def withBypassLinkingErrorsInternal(
        bypassLinkingErrors: Boolean): Config = {
      copy(bypassLinkingErrors = bypassLinkingErrors)
    }

    def withCheckIR(checkIR: Boolean): Config =
      copy(checkIR = checkIR)

    // single private copy helper so new fields only need to be threaded here
    private def copy(
        bypassLinkingErrors: Boolean = bypassLinkingErrors,
        checkIR: Boolean = checkIR): Config = {
      new Config(bypassLinkingErrors, checkIR)
    }
  }

  object Config {
    /** Returns the default configuration (no bypass, no IR checking). */
    def apply(): Config = new Config()
  }
}
| mdedetrich/scala-js | tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/LinkerFrontend.scala | Scala | bsd-3-clause | 4,071 |
// Negative compiler test: a recursive `inline def` used from a for-comprehension
// must be rejected; the `// error` marker pins the line of the expected diagnostic.
// Do not "fix" this code — failing to compile is the test's purpose.
object Test {
  inline def encode(n: String): List[String] =
    for {
      a <- List("s")
      b <- List("w")
      c <- encode(n)
    } yield c
  def encode0(n: String) = encode(n) // error
}
| som-snytt/dotty | tests/disabled/neg/i3081.scala | Scala | apache-2.0 | 197 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.backend.api.dataframes
import ai.h2o.sparkling.utils.SparkSessionUtils
import water.exceptions.H2ONotFoundArgumentException
trait DataFrameCommons {
  /** Fails with [[H2ONotFoundArgumentException]] unless a DataFrame with the
   *  given id is registered as a table in the active Spark session.
   */
  def validateDataFrameId(dataFrameId: String): Unit = {
    val registeredTables = SparkSessionUtils.active.sqlContext.tableNames()
    if (!registeredTables.contains(dataFrameId))
      throw new H2ONotFoundArgumentException(s"DataFrame with id '$dataFrameId' does not exist!")
  }
}
| h2oai/sparkling-water | core/src/main/scala/ai/h2o/sparkling/backend/api/dataframes/DataFrameCommons.scala | Scala | apache-2.0 | 1,227 |
package io.prediction.examples.pfriendrecommendation
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkContext._
import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Map
object DeltaSimRankRDD {
var decay:Double = 0.8
var numNodes:Int = 0
def calculateNthIter(
numNodes:Int,
g:Graph[Int, Int],
prevDelta:RDD[((VertexId,VertexId),Double)],
outDegreeMap:scala.collection.Map[VertexId,Long])
: RDD[((VertexId,VertexId), Double)] =
{
// No changes in last iteration -> no changes this iteration.
if (prevDelta.count() == 0)
return prevDelta
val pairList = prevDelta.toArray
val kvPairs = pairList.map(pair => {
val a = pair._1._1.toInt
val b = pair._1._2.toInt
val delta = pair._2
val b_adj = g.edges.filter(e => e.dstId == b).map(x=>x.srcId)
val a_adj = g.edges.filter(e => e.dstId == a).map(x=>x.srcId)
val scorePairs = a_adj.cartesian(b_adj)
scorePairs.filter(pair=> pair._1 != pair._2).map(pair => (pair, delta))
})
var union = kvPairs(0)
var index = 0
for (index <- 1 to kvPairs.size-1)
union = union ++ kvPairs(index)
val newDelta = union.reduceByKey(_ + _)
.map(k =>
(k._1, k._2*decay/(outDegreeMap(k._1._1) + outDegreeMap(k._1._2)))
)
newDelta
}
def identityMatrix(sc:SparkContext, numCols:Long) : RDD[(Long, Double)] =
{
val numElements = numCols * numCols
val arr = Array[Long]((0L to numElements).toList:_*)
// (Score, Index), where (x,y) = (Index/numCols, Index%numCols)
val pairs = arr.map(x => {
if (x/numCols == x % numCols)
(x, 1.0)
else
(x, 0.0)
})
sc.parallelize(pairs)
}
def matrixToIndices(x:Int, y:Int, numCols:Int) = {
x + y * numCols
}
def joinDelta(
prevIter:RDD[(Long, Double)],
numCols:Int,
delta:RDD[((VertexId,VertexId), Double)]) : RDD[(Long,Double)] =
{
val deltaToIndex:RDD[(Long,Double)] = delta.map(x => {
val index = x._1._1*numCols + x._1._2
(index, x._2)
})
val newIter = prevIter.leftOuterJoin(deltaToIndex)
val newScores = newIter.map(x => {
val index = x._1
if (x._2._2.isDefined) {
(index, x._2._1 + x._2._2.get)
} else {
(index, x._2._1)
}
})
newScores
}
def getOutdegreeMap(g:Graph[Int,Int]) : scala.collection.Map[VertexId, Long] =
{
g.edges.map(edge => (edge.srcId,1L))
.reduceByKey(_ + _)
.collectAsMap()
}
// Runs the delta-SimRank iteration loop and returns the final flattened
// similarity scores.
//
// Side effects: writes `numNodes` and `decay` into the enclosing object's
// mutable fields so helpers (e.g. calculateNthIter) can read them.
//
// NOTE(review): `for (i <- 0 to numIterations)` executes numIterations + 1
// times (bounds inclusive) — confirm the extra iteration is intentional.
def compute(
  g:Graph[Int,Int],
  numIterations:Int,
  identityMatrix:RDD[(VertexId,Double)],
  newDecay:Double) : RDD[(VertexId,Double)] =
{
  numNodes = g.vertices.count().toInt
  decay = newDecay
  // Build the identity matrix representing 0-th iteration of SimRank
  val s0 = identityMatrix
  val outDegreeMap:scala.collection.Map[VertexId,Long] = getOutdegreeMap(g)
  // Iteration-0 delta: every vertex is fully similar to itself.
  val s0Delta = g.vertices.map(vertex => ((vertex._1, vertex._1), 1.0))
  var prevSimrank = s0
  var prevDelta = s0Delta
  for (i <- 0 to numIterations) {
    val nextIterDelta = calculateNthIter(numNodes, g, prevDelta, outDegreeMap)
    val nextIterSimrank = joinDelta(prevSimrank, numNodes, nextIterDelta)
    prevSimrank = nextIterSimrank
    prevDelta = nextIterDelta
  }
  prevSimrank
}
// Make all vertexId in one contiguous number range [0, #distinct ids).
//
// Bug fix: the original mutated a driver-side map and counter from inside
// RDD `map` closures. Those closures run (lazily) on executors, each with its
// own serialized copy of the map/counter, so the mutations are never shared
// between the vertex pass and the edge pass (or between partitions), and the
// renumbering was inconsistent. Instead we derive the id mapping with
// zipWithIndex, which assigns one unique contiguous Long per distinct id.
def normalizeGraph(g:Graph[Int,Int]) = {
  // Include edge endpoints too: the original also assigned ids to vertices
  // that only appear in the edge list.
  val allIds = g.vertices.map(_._1)
    .union(g.edges.flatMap(edge => Seq(edge.srcId, edge.dstId)))
    .distinct()
  val hash: scala.collection.Map[VertexId, Long] = allIds.zipWithIndex().collectAsMap()
  val v = g.vertices.map(pair => (hash(pair._1), pair._2))
  val e = g.edges.map((edge: Edge[Int]) => Edge(hash(edge.srcId), hash(edge.dstId), edge.attr))
  val g2 = Graph(v, e)
  g2
}
}
| TheDataShed/PredictionIO | examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala | Scala | apache-2.0 | 4,190 |
package chandu0101.scalajsreact.components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
object Footer {
  // Static (no props, no state) footer component: a centered <footer> with a
  // separator line and an attribution paragraph. Built once and reused.
  val component = ReactComponentB.static("Footer",
    <.footer(^.textAlign.center,
      <.div(^.borderBottom := "1px solid grey", ^.padding := "0px"),
      <.p(^.paddingTop := "5px", "Built using scalajs/scalajs-react/scalacss")
    )
  ).buildU

  // Instantiates the footer for inclusion in a parent vdom tree.
  def apply() = component()
}
| chandu0101/reactjs-to-scalajs | src/main/scala/chandu0101/scalajsreact/components/Footer.scala | Scala | apache-2.0 | 417 |
package org.yotchang4s.ch2.response
import org.yotchang4s.ch2._
import org.yotchang4s.ch2.thread.ThreadId
// Cake-pattern component exposing read access to 2ch thread responses.
trait ResponseComponent {
  // Concrete implementations supply the repository instance.
  val response: ResponseRepository

  trait ResponseRepository {
    // Fetches the responses of `threadId`; returns either a Ch2Exception or a
    // (String, parsed response list) pair. NOTE(review): the meaning of the
    // String component (presumably the thread title/raw header) is not visible
    // here — confirm against an implementation.
    def findResponses(threadId: ThreadId)(implicit config: Ch2Config): Either[Ch2Exception, (String, List[Response])]
  }
} | yotchang4s/gikolet | src/org/yotchang4s/ch2/response/ResponseComponent.scala | Scala | bsd-3-clause | 322 |
package quizleague {
package object rest {
  import io.circe._, io.circe.parser._

  /** Decodes `body` as JSON into a `T`, throwing the circe failure on error. */
  def deser[T](body: String)(implicit decoder: Decoder[T]): T =
    decode[T](body) match {
      case Right(value) => value
      case Left(error)  => throw error
    }
}
} | gumdrop/quizleague-maintain | jvm/src/main/scala/quizleague/rest/package.scala | Scala | mit | 200 |
package com.wix.fax.phaxio.model
/**
* @see <a href="http://www.phaxio.com/docs/errorMessages/">Error Messages</a>
*/
// String constants for the error types reported by the Phaxio API.
object ErrorType {
  /** documentConversionErrors usually occur when there is a problem with one of the files you posted. */
  val documentConversionError = "documentConversionError"

  /** There was a problem with the phone line. The call could not be placed. */
  val lineError = "lineError"

  /** A problem occured during the fax communication process. */
  val faxError = "faxError"

  /** Something went wrong internally on our systems. This should occur very infrequently. */
  val fatalError = "fatalError"

  /**
   * Similar to fatalError but higher priority. As soon as one of these occurs, rest assured that we're looking into it
   * immediately and will follow up with what went wrong.
   */
  val generalError = "generalError"
}
| wix/libfax | libfax-phaxio-core/src/main/scala/com/wix/fax/phaxio/model/ErrorType.scala | Scala | apache-2.0 | 889 |
package scala.lms
package util
import java.util.{ArrayDeque, HashMap}
object GraphUtil {

  /** Minimal mutable cell used to thread state through the recursion. */
  class Ref[T](init: T) {
    var value: T = init
  }

  /**
   * Returns the strongly connected components of the graph reachable from
   * `start`, where `succ` yields the successors of a node. Components are
   * returned in topological order. Tarjan's algorithm (linear time).
   *
   * Example:
   *   stronglyConnectedComponents[String](List("A"),
   *     { case "A" => List("B"); case "B" => List("C")
   *       case "C" => List("A","D"); case "D" => Nil })
   *   == List(List("A","B","C"), List("D"))
   */
  def stronglyConnectedComponents[T](start: List[T], succ: T => List[T]): List[List[T]] = {
    val nextId = new Ref(0)
    val pending = new ArrayDeque[T]
    val marks = new HashMap[T, Int]
    val components = new Ref[List[List[T]]](Nil)
    start.foreach(node => visit(node, succ, nextId, pending, marks, components))
    components.value
  }

  /**
   * Visits `node` depth-first, returning the smallest mark reachable from it.
   * Completed components have their marks set to Integer.MAX_VALUE so they are
   * never treated as a back-edge target again.
   */
  def visit[T](node: T, succ: T => List[T], id: Ref[Int], stack: ArrayDeque[T],
               mark: HashMap[T, Int], res: Ref[List[List[T]]]): Int = {
    if (mark.containsKey(node)) {
      // Already visited (or finished): its mark is its answer.
      mark.get(node)
    } else {
      id.value += 1
      mark.put(node, id.value)
      stack.addFirst(node)
      var lowest: Int = id.value
      succ(node).foreach { child =>
        lowest = math.min(lowest, visit(child, succ, id, stack, mark, res))
      }
      // `node` is the root of a component iff no back-edge reached above it.
      if (lowest == mark.get(node)) {
        var component: List[T] = Nil
        var done = false
        while (!done) {
          val element = stack.removeFirst()
          component ::= element
          mark.put(element, Integer.MAX_VALUE)
          done = element == node
        }
        res.value ::= component
      }
      lowest
    }
  }
}
| astojanov/virtualization-lms-core | src/util/GraphUtil.scala | Scala | bsd-3-clause | 1,967 |
/*
* Copyright (c) 2015-2016 Luciano Resende
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.luck.streaming
import java.io._
import java.net.{InetAddress, ServerSocket, Socket, SocketException}
import java.util.Random
/**
 * Accept loop: listens on ServerPort and spawns one ServerThread per client.
 */
object StreamingSocketGenerator {
  val ServerPort = 9999;

  def main(args: Array[String]): Unit = {
    try {
      println("Starting streaming application...") // scalastyle:ignore
      val serverSocket = new ServerSocket(ServerPort)
      try {
        while (true) {
          new ServerThread(serverSocket.accept()).start()
        }
      } finally {
        // The accept loop only exits via an exception; release the listening
        // socket in that case. (The original close() call sat after the
        // infinite loop and was unreachable, leaking the socket on failure.)
        serverSocket.close()
      }
    }
    catch {
      case e: IOException =>
        System.err.println("Could not listen on port: 9999."); //scalastyle:ignore
        System.exit(-1)
    }
  }
}
case class ServerThread(socket: Socket) extends Thread("ServerThread") {

  // One PRNG for the thread's lifetime. The original constructed a new Random
  // seeded with System.currentTimeMillis() on every generateJson call, so two
  // calls within the same millisecond produced identical "random" prices.
  private val rand = new Random(System.currentTimeMillis())

  /**
   * Builds a one-line JSON quote for `symbol` with a random price in [0, 1)
   * and the current local timestamp.
   */
  def generateJson(symbol: String): String = {
    val price = rand.nextFloat()
    val date = java.time.LocalDateTime.now().toString()
    // Locale.ROOT pins the decimal separator to '.'; with the default locale,
    // "%f" could emit ',' and produce malformed JSON.
    "{\"symbol\":\"%s\", \"price\":%f, \"date\":\"%s\"}"
      .formatLocal(java.util.Locale.ROOT, symbol, price, date)
  }

  /** Streams 1000 rounds of quotes (one per symbol, 10 ms apart) to the client. */
  override def run(): Unit = {
    println("Starting streaming thread...") // scalastyle:ignore
    val symbols = Array("LNKD", "IBM", "APPL")
    try {
      val out = new PrintWriter(socket.getOutputStream(), true)
      val in = new BufferedReader(new InputStreamReader(socket.getInputStream))
      var count = 0
      while (count < 1000) {
        count = count + 1
        for (s <- symbols) {
          val json: String = generateJson(s)
          println(f"Submitting ==> $json") // scalastyle:ignore
          out.println(json) // scalastyle:ignore
          Thread.sleep(10)
        }
      }
      out.close()
      in.close()
      socket.close()
    }
    catch {
      case e: SocketException =>
        () // avoid stack trace when stopping a client with Ctrl-C
      case e: IOException =>
        e.printStackTrace();
    }
  }
} | lresende/spark-sandbox | src/main/scala/com/luck/streaming/StreamingSocketGenerator.scala | Scala | apache-2.0 | 2,536 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Collections
import java.util.concurrent.atomic.AtomicInteger
import kafka.api.LeaderAndIsr
import kafka.utils.{MockScheduler, MockTime}
import kafka.zk.KafkaZkClient
import org.apache.kafka.clients.ClientResponse
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.errors.{AuthenticationException, UnsupportedVersionException}
import org.apache.kafka.common.message.AlterIsrResponseData
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.requests.{AbstractRequest, AlterIsrRequest, AlterIsrResponse}
import org.easymock.EasyMock
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.{BeforeEach, Test}
import org.mockito.ArgumentMatchers.{any, anyString}
import org.mockito.{ArgumentMatchers, Mockito}
/**
 * Unit tests for the AlterIsr request path: batching/overwriting of pending
 * updates, retry behavior on top-level and transport errors, per-partition
 * error propagation, single-request-in-flight semantics, and the ZK-based
 * fallback implementation.
 */
class AlterIsrManagerTest {

  val topic = "test-topic"
  val time = new MockTime
  val metrics = new Metrics
  val brokerId = 1

  var brokerToController: BrokerToControllerChannelManager = _

  val tp0 = new TopicPartition(topic, 0)
  val tp1 = new TopicPartition(topic, 1)
  val tp2 = new TopicPartition(topic, 2)

  @BeforeEach
  def setup(): Unit = {
    brokerToController = EasyMock.createMock(classOf[BrokerToControllerChannelManager])
  }

  @Test
  def testBasic(): Unit = {
    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.anyObject())).once()
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()
    alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))
    EasyMock.verify(brokerToController)
  }

  @Test
  def testOverwriteWithinBatch(): Unit = {
    val capture = EasyMock.newCapture[AbstractRequest.Builder[AlterIsrRequest]]()
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()

    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.capture(capture), EasyMock.capture(callbackCapture))).times(2)
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()

    // Only send one ISR update for a given topic+partition
    assertTrue(alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0)))
    assertFalse(alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2), 10), _ => {}, 0)))

    // Simulate response
    val alterIsrResp = partitionResponse(tp0, Errors.NONE)
    val resp = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)
    callbackCapture.getValue.onComplete(resp)

    // Now we can submit this partition again
    assertTrue(alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1), 10), _ => {}, 0)))
    EasyMock.verify(brokerToController)

    // Make sure we sent the right request ISR={1}
    val request = capture.getValue.build()
    assertEquals(request.data().topics().size(), 1)
    assertEquals(request.data().topics().get(0).partitions().get(0).newIsr().size(), 1)
  }

  @Test
  def testSingleBatch(): Unit = {
    val capture = EasyMock.newCapture[AbstractRequest.Builder[AlterIsrRequest]]()
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()

    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.capture(capture), EasyMock.capture(callbackCapture))).times(2)
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()

    // First request will send batch of one
    alterIsrManager.submit(AlterIsrItem(new TopicPartition(topic, 0),
      new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))

    // Other submissions will queue up until a response
    for (i <- 1 to 9) {
      alterIsrManager.submit(AlterIsrItem(new TopicPartition(topic, i),
        new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))
    }

    // Simulate response, omitting partition 0 will allow it to stay in unsent queue
    val alterIsrResp = new AlterIsrResponse(new AlterIsrResponseData())
    val resp = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)

    // On the callback, we check for unsent items and send another request
    callbackCapture.getValue.onComplete(resp)

    EasyMock.verify(brokerToController)

    // Verify the last request sent had all 10 items
    val request = capture.getValue.build()
    assertEquals(request.data().topics().size(), 1)
    assertEquals(request.data().topics().get(0).partitions().size(), 10)
  }

  @Test
  def testAuthorizationFailed(): Unit = {
    testRetryOnTopLevelError(Errors.CLUSTER_AUTHORIZATION_FAILED)
  }

  @Test
  def testStaleBrokerEpoch(): Unit = {
    testRetryOnTopLevelError(Errors.STALE_BROKER_EPOCH)
  }

  @Test
  def testUnknownServer(): Unit = {
    testRetryOnTopLevelError(Errors.UNKNOWN_SERVER_ERROR)
  }

  @Test
  def testRetryOnAuthenticationFailure(): Unit = {
    testRetryOnErrorResponse(new ClientResponse(null, null, "", 0L, 0L,
      false, null, new AuthenticationException("authentication failed"), null))
  }

  @Test
  def testRetryOnUnsupportedVersionError(): Unit = {
    testRetryOnErrorResponse(new ClientResponse(null, null, "", 0L, 0L,
      false, new UnsupportedVersionException("unsupported version"), null, null))
  }

  private def testRetryOnTopLevelError(error: Errors): Unit = {
    val alterIsrResp = new AlterIsrResponse(new AlterIsrResponseData().setErrorCode(error.code))
    val response = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)
    testRetryOnErrorResponse(response)
  }

  private def testRetryOnErrorResponse(response: ClientResponse): Unit = {
    val leaderAndIsr = new LeaderAndIsr(1, 1, List(1,2,3), 10)
    val isrs = Seq(AlterIsrItem(tp0, leaderAndIsr, _ => { }, 0))
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()

    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).times(1)
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()
    isrs.foreach(alterIsrManager.submit)

    EasyMock.verify(brokerToController)

    callbackCapture.getValue.onComplete(response)

    // Any top-level error, we want to retry, so we don't clear items from the pending map
    assertTrue(alterIsrManager.unsentIsrUpdates.containsKey(tp0))

    EasyMock.reset(brokerToController)
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).times(1)
    EasyMock.replay(brokerToController)

    // After some time, we will retry failed requests
    time.sleep(100)
    scheduler.tick()

    // After a successful response, we can submit another AlterIsrItem
    val retryAlterIsrResponse = partitionResponse(tp0, Errors.NONE)
    val retryResponse = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, retryAlterIsrResponse)
    callbackCapture.getValue.onComplete(retryResponse)

    EasyMock.verify(brokerToController)

    assertFalse(alterIsrManager.unsentIsrUpdates.containsKey(tp0))
  }

  @Test
  def testInvalidUpdateVersion(): Unit = {
    checkPartitionError(Errors.INVALID_UPDATE_VERSION)
  }

  @Test
  def testUnknownTopicPartition(): Unit = {
    checkPartitionError(Errors.UNKNOWN_TOPIC_OR_PARTITION)
  }

  @Test
  def testNotLeaderOrFollower(): Unit = {
    checkPartitionError(Errors.NOT_LEADER_OR_FOLLOWER)
  }

  private def checkPartitionError(error: Errors): Unit = {
    val alterIsrManager = testPartitionError(tp0, error)
    // Any partition-level error should clear the item from the pending queue allowing for future updates
    assertTrue(alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0)))
  }

  private def testPartitionError(tp: TopicPartition, error: Errors): AlterIsrManager = {
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()
    EasyMock.reset(brokerToController)
    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).once()
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()

    var capturedError: Option[Errors] = None
    val callback = (result: Either[Errors, LeaderAndIsr]) => {
      result match {
        case Left(error: Errors) => capturedError = Some(error)
        case Right(_) => fail("Should have seen error")
      }
    }

    alterIsrManager.submit(AlterIsrItem(tp, new LeaderAndIsr(1, 1, List(1,2,3), 10), callback, 0))
    EasyMock.verify(brokerToController)
    EasyMock.reset(brokerToController)

    val alterIsrResp = partitionResponse(tp, error)
    val resp = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)
    callbackCapture.getValue.onComplete(resp)
    assertTrue(capturedError.isDefined)
    assertEquals(capturedError.get, error)
    alterIsrManager
  }

  @Test
  def testOneInFlight(): Unit = {
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()
    EasyMock.reset(brokerToController)
    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).once()
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()

    // First submit will send the request
    alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))

    // These will become pending unsent items
    alterIsrManager.submit(AlterIsrItem(tp1, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))
    alterIsrManager.submit(AlterIsrItem(tp2, new LeaderAndIsr(1, 1, List(1,2,3), 10), _ => {}, 0))

    EasyMock.verify(brokerToController)

    // Once the callback runs, another request will be sent
    EasyMock.reset(brokerToController)
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).once()
    EasyMock.replay(brokerToController)

    val alterIsrResp = new AlterIsrResponse(new AlterIsrResponseData())
    val resp = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)
    callbackCapture.getValue.onComplete(resp)
    EasyMock.verify(brokerToController)
  }

  @Test
  def testPartitionMissingInResponse(): Unit = {
    val callbackCapture = EasyMock.newCapture[ControllerRequestCompletionHandler]()
    EasyMock.reset(brokerToController)
    EasyMock.expect(brokerToController.start())
    EasyMock.expect(brokerToController.sendRequest(EasyMock.anyObject(), EasyMock.capture(callbackCapture))).once()
    EasyMock.replay(brokerToController)

    val scheduler = new MockScheduler(time)
    val alterIsrManager = new DefaultAlterIsrManager(brokerToController, scheduler, time, brokerId, () => 2)
    alterIsrManager.start()

    val count = new AtomicInteger(0)
    // Fix: the original lambda ended with `return`, which inside a closure is a
    // non-local return from the enclosing test method (it throws
    // NonLocalReturnControl when the callback fires). The callback only needs
    // to count invocations.
    val callback = (result: Either[Errors, LeaderAndIsr]) => {
      count.incrementAndGet()
      ()
    }

    alterIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 10), callback, 0))
    alterIsrManager.submit(AlterIsrItem(tp1, new LeaderAndIsr(1, 1, List(1,2,3), 10), callback, 0))
    alterIsrManager.submit(AlterIsrItem(tp2, new LeaderAndIsr(1, 1, List(1,2,3), 10), callback, 0))

    EasyMock.verify(brokerToController)

    // Three partitions were sent, but only one returned
    val alterIsrResp = partitionResponse(tp0, Errors.UNKNOWN_SERVER_ERROR)
    val resp = new ClientResponse(null, null, "", 0L, 0L,
      false, null, null, alterIsrResp)
    callbackCapture.getValue.onComplete(resp)

    assertEquals(count.get, 3, "Expected all callbacks to run")
  }

  @Test
  def testZkBasic(): Unit = {
    val scheduler = new MockScheduler(time)
    scheduler.startup()

    val kafkaZkClient = Mockito.mock(classOf[KafkaZkClient])
    Mockito.doAnswer(_ => (true, 2))
      .when(kafkaZkClient)
      .conditionalUpdatePath(anyString(), any(), ArgumentMatchers.eq(1), any())
    Mockito.doAnswer(_ => (false, 2))
      .when(kafkaZkClient)
      .conditionalUpdatePath(anyString(), any(), ArgumentMatchers.eq(3), any())

    val zkIsrManager = new ZkIsrManager(scheduler, time, kafkaZkClient)
    zkIsrManager.start()

    def expectMatch(expect: Either[Errors, LeaderAndIsr])(result: Either[Errors, LeaderAndIsr]): Unit = {
      assertEquals(expect, result)
    }

    // Correct ZK version
    assertTrue(zkIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 1),
      expectMatch(Right(new LeaderAndIsr(1, 1, List(1,2,3), 2))), 0)))

    // Wrong ZK version
    assertTrue(zkIsrManager.submit(AlterIsrItem(tp0, new LeaderAndIsr(1, 1, List(1,2,3), 3),
      expectMatch(Left(Errors.INVALID_UPDATE_VERSION)), 0)))
  }

  /** Builds an AlterIsrResponse carrying a single partition-level error code. */
  private def partitionResponse(tp: TopicPartition, error: Errors): AlterIsrResponse = {
    new AlterIsrResponse(new AlterIsrResponseData()
      .setTopics(Collections.singletonList(
        new AlterIsrResponseData.TopicData()
          .setName(tp.topic())
          .setPartitions(Collections.singletonList(
            new AlterIsrResponseData.PartitionData()
              .setPartitionIndex(tp.partition())
              .setErrorCode(error.code))))))
  }
}
| guozhangwang/kafka | core/src/test/scala/unit/kafka/server/AlterIsrManagerTest.scala | Scala | apache-2.0 | 15,087 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions.validation
import org.apache.flink.table.api.{SqlParserException, ValidationException}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.utils.ScalarTypesTestBase
import org.junit.Test
/**
 * Negative tests for scalar functions: each case supplies invalid arguments or
 * malformed SQL and expects the corresponding exception (declared via the
 * `expected` attribute of @Test) rather than a result.
 */
class ScalarFunctionsValidationTest extends ScalarTypesTestBase {

  // ----------------------------------------------------------------------------------------------
  // Math functions
  // ----------------------------------------------------------------------------------------------

  @Test(expected = classOf[IllegalArgumentException])
  def testInvalidLog1(): Unit = {
    // invalid arithmetic argument (log base 1 is undefined)
    testSqlApi(
      "LOG(1, 100)",
      "FAIL"
    )
  }

  @Test(expected = classOf[IllegalArgumentException])
  def testInvalidLog2(): Unit ={
    // invalid arithmetic argument (log of a negative number)
    testSqlApi(
      "LOG(-1)",
      "FAIL"
    )
  }

  // ----------------------------------------------------------------------------------------------
  // String functions
  // ----------------------------------------------------------------------------------------------

  @Test(expected = classOf[ValidationException])
  def testInvalidSubstring1(): Unit = {
    // Must fail. Parameter of substring must be an Integer not a Double.
    testTableApi("test".substring(2.0.toExpr), "FAIL", "FAIL")
  }

  @Test(expected = classOf[ValidationException])
  def testInvalidSubstring2(): Unit = {
    // Must fail. Parameter of substring must be an Integer not a String.
    testTableApi("test".substring("test".toExpr), "FAIL", "FAIL")
  }

  // ----------------------------------------------------------------------------------------------
  // Temporal functions
  // ----------------------------------------------------------------------------------------------

  @Test(expected = classOf[SqlParserException])
  def testTimestampAddWithWrongTimestampInterval(): Unit ={
    // XXX is not a valid interval unit
    testSqlApi("TIMESTAMPADD(XXX, 1, timestamp '2016-02-24'))", "2016-06-16")
  }

  @Test(expected = classOf[SqlParserException])
  def testTimestampAddWithWrongTimestampFormat(): Unit ={
    // timestamp literal is missing its time component
    testSqlApi("TIMESTAMPADD(YEAR, 1, timestamp '2016-02-24'))", "2016-06-16")
  }

  @Test(expected = classOf[ValidationException])
  def testTimestampAddWithWrongQuantity(): Unit ={
    // quantity must be an integer, not a fractional number
    testSqlApi("TIMESTAMPADD(YEAR, 1.0, timestamp '2016-02-24 12:42:25')", "2016-06-16")
  }

  // ----------------------------------------------------------------------------------------------
  // Sub-query functions
  // ----------------------------------------------------------------------------------------------

  @Test(expected = classOf[ValidationException])
  def testInValidationExceptionMoreThanOneTypes(): Unit = {
    testTableApi(
      'f2.in('f3, 'f4, 4),
      "f2.in(f3, f4, 4)",
      "true"
    )
  }

  @Test(expected = classOf[ValidationException])
  def scalaInValidationExceptionDifferentOperandsTest(): Unit = {
    testTableApi(
      'f1.in("Hi", "Hello world", "Comment#1"),
      "true",
      "true"
    )
  }

  @Test(expected = classOf[ValidationException])
  def javaInValidationExceptionDifferentOperandsTest(): Unit = {
    testTableApi(
      true,
      "f1.in('Hi','Hello world','Comment#1')",
      "true"
    )
  }
}
| zohar-mizrahi/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/validation/ScalarFunctionsValidationTest.scala | Scala | apache-2.0 | 4,075 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.runtime.fragment
package user.join
import scala.collection.JavaConverters._
import com.asakusafw.runtime.core.GroupView
import com.asakusafw.runtime.model.DataModel
/**
 * Base fragment for broadcast-side joins: each incoming transaction record is
 * matched against the broadcast master table via a key lookup, then handed to
 * the concrete `join` implementation together with the selected master.
 */
abstract class BroadcastJoinOperatorFragment[M <: DataModel[M], T <: DataModel[T]]
  extends Fragment[T]
  with Join[M, T] {

  // The broadcast master-data view, indexed by join key.
  protected def masters: GroupView[M]

  // Extracts the join-key values from a transaction record.
  protected def keyElements(tx: T): Array[AnyRef]

  override def doAdd(tx: T): Unit = {
    // Look up master candidates by key, let masterSelection pick one (or none),
    // then delegate to the subtype-specific join.
    val master = masterSelection(masters.find(keyElements(tx): _*), tx)
    join(master, tx)
  }
}
/**
 * Broadcast join variant that routes each transaction to one of several
 * downstream fragments, keyed by the enum value the branch logic selects.
 */
abstract class BroadcastMasterBranchOperatorFragment[M <: DataModel[M], T <: DataModel[T], E <: Enum[E]]( // scalastyle:ignore
  val children: Map[E, Fragment[T]])
  extends BroadcastJoinOperatorFragment[M, T]
  with MasterBranch[M, T, E] {

  override def doReset(): Unit = {
    // Propagate reset to every branch output.
    children.values.foreach(_.reset())
  }
}
/**
 * Broadcast join variant that only checks for master existence: records go to
 * `found` when a master matched and to `missed` otherwise.
 */
abstract class BroadcastMasterCheckOperatorFragment[M <: DataModel[M], T <: DataModel[T]](
  val missed: Fragment[T],
  val found: Fragment[T])
  extends BroadcastJoinOperatorFragment[M, T]
  with MasterCheck[M, T] {

  override def doReset(): Unit = {
    missed.reset()
    found.reset()
  }
}
/**
 * Broadcast join variant producing a joined record: matched pairs are merged
 * into `joinedDataModel` (a reusable buffer) and emitted to `joined`;
 * unmatched transactions go to `missed`.
 */
abstract class BroadcastMasterJoinOperatorFragment[M <: DataModel[M], T <: DataModel[T], J <: DataModel[J]]( // scalastyle:ignore
  val missed: Fragment[T],
  val joined: Fragment[J],
  val joinedDataModel: J)
  extends BroadcastJoinOperatorFragment[M, T]
  with MasterJoin[M, T, J] {

  override def doReset(): Unit = {
    missed.reset()
    joined.reset()
  }
}
/**
 * Broadcast join variant that updates the transaction in place: records with a
 * matching master go to `updated`, the rest to `missed`.
 */
abstract class BroadcastMasterJoinUpdateOperatorFragment[M <: DataModel[M], T <: DataModel[T]](
  val missed: Fragment[T],
  val updated: Fragment[T])
  extends BroadcastJoinOperatorFragment[M, T]
  with MasterJoinUpdate[M, T] {

  override def doReset(): Unit = {
    missed.reset()
    updated.reset()
  }
}
| ashigeru/asakusafw-spark | runtime/src/main/scala/com/asakusafw/spark/runtime/fragment/user/join/BroadcastJoinOperatorFragment.scala | Scala | apache-2.0 | 2,458 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv
package laws
import kantan.codecs.laws.discipline._
import kantan.csv._
package object discipline extends DisciplinePackage {
  // Discipline law-test aliases specialised to kantan.csv's codec model:
  // rows are encoded as Seq[String], cells as String, decoding failures are
  // DecodeError, and instances are tagged with the `codecs` object.
  type RowDecoderTests[A] = DecoderTests[Seq[String], A, DecodeError, codecs.type]
  type RowEncoderTests[A] = EncoderTests[Seq[String], A, codecs.type]
  type RowCodecTests[A] = CodecTests[Seq[String], A, DecodeError, codecs.type]

  type CellDecoderTests[A] = DecoderTests[String, A, DecodeError, codecs.type]
  type CellEncoderTests[A] = EncoderTests[String, A, codecs.type]
  type CellCodecTests[A] = CodecTests[String, A, DecodeError, codecs.type]
}
| nrinaudo/scala-csv | laws/shared/src/main/scala/kantan/csv/laws/discipline/package.scala | Scala | mit | 1,206 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.validation
/**
 * Outcome of running a validation check: either `ValidationResult.Pass` or a
 * `ValidationResult.Fail` carrying the failed rule's name and a reason.
 */
sealed trait ValidationResult {
  def isSuccess: Boolean
  def isFailure: Boolean = !isSuccess
}
object ValidationResult {

  /** Successful validation; carries no further information. */
  case object Pass extends ValidationResult {
    def isSuccess: Boolean = true
  }

  /** Failed validation: `rule` names the failing check, `reason` explains why. */
  case class Fail(rule: String, reason: String) extends ValidationResult {
    def isSuccess: Boolean = false
  }
}
| gorcz/atlas | atlas-core/src/main/scala/com/netflix/atlas/core/validation/ValidationResult.scala | Scala | apache-2.0 | 1,013 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.filter.index
import org.locationtech.geomesa.utils.index.SizeSeparatedBucketIndex
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
// SpatialIndexSupport backed by a size-separated bucket index of simple features.
class SizeSeparatedBucketIndexSupport
    (override val sft: SimpleFeatureType,
     override val index: SizeSeparatedBucketIndex[SimpleFeature]) extends SpatialIndexSupport
object SizeSeparatedBucketIndexSupport {
  // Convenience factory: creates the underlying bucket index with the given
  // x/y bucket multipliers (resolution) and wraps it for the feature type.
  def apply(sft: SimpleFeatureType, xResolution: Double, yResolution: Double): SizeSeparatedBucketIndexSupport = {
    val index = new SizeSeparatedBucketIndex[SimpleFeature](xBucketMultiplier = xResolution, yBucketMultiplier = yResolution)
    new SizeSeparatedBucketIndexSupport(sft, index)
  }
}
| ddseapy/geomesa | geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/index/SizeSeparatedBucketIndexSupport.scala | Scala | apache-2.0 | 1,166 |
/**************************************************************************
Copyright 2014 Allen Institute for Artificial Intelligence Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
****************************************************************************/
package org.allenai.ari.solvers.graphmatch.models.features
import org.allenai.ari.solvers.graphmatch.graph.path.{ EdgePath, PathTrait }
import com.tinkerpop.blueprints.impls.tg.TinkerGraph
import breeze.util.{ HashIndex, MutableIndex }
import breeze.linalg.SparseVector
import breeze.linalg.norm
import com.tinkerpop.blueprints.Vertex
import org.allenai.ari.solvers.graphmatch.tools.{ StopWord, PathToPhraseSeq }
import scala.util.Sorting
/** Lexical feature that scores two paths by the cosine similarity of their bags of words.
  *
  * Fixes over the previous version: the debug `println`s (which printed array
  * `toString` hash strings on every call) are removed, and BOTH index arrays are
  * sorted before constructing the sparse vectors — the original code noted that
  * "SparseVector requires ordered indices" but only sorted `indices2`.
  */
object WordEqualityCosineFeature extends LightweightLexicalFeatureTrait {
  weight = 3.0
  override val featureTag: String = "COSWORD"
  override val requireStemming = false

  /** Cosine similarity between the word multisets of two phrases; 0.0 when either
    * side is empty or is entirely a stop phrase. */
  override def wordsFeature(words1: Seq[String], words2: Seq[String], focus: Option[String]): Double = {
    if (words1.isEmpty || words2.isEmpty) return 0.0
    // Build vector representations of the two phrases over a shared vocabulary.
    val vocabIndex = new HashIndex[String]()
    val w1 = words1.mkString(" ")
    val w2 = words2.mkString(" ")
    if (StopWord(w1) || StopWord(w2)) return 0.0
    // More loops than needed but these should be small loops.
    val indices1 = words1.map(w => vocabIndex.index(w.toLowerCase)).toArray
    val indices2 = words2.map(w => vocabIndex.index(w.toLowerCase)).toArray
    // SparseVector requires ordered indices — sort both arrays (repeated words can
    // produce out-of-order indices even for the first phrase).
    Sorting.quickSort(indices1)
    Sorting.quickSort(indices2)
    val vector1 = new SparseVector(Array.fill(indices1.size) { 1 }, indices1, vocabIndex.size)
    val vector2 = new SparseVector(Array.fill(indices2.size) { 1 }, indices2, vocabIndex.size)
    (vector1 dot vector2) / (norm(vector1) * norm(vector2))
  }

  /** Edge paths carry no word content, so any comparison involving one scores 0.0. */
  override def feature(qp: PathTrait, ep: PathTrait, graph: TinkerGraph, focus: Option[String] = None): Double =
    (qp, ep) match {
      case (_: EdgePath, _) | (_, _: EdgePath) => 0.0
      case _ => wordsFeature(PathToPhraseSeq(qp), PathToPhraseSeq(ep))
    }
}
| tomkwiat/dependency-graph-similarity-measure | src/main/scala/org/allenai/ari/solvers/graphmatch/models/features/WordEqualityCosineFeature.scala | Scala | apache-2.0 | 2,932 |
import scala.annotation.tailrec
import scala.quoted.*
object Macros {

  /** Applies `f` to every element of `seq`, unrolling the loop body `unrollSize`
    * times at compile time. Requires `seq.length % unrollSize == 0`. */
  inline def unrolledForeach(seq: IndexedSeq[Int], inline f: Int => Unit, inline unrollSize: Int): Unit = // or f: Int => Unit
    ${ unrolledForeachImpl('seq, 'f, 'unrollSize) }

  // Entry point: extracts the statically-known unroll factor from its Expr
  // (aborting compilation if it is not a constant), then delegates.
  def unrolledForeachImpl(seq: Expr[IndexedSeq[Int]], f: Expr[Int => Unit], unrollSizeExpr: Expr[Int]) (using Quotes): Expr[Unit] =
    unrolledForeachImpl(seq, f, unrollSizeExpr.valueOrAbort)

  // Generates a while loop whose body is `f` applied to `unrollSize` consecutive
  // elements, so the runtime loop advances by `unrollSize` per iteration.
  def unrolledForeachImpl(seq: Expr[IndexedSeq[Int]], f: Expr[Int => Unit], unrollSize: Int)(using Quotes): Expr[Unit] = '{
    val size = ($seq).length
    assert(size % (${Expr(unrollSize)}) == 0) // for simplicity of the implementation
    var i = 0
    while (i < size) {
      ${
        for (j <- new UnrolledRange(0, unrollSize)) '{
          val index = i + ${Expr(j)}
          val element = ($seq)(index)
          ${ Expr.betaReduce('{$f(element)}) } // or `($f)(element)` if `f` should not be inlined
        }
      }
      i += ${Expr(unrollSize)}
    }
  }

  // Compile-time "range": its foreach sequences the expressions produced by `f`
  // into one Expr[Unit], in ascending order of i.
  class UnrolledRange(start: Int, end: Int) {
    def foreach(f: Int => Expr[Unit])(using Quotes): Expr[Unit] = {
      // NOTE(review): `start` is ignored here (loop always stops at 0); this is
      // fine for the single start=0 use above — confirm before reusing elsewhere.
      @tailrec def loop(i: Int, acc: Expr[Unit]): Expr[Unit] =
        if (i >= 0) loop(i - 1, '{ ${f(i)}; $acc })
        else acc
      loop(end - 1, '{})
    }
  }
}
| dotty-staging/dotty | tests/run-macros/i4734/Macro_1.scala | Scala | apache-2.0 | 1,302 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Encoder, Row, SparkSession}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, UnknownPartitioning}
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.Utils
object RDDConversions {

  /**
   * Converts an RDD of Scala `Product`s (case classes / tuples) into an RDD of
   * Catalyst `InternalRow`s, using one Catalyst converter per column from `outputTypes`.
   *
   * Note: a single mutable row is reused for every element within a partition, so each
   * row must be consumed or copied before the iterator is advanced.
   */
  def productToRowRdd[A <: Product](data: RDD[A], outputTypes: Seq[DataType]): RDD[InternalRow] = {
    data.mapPartitions { iterator =>
      val numColumns = outputTypes.length
      // Allocated once per partition and reused for every row (see note above).
      val mutableRow = new GenericInternalRow(numColumns)
      val converters = outputTypes.map(CatalystTypeConverters.createToCatalystConverter)
      iterator.map { r =>
        var i = 0
        while (i < numColumns) {
          mutableRow(i) = converters(i)(r.productElement(i))
          i += 1
        }
        mutableRow
      }
    }
  }

  /**
   * Convert the objects inside Row into the types Catalyst expected.
   *
   * Same contract as `productToRowRdd`: the returned iterator reuses one mutable row
   * per partition, so rows must be consumed or copied before advancing.
   */
  def rowToRowRdd(data: RDD[Row], outputTypes: Seq[DataType]): RDD[InternalRow] = {
    data.mapPartitions { iterator =>
      val numColumns = outputTypes.length
      // Allocated once per partition and reused for every row.
      val mutableRow = new GenericInternalRow(numColumns)
      val converters = outputTypes.map(CatalystTypeConverters.createToCatalystConverter)
      iterator.map { r =>
        var i = 0
        while (i < numColumns) {
          mutableRow(i) = converters(i)(r(i))
          i += 1
        }
        mutableRow
      }
    }
  }
}
object ExternalRDD {

  /** Wraps `rdd` in an [[ExternalRDD]] logical node and appends the serializer that
    * turns the domain objects into rows. */
  def apply[T: Encoder](rdd: RDD[T], session: SparkSession): LogicalPlan =
    CatalystSerde.serialize[T](ExternalRDD(CatalystSerde.generateObjAttr[T], rdd)(session))
}
/** Logical plan node for scanning data from an RDD of domain objects (one object per row). */
case class ExternalRDD[T](
    outputObjAttr: Attribute,
    rdd: RDD[T])(session: SparkSession)
  extends LeafNode with ObjectProducer with MultiInstanceRelation {
  // The session is a curried constructor arg; it must be forwarded on `copy`.
  override protected final def otherCopyArgs: Seq[AnyRef] = session :: Nil
  // Re-create with a fresh expression id so the node can appear twice in one plan.
  override def newInstance(): ExternalRDD.this.type =
    ExternalRDD(outputObjAttr.newInstance(), rdd)(session).asInstanceOf[this.type]
  override protected def stringArgs: Iterator[Any] = Iterator(output)
  @transient override def computeStats(conf: SQLConf): Statistics = Statistics(
    // TODO: Instead of returning a default value here, find a way to return a meaningful size
    // estimate for RDDs. See PR 1238 for more discussions.
    sizeInBytes = BigInt(session.sessionState.conf.defaultSizeInBytes)
  )
}
/** Physical plan node for scanning data from an RDD of domain objects. */
case class ExternalRDDScanExec[T](
    outputObjAttr: Attribute,
    rdd: RDD[T]) extends LeafExecNode with ObjectProducerExec {
  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    val outputDataType = outputObjAttr.dataType
    rdd.mapPartitionsInternal { iter =>
      // Wraps each raw object into a single-field row carrying the object.
      val outputObject = ObjectOperator.wrapObjectToRow(outputDataType)
      iter.map { value =>
        numOutputRows += 1
        outputObject(value)
      }
    }
  }
  override def simpleString: String = {
    s"Scan $nodeName${output.mkString("[", ",", "]")}"
  }
}
/** Logical plan node for scanning data from an RDD of InternalRow. */
case class LogicalRDD(
    output: Seq[Attribute],
    rdd: RDD[InternalRow],
    outputPartitioning: Partitioning = UnknownPartitioning(0),
    outputOrdering: Seq[SortOrder] = Nil)(session: SparkSession)
  extends LeafNode with MultiInstanceRelation {
  // The session is a curried constructor arg; it must be forwarded on `copy`.
  override protected final def otherCopyArgs: Seq[AnyRef] = session :: Nil
  override def newInstance(): LogicalRDD.this.type = {
    // Give every output attribute a fresh expression id, and rewrite any
    // partitioning/ordering expressions that reference the old attributes.
    val rewrite = output.zip(output.map(_.newInstance())).toMap
    val rewrittenPartitioning = outputPartitioning match {
      case p: Expression =>
        p.transform {
          case e: Attribute => rewrite.getOrElse(e, e)
        }.asInstanceOf[Partitioning]
      case p => p
    }
    val rewrittenOrdering = outputOrdering.map(_.transform {
      case e: Attribute => rewrite.getOrElse(e, e)
    }.asInstanceOf[SortOrder])
    LogicalRDD(
      output.map(rewrite),
      rdd,
      rewrittenPartitioning,
      rewrittenOrdering
    )(session).asInstanceOf[this.type]
  }
  override protected def stringArgs: Iterator[Any] = Iterator(output)
  @transient override def computeStats(conf: SQLConf): Statistics = Statistics(
    // TODO: Instead of returning a default value here, find a way to return a meaningful size
    // estimate for RDDs. See PR 1238 for more discussions.
    sizeInBytes = BigInt(session.sessionState.conf.defaultSizeInBytes)
  )
}
/** Physical plan node for scanning data from an RDD of InternalRow. */
case class RDDScanExec(
    output: Seq[Attribute],
    rdd: RDD[InternalRow],
    override val nodeName: String,
    override val outputPartitioning: Partitioning = UnknownPartitioning(0),
    override val outputOrdering: Seq[SortOrder] = Nil) extends LeafExecNode {
  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
      // One unsafe projection per partition, seeded with the partition index.
      val proj = UnsafeProjection.create(schema)
      proj.initialize(index)
      iter.map { r =>
        numOutputRows += 1
        proj(r)
      }
    }
  }
  override def simpleString: String = {
    s"Scan $nodeName${Utils.truncatedString(output, "[", ",", "]")}"
  }
}
| bOOm-X/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala | Scala | apache-2.0 | 6,756 |
package com.pkinsky
import java.util.concurrent.atomic.AtomicInteger
import akka.http.scaladsl.model.ws.{InvalidUpgradeResponse, WebsocketUpgradeResponse, WebsocketRequest, TextMessage}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.stream.ThrottleMode
import akka.stream.scaladsl.{Keep, Sink, RunnableGraph, Source}
import play.api.libs.json.Json
import scala.concurrent.{Future, Await}
import scala.concurrent.duration._
import scala.language.postfixOps
// Load-test driver: opens `clients` websocket connections, pushes throttled test
// events through each, then reads the events back from Kafka and reports counts.
object LoadTest extends App with AppContext {
  val clients = 256
  val eventsPerClient = 256
  // Counts messages actually emitted over the websockets (across all clients).
  val eventsSent = new AtomicInteger(0)
  // Infinite numbered-event stream for one client, capped at eventsPerClient and
  // throttled to one event per 100 ms.
  def testData(clientId: String): Source[Event, Unit] =
    Source.unfoldInf(1) { n =>
      val event = Event(s"msg number $n", clientId, System.currentTimeMillis())
      (n + 1, event)
    }.take(eventsPerClient).throttle(1, 100 millis, 1, ThrottleMode.Shaping)
  // JSON-encodes the client's events and streams them to the local /ws endpoint,
  // bumping eventsSent per message; server responses are discarded.
  def wsClient(clientId: String): RunnableGraph[Future[WebsocketUpgradeResponse]] =
    testData(clientId).map(e => TextMessage.Strict(Json.toJson(e).toString))
      .map { x => eventsSent.incrementAndGet(); x }
      .viaMat(Http().websocketClientFlow(WebsocketRequest(Uri(s"ws://localhost:$port/ws"))))(Keep.right).to(Sink.ignore)
  //set up websocket connections
  (1 to clients).foreach { id =>
    wsClient(s"client $id").run()
  }
  //watch kafka for messages sent via websocket
  // Stops at the expected total or after 2 minutes, whichever comes first.
  val kafkaConsumerGraph: RunnableGraph[Future[Seq[Event]]] =
    kafka.consume[Event](eventTopic, "group_new")
      .take(clients * eventsPerClient).takeWithin(2 minutes)
      .toMat(Sink.seq)(Keep.right)
  // Blocking await is acceptable here: this is the edge of a test harness.
  val res = Await.result(kafkaConsumerGraph.run, 5 minutes)
  println(s"sent ${eventsSent.get()} events total")
  println(s"res size: ${res.length}")
}
package lila.analyse
import chess.format.pgn.Glyph
import lila.tree.Eval._
/** A judgment (inaccuracy/mistake/blunder) attached to a move, derived from the
  * evaluation before (`prev`) and after (`info`) the move was played. */
sealed trait Advice {
  def judgment: Advice.Judgment
  def info: Info
  def prev: Info
  def ply = info.ply
  def turn = info.turn
  def color = info.color
  def cp = info.cp
  def mate = info.mate
  /** Human-readable annotation, e.g. "(eval) Mistake. Best move was e4.";
    * the eval prefix and best-move suffix are each optional. */
  def makeComment(withEval: Boolean, withBestMove: Boolean): String =
    evalComment.filter(_ => withEval).fold("") { c =>
      s"($c) "
    } +
      (this match {
        case MateAdvice(seq, _, _, _) => seq.desc
        case CpAdvice(judgment, _, _) => judgment.toString
      }) + "." + {
        info.variation.headOption.filter(_ => withBestMove).fold("") { move =>
          s" Best move was $move."
        }
      }
  /** "before → after" evaluation summary, or None when both sides are empty. */
  def evalComment: Option[String] =
    Some {
      List(prev.evalComment, info.evalComment).flatten mkString " → "
    } filter (_.nonEmpty)
}
object Advice {
  /** Severity of a move, paired with its standard PGN annotation glyph (?!, ?, ??). */
  sealed abstract class Judgment(val glyph: Glyph, val name: String) {
    override def toString = name
    def isBlunder = this == Judgment.Blunder
  }
  object Judgment {
    object Inaccuracy extends Judgment(Glyph.MoveAssessment.dubious, "Inaccuracy")
    object Mistake extends Judgment(Glyph.MoveAssessment.mistake, "Mistake")
    object Blunder extends Judgment(Glyph.MoveAssessment.blunder, "Blunder")
    val all = List(Inaccuracy, Mistake, Blunder)
  }
  /** Centipawn-based advice takes precedence; mate-based advice is the fallback. */
  def apply(prev: Info, info: Info): Option[Advice] = CpAdvice(prev, info) orElse MateAdvice(prev, info)
}
private[analyse] case class CpAdvice(judgment: Advice.Judgment, info: Info, prev: Info) extends Advice
private[analyse] object CpAdvice {
  // Centipawn-loss thresholds, checked worst-first; the first threshold <= delta wins.
  private val cpJudgments =
    List(300 -> Advice.Judgment.Blunder, 100 -> Advice.Judgment.Mistake, 50 -> Advice.Judgment.Inaccuracy)
  /** Builds an advice when the centipawn swing between `prev` and `info` reaches a threshold. */
  def apply(prev: Info, info: Info): Option[CpAdvice] =
    for {
      cp ← prev.cp map (_.ceiled.centipawns)
      infoCp ← info.cp map (_.ceiled.centipawns)
      delta = {
        val d = infoCp - cp
        // Normalize so the loss is always from the mover's perspective.
        info.color.fold(-d, d)
      }
      judgment ← cpJudgments find { case (d, n) => d <= delta } map (_._2)
    } yield CpAdvice(judgment, info, prev)
}
/** How a forced-mate situation evolved from one position to the next. */
sealed abstract private[analyse] class MateSequence(val desc: String)
private[analyse] case object MateDelayed extends MateSequence(desc = "Not the best checkmate sequence")
private[analyse] case object MateLost extends MateSequence(desc = "Lost forced checkmate sequence")
private[analyse] case object MateCreated extends MateSequence(desc = "Checkmate is now unavoidable")
private[analyse] object MateSequence {
  /** Classifies the transition between two (optional) mate evaluations,
    * from the perspective of the player to move (positive = winning). */
  def apply(prev: Option[Mate], next: Option[Mate]): Option[MateSequence] =
    Some(prev, next) collect {
      case (None, Some(n)) if n.negative => MateCreated
      case (Some(p), None) if p.positive => MateLost
      case (Some(p), Some(n)) if p.positive && n.negative => MateLost
      // Mate still on but longer than before, and it was a short (<= 5) mate.
      case (Some(p), Some(n)) if p.positive && n >= p && p <= Mate(5) => MateDelayed
    }
}
private[analyse] case class MateAdvice(
    sequence: MateSequence,
    judgment: Advice.Judgment,
    info: Info,
    prev: Info
) extends Advice
private[analyse] object MateAdvice {
  /** Builds an advice when a forced mate was created, lost, or delayed between
    * `prev` and `info`. Severity of created/lost mates is softened when the
    * centipawn evaluation was already decisive. */
  def apply(prev: Info, info: Info): Option[MateAdvice] = {
    // Normalize all evals to the mover's perspective (positive = good for mover).
    def invertCp(cp: Cp) = cp invertIf info.color.black
    def invertMate(mate: Mate) = mate invertIf info.color.black
    def prevCp = prev.cp.map(invertCp).fold(0)(_.centipawns)
    def nextCp = info.cp.map(invertCp).fold(0)(_.centipawns)
    MateSequence(prev.mate map invertMate, info.mate map invertMate) map { sequence =>
      import Advice.Judgment._
      val judgment = sequence match {
        case MateCreated if prevCp < -999 => Inaccuracy
        case MateCreated if prevCp < -700 => Mistake
        case MateCreated => Blunder
        case MateLost if nextCp > 999 => Inaccuracy
        case MateLost if nextCp > 700 => Mistake
        case MateLost => Blunder
        case MateDelayed => Inaccuracy
      }
      MateAdvice(sequence, judgment, info, prev)
    }
  }
}
| ornicar/lichess-db | src/main/scala/lila/analyse/Advice.scala | Scala | agpl-3.0 | 4,111 |
import scala.annotation.tailrec
import scala.io.StdIn
object AshishDonation {

  /** True when funding the first `c` temples (temple i costs i*i) fits budget `X`.
    * Uses the closed form 1^2 + ... + c^2 = c(c+1)(2c+1)/6. */
  def isPossible(c: Long, X: Long): Boolean =
    (c * (c + 1) * (2 * c + 1) / 6) <= X

  /** Binary search for the largest count in [lo, hi) satisfying `isPossible`.
    * Invariant: `isPossible(lo, X)` holds and `hi` is strictly too large. */
  @tailrec
  def binarySearchDonations(lo: Long, hi: Long, X: Long): Long =
    if (hi - lo <= 1) lo
    else {
      val mid = (lo + hi) / 2
      if (isPossible(mid, X)) binarySearchDonations(mid, hi, X)
      else binarySearchDonations(lo, mid, X)
    }

  /** Explicit entry point instead of the `App` trait (avoids DelayedInit
    * pitfalls and lets the pure helpers above be used without touching stdin).
    * Reads T test cases, each a budget X, and prints the answer per line. */
  def main(args: Array[String]): Unit = {
    val nTestCases = StdIn.readInt()
    for (_ <- 1 to nTestCases) {
      val x = StdIn.readLong()
      println(binarySearchDonations(0L, 1000000L, x))
    }
  }
}
/**
* @author Ernesto Gutiérrez Corona- ernesto.g.corona@gmail.com
*/
package tinga.nlp.texttools
import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.mutable.Map
import scala.collection.mutable.Buffer
import java.io.InputStreamReader
import java.io.BufferedReader
/** Object for preprocessing text
  *
  * It can be customized for english: "en", spanish: "es", french: "fr", italian: "it" and german: "de"
  */
object TextPreprocessor{
  // Characters treated as punctuation by cleanText.
  val punctuationChars = List('.',',',';',':','¡','!','¿','?','(',')','[',']','{','}','`',
                              '\\','\'','@','#','$','^','&','*','+','-','|','=','_','~','%',
                              '<','>','/', '"')
  var lexiconDir = "lexicon/"

  /** Reads a classpath resource line by line.
    *
    * Fixes over the previous version: lines are returned in file order (they
    * used to come back reversed because the list was built by prepending), and
    * the readers are now closed even if reading throws.
    */
  def readAsStream(path: String): List[String] = {
    val is = Thread.currentThread().getContextClassLoader().getResourceAsStream(path)
    val isr = new InputStreamReader(is)
    val br = new BufferedReader(isr)
    try {
      val lines = Buffer[String]()
      var line = br.readLine
      while(line != null){
        lines += line
        line = br.readLine
      }
      lines.toList
    } finally {
      br.close
      isr.close
      is.close
    }
  }

  /** Parses a resource of "key value" lines into a mutable map; lines that do
    * not split into exactly two fields are skipped. */
  def readFileToMap(path: String): Map[String,String] = {
    val lines = readAsStream(path)
    var map: Map[String, String] = Map[String, String]()
    for(line <- lines) {
      if (line.split(" ").length == 2)
        map += line.split(" ")(0) -> line.split(" ")(1)
    }
    map
  }

  /** Returns the lines of a resource file, in file order. */
  def readFileToStringList(path: String): List[String] = {
    readAsStream(path)
  }

  /** Returns all characters of a resource file (line terminators excluded). */
  def readFileToCharList(path: String): List[Char] = {
    readAsStream(path).flatMap(c => c.toCharArray)
  }

  /** Language-specific (non-ASCII) characters allowed for the given language code. */
  def langSpecificChars(lang: String): List[Char] = {
    readFileToCharList(lexiconDir + f"special-characters/$lang%s-characters.txt")
  }

  /** Stopword list for the given language code. */
  def langStopwords(lang: String): List[String] = {
    readFileToStringList(lexiconDir + f"stopwords/$lang%s-stopwords.txt")
  }

  /** Preprocess text customized by language
    *
    * @return String optionally cleaned from punctuation (with exceptions) and stopwords (with exceptions)
    */
  def preprocess(lang: String)(text: String,
                               punctuation: Boolean = false,
                               exceptPunct: List[Char] = List(),
                               stopwords: Boolean = false,
                               exceptStop: List[String] = List()): String = lang match {
    case "en" => cleanText(text, List(),
                           punctuation, exceptPunct,
                           stopwords, langStopwords("en"), exceptStop)
    case "es" => cleanText(text, langSpecificChars("es"),
                           punctuation, exceptPunct,
                           stopwords, langStopwords("es"), exceptStop)
    case "fr" => cleanText(text, langSpecificChars("fr"),
                           punctuation, exceptPunct,
                           stopwords, langStopwords("fr"), exceptStop)
    case "it" => cleanText(text, langSpecificChars("it"),
                           punctuation, exceptPunct,
                           stopwords, langStopwords("it"), exceptStop)
    case "de" => cleanText(text, langSpecificChars("de"),
                           punctuation, exceptPunct,
                           stopwords, langStopwords("de"), exceptStop)
  }

  /** Removes disallowed characters, and optionally punctuation and stopwords,
    * honoring the respective exception lists. Stopword removal lowercases and
    * trims every token and rejoins with single spaces. */
  def cleanText(text: String, langChars: List[Char],
                punctuation: Boolean, exceptPunct: List[Char],
                stopwords: Boolean, langStopwords: List[String], exceptStop: List[String]): String = {
    if (punctuation && !stopwords) {
      text filter { (c: Char) => (isAllowedChar(c, langChars)) &&
                                 (!(punctuationChars contains c) ||
                                 (exceptPunct contains c)) }
    }
    else{
      if (stopwords) {
        val punctDeleted = text filter { (c: Char) => (isAllowedChar(c, langChars)) &&
                                                      (!(punctuationChars contains c) ||
                                                      (exceptPunct contains c)) }
        val wordList = punctDeleted.split(' ').toList map (str => str.toLowerCase.trim)
        val stopwordsRemoved = wordList filter { (str: String) => (!(langStopwords contains str) ||
                                                                  (exceptStop contains str)) }
        stopwordsRemoved.mkString(" ")
      }
      else text filter { (c: Char) => isAllowedChar(c, langChars) }
    }
  }

  /** A character is allowed when it is printable ASCII or in the language's extra set. */
  def isAllowedChar(c: Char, chars: List[Char]) = c <= '~' || chars.contains(c)

  /** Maps accented Latin characters to their unaccented ASCII counterparts. */
  def removeDiacritics(str: String): String = {
    val diacriticChars = "ÀàÈèÌìÒòÙùÁáÉéÍíÓóÚúÝýÂâÊêÎîÔôÛûŶŷÃãÕõÑñÄäËëÏïÖöÜüŸÿÅåÇçŐőŰű".toCharArray
    val asciiChars = "AaEeIiOoUuAaEeIiOoUuYyAaEeIiOoUuYyAaOoNnAaEeIiOoUuYyAaCcOoUu".toCharArray
    str map (c => if(diacriticChars contains c) asciiChars(diacriticChars.indexOf(c)) else c)
  }
}
| Innova4D/tinga | src/main/scala/nlp/texttools/Preprocessing.scala | Scala | apache-2.0 | 5,007 |
package com.enriquegrodrigo.spark.crowd.types
import org.apache.spark.sql._
import org.apache.spark.broadcast.Broadcast
/**
 * CGlad model returned by the CGlad method
 *
 * @param mu label estimation returned from the model.
 * @param prec precision of each annotator given by the CGlad model.
 * @param diffic difficulty of each cluster given by the CGlad model.
 * @param clusters cluster assignment for each example.
 * @param ranks rank information for each example.
 * @author enrique.grodrigo
 * @version 0.2.1
 */
class CGladModel(mu: Dataset[BinarySoftLabel],
                  prec: Array[Double],
                  diffic: Array[Double],
                  clusters: Dataset[ExampleCluster],
                  ranks: Dataset[ExampleRanks]
                  ) extends Model[BinarySoftLabel] {
  /**
   * Method that returns the probabilistic estimation of the true label
   *
   * @return org.apache.spark.sql.Dataset
   * @author enrique.grodrigo
   * @version 0.1
   */
  def getMu(): Dataset[BinarySoftLabel] = mu
  /**
   * Method that returns the annotator precision information
   *
   * @return Array[Double] indexed by annotator id
   * @author enrique.grodrigo
   * @version 0.1
   */
  def getAnnotatorPrecision(): Array[Double] = prec
  /**
   * Method that returns information about difficulty for each cluster
   *
   * @return Array[Double] indexed by cluster id
   * @author enrique.grodrigo
   * @version 0.2.1
   */
  def getClusterDifficulty(): Array[Double] = diffic
  /**
   * Method that returns information about ranks
   *
   * @return org.apache.spark.sql.Dataset of example rank records
   * @author enrique.grodrigo
   * @version 0.2.1
   */
  def getRankData(): Dataset[ExampleRanks] = ranks
  /**
   * Method that returns information about clusters
   *
   * @return org.apache.spark.sql.Dataset of example cluster assignments
   * @author enrique.grodrigo
   * @version 0.1
   */
  def getClusters(): Dataset[ExampleCluster]= clusters
}
| enriquegrodrigo/spark-crowd | spark-crowd/src/main/scala/com/enriquegrodrigo/spark/crowd/types/CGladModel.scala | Scala | mit | 1,859 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.items.chambers
import com.castlebravostudios.rayguns.api.items.ItemModule
import com.castlebravostudios.rayguns.entities.effects.DeathRayEffect
import com.castlebravostudios.rayguns.mod.ModularRayguns
import com.castlebravostudios.rayguns.items.emitters.Emitters
import com.castlebravostudios.rayguns.items.misc.Tier3EmptyChamber
/** Tier-3 chamber module producing the death-ray beam effect (5x power modifier). */
object DeathRayChamber extends BaseChamber {
  val moduleKey = "DeathRayChamber"
  val powerModifier = 5.0
  val shotEffect = DeathRayEffect
  val nameSegmentKey = "rayguns.DeathRayChamber.segment"
  // Builds the craftable chamber item; recipe inputs are the death-ray emitter
  // plus an empty tier-3 chamber.
  def createItem() : ItemModule = new ItemChamber( this,
    Emitters.deathRayEmitter, Tier3EmptyChamber )
    .setUnlocalizedName("rayguns.DeathRayChamber")
    .setTextureName("rayguns:chamber_death_ray")
    .setCreativeTab( ModularRayguns.raygunsTab )
    .setMaxStackSize(1)
  // Death ray fires as single shots only (plus the precise variant).
  def registerShotHandlers() : Unit = {
    registerSingleShotHandlers()
    registerPreciseShotHandler()
  }
}
} | Redattack34/ModularRayguns | src/main/scala/com/castlebravostudios/rayguns/items/chambers/DeathRayChamber.scala | Scala | bsd-3-clause | 2,563 |
package praxkit
import org.scalajs.dom
import dom.html
import scalajs.js.annotation._
import scalatags.JsDom.all._
@JSExportTopLevel("Calc")
// Scala.js price calculator widget: renders a small form (appointments/month,
// vacation weeks, calendar option) and live-updates the monthly/yearly cost.
// NOTE: all user-facing strings are German UI copy and must not be altered.
object HelloWorld {
  // -- view
  /** Entry point called from the page: builds the form UI inside `target`. */
  @JSExport
  def main(target: html.Div): Unit = {
    target.appendChild(
      div(
        h3(
          """
          Wie viele Termine habe ich pro Monat?
          """
        ),
        p("Die Berechnung der Kosten erfolgt auf Basis der Anzahl Termine (Behandlungen, Sitzungen oder Konsultationen) pro Monat."),
        p("Bitte ausfüllen:"),
        br,
        div(cls:="row")(
          div(cls:="col-sm-6 col-md5")(
            h5("Ich habe in der Regel"),
            div(cls:="input")(span(box, " Termine pro Monat")),
            div(cls:="input")(span(vaccation, " Wochen Ferien im Jahr")), br
          ),
          div(cls:="col-sm-6 col-md5")(
            h5("Kosten"),
            div(cls:="input")(span(option, " mit Kalenderfunktionen")),
            div(
              output.render, br
            )
          )
        ),
        beispiel.render
      ).render
    )
  }
  // Static worked example shown beneath the form (16 sessions, 8 vacation weeks).
  val ex = Price(16, 8, false)
  def aprice = price(ex)
  val beispiel = div(cls:="beispiel")(
    h5("Beispiel " ),
    p("Wenn ich durchschnittlich " + ex.sessionsMonth + " Termine pro Monat habe und " + ex.vacations + " Wochen Ferien plane, ",
      "dann bezahle ich ohne Kalenderfunktionen ", fmt(aprice._2), "pro Monat oder etwa ",
      fmt(aprice._1), "pro Jahr. "
    ))
  // Input fields (rendered DOM nodes, read and wired up below).
  val box = input(
    `type`:="text",
    placeholder:="Termine",
    value:="0",
    size:="3"
  ).render
  val vaccation = input(
    `type`:="text",
    placeholder:="Ferien",
    value:="0",
    size:="3"
  ).render
  val option = input(
    `type`:="checkbox"
  ).render
  // Result container; its single child is swapped on every recalculation.
  val output = div(id:="empty")(
    ul(
      li(fmt(0), "pro Monat "),
      li(fmt(0), "pro Jahr ")
    )
  ).render
  // -- controller
  // Each handler mutates the shared model and re-renders the result.
  box.onkeyup = (e: dom.Event) => {
    myPrice.sessionsMonth = toInt(box.value).getOrElse(0)
    val p = price(myPrice)
    renderResult(p)
  }
  vaccation.onkeyup = (e: dom.Event) => {
    // % 52 clamps nonsensical vacation input to a year's worth of weeks.
    myPrice.vacations = toInt(vaccation.value).getOrElse(0) % 52
    val p = price(myPrice)
    renderResult(p)
  }
  option.onchange = (e: dom.Event) => {
    val choice = option.checked
    //println("option: " + choice)
    myPrice.withCalendar = choice
    val p = price(myPrice)
    renderResult(p)
  }
  /** Replaces the result list with the given (yearly, monthly) prices. */
  def renderResult(p: (Double,Double)): Unit = {
    //output.textContent = p._2.toString + " pro Monat, " + p._1.toString + " pro Jahr."
    val resultId = "resultId"
    //println(myPrice)
    val result = div(id:=resultId)(
      ul(
        li(fmt(p._2), "pro Monat "),
        // "Etwa" (approximately) only once vacations make the yearly figure an estimate.
        if(myPrice.vacations == 0 || myPrice.sessionsMonth == 0)
          li(fmt(p._1), "pro Jahr")
        else
          li("Etwa ", fmt(p._1), "pro Jahr ")
      )
    ).render
    if (output.hasChildNodes) {
      output.replaceChild(result, output.firstChild)
      //println("hasChildNodes")
    }
    else {
      //println("no ChildNodes")
      output.appendChild(div(id:= resultId).render)
      output.replaceChild(result, output.firstChild)
    }
  }
  // -- model
  // Mutable on purpose: the event handlers above update it in place.
  case class Price(
    var sessionsMonth: Int,
    var vacations: Int,
    var withCalendar: Boolean)
  @JSExportTopLevel("myPrice")
  var myPrice = Price(0, 0, false)
  /** Computes (yearly, monthly) cost in CHF: per-session rate times sessions,
    * with the yearly total scaled down by the vacation fraction of the year. */
  @JSExport
  def price(p: Price): (Double, Double) = {
    // Per-session rate depends on whether calendar features are included.
    val unitprice = p.withCalendar match {
      case true => 1.85d
      case _ => 1.00d
    }
    // Billable months per year after subtracting vacation weeks (52-week year).
    def workingmonth(vacs: Int): Double = {
      val month = 12d
      val wm = month - (vacs.toDouble / 52 * month )
      wm
    }
    val monthprice = p.sessionsMonth * unitprice
    val yearprice = monthprice * workingmonth(p.vacations)
    (round(yearprice), round(monthprice))
  }
  // Half-up rounding to 2 decimals (currency).
  def round (input: Double): Double = BigDecimal(input).setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble
  def fmt(d: Double): String = f"CHF $d%10.2f "
  /** Safe string-to-int parse; None on any malformed input. */
  def toInt(s: String): Option[Int] = {
    try {
      Some(s.toInt)
    } catch {
      case e: Exception => None
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.spark.testsuite.insertQuery
import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class InsertIntoNonCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table if exists TCarbonSource")
sql(
"create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor " +
"string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize " +
"string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode " +
"string,internalModels string, deliveryTime string, channelsId string, channelsName string " +
", deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity " +
"string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, " +
"ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, " +
"Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, " +
"Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, " +
"Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, " +
"Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion" +
" string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, " +
"Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country " +
"string, Latest_province string, Latest_city string, Latest_district string, Latest_street " +
"string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string," +
" Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, " +
"Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, " +
"Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, " +
"Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber " +
"BigInt) STORED AS carbondata")
sql(
s"LOAD DATA INPATH '$resourcesPath/100_olap.csv' INTO table TCarbonSource options " +
"('DELIMITER'=',', 'QUOTECHAR'='\\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor," +
"device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series," +
"productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId," +
"deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet," +
"oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity," +
"ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion," +
"Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion," +
"Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion," +
"Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR," +
"Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street," +
"Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber," +
"Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer," +
"Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions," +
"Latest_operatorId,gamePointDescription,gamePointId,contractNumber', " +
"'bad_records_logger_enable'='false','bad_records_action'='FORCE')")
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION, "true")
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, "true")
}
test("insert into hive") {
sql("drop table if exists thive2")
sql(
"create table thive2 row format delimited fields terminated by '\\017' stored as textfile as" +
" select imei,deviceInformationId,MAC from TCarbonSource")
checkAnswer(
sql(
"select imei,deviceInformationId,MAC from TCarbonSource order by imei, " +
"deviceInformationId,MAC"),
sql("select imei,deviceInformationId,MAC from thive2 order by imei,deviceInformationId,MAC")
)
sql("drop table thive2")
}
test("insert into parquet") {
sql("drop table if exists tparquet")
sql("create table tparquet(imei string,deviceInformationId int) STORED AS PARQUET")
sql("insert into tparquet select imei,deviceInformationId from TCarbonSource")
checkAnswer(
sql("select imei,deviceInformationId from TCarbonSource order by imei,deviceInformationId"),
sql("select imei,deviceInformationId from tparquet order by imei,deviceInformationId")
)
sql("drop table tparquet")
}
test("insert into hive conditional") {
sql("drop table if exists thive_cond")
sql(
"create table thive_cond row format delimited fields terminated by '\\017' stored as " +
"textfile as SELECT(CASE WHEN imei IS NOT NULL THEN imei ELSE MAC END) AS temp FROM " +
"TCarbonSource limit 10")
checkAnswer(
sql("select count(*) from thive_cond"),
Seq(Row(10))
)
sql("drop table thive_cond")
}
test("jvm crash when insert data from datasource table to session table") {
  // Regression test: inserting from a datasource ("carbon" format) table into a session
  // (STORED AS carbondata) table previously crashed the JVM.
  val spark = sqlContext.sparkSession
  import spark.implicits._
  import scala.util.Random
  // Build a small randomized DataFrame of 10 rows (ID, name, city, age).
  val r = new Random()
  val df = spark.sparkContext.parallelize(1 to 10)
    .map(x => (r.nextInt(100000), "name" + x % 8, "city" + x % 50, BigDecimal.apply(x % 60)))
    .toDF("ID", "name", "city", "age")
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
  // Source table written through the "carbon" datasource API.
  df.write.format("carbon").saveAsTable("personTable")
  spark.sql("create table test_table(ID int, name string, city string, age decimal) STORED AS carbondata tblproperties('sort_columns'='ID')")
  // Two inserts: a full copy (10 rows) plus a limited copy (2 rows).
  spark.sql("insert into test_table select * from personTable")
  spark.sql("insert into test_table select * from personTable limit 2")
  // 10 + 2 rows expected; reaching this assertion also proves no JVM crash occurred.
  assert(spark.sql("select * from test_table").count() == 12)
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
}
test("jvm crash when insert data from datasource table to datasource table") {
  // Regression test: inserting between two datasource ("carbon"/"using carbon") tables
  // previously crashed the JVM.
  val spark = sqlContext.sparkSession
  import spark.implicits._
  import scala.util.Random
  // Build a small randomized DataFrame of 10 rows (ID, name, city, age).
  val r = new Random()
  val df = spark.sparkContext.parallelize(1 to 10)
    .map(x => (r.nextInt(100000), "name" + x % 8, "city" + x % 50, BigDecimal.apply(x % 60)))
    .toDF("ID", "name", "city", "age")
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
  // Both source and target use the datasource path.
  df.write.format("carbon").saveAsTable("personTable")
  spark.sql("create table test_table(ID int, name string, city string, age decimal) using carbon")
  // Two inserts: a full copy (10 rows) plus a limited copy (2 rows).
  spark.sql("insert into test_table select * from personTable")
  spark.sql("insert into test_table select * from personTable limit 2")
  // 10 + 2 rows expected; reaching this assertion also proves no JVM crash occurred.
  assert(spark.sql("select * from test_table").count() == 12)
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
}
test("jvm crash when insert data from session table to datasource table") {
  // Regression test: inserting from a session-catalog "carbondata" table into a
  // datasource ("using carbon") table previously crashed the JVM.
  val spark = sqlContext.sparkSession
  import spark.implicits._
  import scala.util.Random
  // Build a small randomized DataFrame of 10 rows (ID, name, city, age).
  val r = new Random()
  val df = spark.sparkContext.parallelize(1 to 10)
    .map(x => (r.nextInt(100000), "name" + x % 8, "city" + x % 50, BigDecimal.apply(x % 60)))
    .toDF("ID", "name", "city", "age")
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
  // Source written via the session-catalog carbondata writer (tableName option + Overwrite).
  df.write
    .format("carbondata")
    .option("tableName", "personTable")
    .mode(SaveMode.Overwrite)
    .save()
  spark.sql("create table test_table(ID int, name string, city string, age decimal) using carbon")
  // Two inserts: a full copy (10 rows) plus a limited copy (2 rows).
  spark.sql("insert into test_table select * from personTable")
  spark.sql("insert into test_table select * from personTable limit 2")
  // 10 + 2 rows expected; reaching this assertion also proves no JVM crash occurred.
  assert(spark.sql("select * from test_table").count() == 12)
  spark.sql("DROP TABLE IF EXISTS personTable")
  spark.sql("DROP TABLE IF EXISTS test_table")
}
override def afterAll {
  // Restore the unsafe-execution properties to their defaults (they were enabled for this
  // suite) and drop the shared source table created for the tests.
  CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION, CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION_DEFAULTVALUE)
  CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_DEFAULT)
  sql("DROP TABLE IF EXISTS TCarbonSource")
}
} | jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/insertQuery/InsertIntoNonCarbonTableTestCase.scala | Scala | apache-2.0 | 9,685 |
package com.peterpotts.snake.predicate
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpec}
import scala.util.Random
@RunWith(classOf[JUnitRunner])
class FalseTest extends WordSpec with Matchers {

  /** The `False` predicate must reject any input, whatever its value. */
  "The false predicate" should {
    "evaluate everything to false" in {
      // An arbitrary integer suffices: the predicate ignores its argument.
      val arbitraryInput = Random.nextInt()
      False(arbitraryInput) shouldBe false
    }
  }
}
| peterpotts/snake | src/test/scala/com/peterpotts/snake/predicate/FalseTest.scala | Scala | mit | 395 |
package edu.eckerd.google.api.services.directory.models
/**
 * One page of a Google Directory API members listing.
 *
 * @param members       the members on this page, if any were returned
 * @param nextPageToken token for fetching the next page; `None` on the last page
 */
case class Members(
  members: Option[List[Member]],
  nextPageToken: Option[String]
)
| EckerdCollege/google-api-scala | src/main/scala/edu/eckerd/google/api/services/directory/models/Members.scala | Scala | apache-2.0 | 194 |
package org.bitcoins.spvnode.messages.control
import org.bitcoins.spvnode.gen.ControlMessageGenerator
import org.scalacheck.{Prop, Properties}
/**
* Created by chris on 7/5/16.
*/
class PongMessageSpec extends Properties("PongMessageSpec") {

  /** Round-trip law: re-parsing a pong message from its hex encoding yields an equal message. */
  property("Serialization symmetry") =
    Prop.forAll(ControlMessageGenerator.pongMessage) { original =>
      val reparsed = PongMessage(original.hex)
      reparsed == original
    }
}
| bitcoin-s/bitcoin-s-spv-node | src/test/scala/org/bitcoins/spvnode/messages/control/PongMessageSpec.scala | Scala | mit | 404 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services.onlinetesting.phase3
import _root_.services.passmarksettings.PassMarkSettingsService
import com.google.inject.name.Named
import config.MicroserviceAppConfig
import factories.UUIDFactory
import javax.inject.{ Inject, Singleton }
import model.Phase
import model.exchange.passmarksettings.Phase3PassMarkSettings
import model.persisted.ApplicationReadyForEvaluation
import play.api.Logging
import repositories.application.GeneralApplicationRepository
import repositories.onlinetesting.OnlineTestEvaluationRepository
import repositories.passmarksettings.Phase3PassMarkSettingsMongoRepository
import scheduler.onlinetesting.EvaluateOnlineTestResultService
import services.onlinetesting.{ ApplicationStatusCalculator, CurrentSchemeStatusHelper }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
@Singleton
class EvaluatePhase3ResultService @Inject() (@Named("Phase3EvaluationRepository") val evaluationRepository: OnlineTestEvaluationRepository,
  val passMarkSettingsRepo: Phase3PassMarkSettingsMongoRepository,
  val generalAppRepository: GeneralApplicationRepository,
  appConfig: MicroserviceAppConfig,
  val uuidFactory: UUIDFactory
) extends EvaluateOnlineTestResultService[Phase3PassMarkSettings] with Phase3TestEvaluation
  with PassMarkSettingsService[Phase3PassMarkSettings] with ApplicationStatusCalculator with CurrentSchemeStatusHelper with Logging {

  val phase = Phase.PHASE3
  val launchpadGWConfig = appConfig.launchpadGatewayConfig

  /**
   * Evaluates one application's Phase3 (video interview) results against the given pass marks
   * and persists the outcome.
   *
   * Preconditions (enforced with `require`): an active launchpad test exists and Phase2
   * evaluation results are present.
   *
   * If the gateway config demands all scores be present and some launchpad questions are
   * not yet reviewed, evaluation is skipped (returns a completed Future without persisting).
   * For Sdip faststream applications, Sdip results read from the current scheme status are
   * persisted alongside the Phase3 scheme results.
   */
  def evaluate(implicit application: ApplicationReadyForEvaluation, passmark: Phase3PassMarkSettings): Future[Unit] = {
    // NOTE(review): warn level is used for a routine progress message — confirm whether info was intended.
    logger.warn(s"Evaluating Phase3 appId=${application.applicationId}")

    val optLaunchpadTest = application.activeLaunchpadTest
    require(optLaunchpadTest.isDefined, "Active launchpad test not found")
    require(application.prevPhaseEvaluation.isDefined, "Phase2 results required to evaluate Phase3")

    // Latest reviewed callback carries the per-question scores.
    val optLatestReviewed = optLaunchpadTest.flatMap(_.callbacks.getLatestReviewed)
    val allQuestionsReviewed = optLatestReviewed.exists(_.allQuestionsReviewed)

    if (launchpadGWConfig.phase3Tests.verifyAllScoresArePresent && !allQuestionsReviewed) {
      // Terminate early: partial reviews would produce an incomplete evaluation.
      val msg = s"Some of the launchpad questions are not reviewed for application Id = ${application.applicationId} so terminating evaluation"
      logger.info(msg)
      Future.successful(())
    } else {
      // Both the launchpad review and the Phase2 evaluation are needed to compute scheme results.
      val schemeResults = (optLatestReviewed, application.prevPhaseEvaluation) match {
        case (Some(launchpadReview), Some(prevPhaseEvaluation)) =>
          evaluate(application.preferences.schemes, launchpadReview, prevPhaseEvaluation.result, passmark)
        case _ => throw new IllegalStateException(s"Illegal number of phase3 active tests with results " +
          s"for this application: ${application.applicationId}")
      }
      // Append Sdip results (if any) before persisting the pass-mark evaluation.
      getSdipResults(application).flatMap { sdip =>
        if (application.isSdipFaststream) {
          logger.debug(s"Phase3 appId=${application.applicationId} Sdip faststream application will persist the following Sdip results " +
            s"read from current scheme status: $sdip")
        }
        savePassMarkEvaluation(application, schemeResults ++ sdip, passmark)
      }
    }
  }
}
| hmrc/fset-faststream | app/services/onlinetesting/phase3/EvaluatePhase3ResultService.scala | Scala | apache-2.0 | 4,062 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import org.apache.spark.util.Utils
import com.esotericsoftware.kryo.Kryo
import org.apache.spark._
import org.apache.spark.serializer.KryoDistributedTest._
class KryoSerializerDistributedSuite extends SparkFunSuite {

  // Checks that Kryo serialises objects consistently across processes when the class to
  // register lives in a dynamically-loaded application jar.
  test("kryo objects are serialised consistently in different processes") {
    val conf = new SparkConf(false)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", classOf[AppJarRegistrator].getName)
      // Maximum number of task retries: fail fast on the first serialisation error.
      .set("spark.task.maxFailures", "1")

    // Build a jar containing the custom class and ship it with the job.
    val jar = TestUtils.createJarWithClasses(List(AppJarRegistrator.customClassName))
    conf.setJars(List(jar.getPath))
    // val sc = new SparkContext("local-cluster[2,1,1024]", "test", conf)
    val sc = new SparkContext("local[*]", "test", conf)

    // Thread.currentThread.getContextClassLoader returns the context class loader of the
    // current thread.
    // NOTE(review): `original` is captured but never restored after the test — confirm
    // whether the context class loader should be reset here.
    val original = Thread.currentThread.getContextClassLoader
    val loader = new java.net.URLClassLoader(Array(jar), Utils.getContextOrSparkClassLoader)
    SparkEnv.get.serializer.setDefaultClassLoader(loader)

    val cachedRDD = sc.parallelize((0 until 10).map((_, new MyCustomClass)), 3).cache()

    // Randomly mix the keys so that the join below will require a shuffle with each partition
    // sending data to multiple other partitions.
    val shuffledRDD = cachedRDD.map { case (i, o) => (i * i * i - 10 * i * i, o)}

    // Join the two RDDs, and force evaluation.
    assert(shuffledRDD.join(cachedRDD).collect().size == 1)

    LocalSparkContext.stop(sc)
  }
}
object KryoDistributedTest {
  // Marker class serialised through the custom registrator in the test above.
  class MyCustomClass

  // Registrator that loads the custom class from the application jar via the thread's
  // context class loader before registering it with Kryo.
  class AppJarRegistrator extends KryoRegistrator {
    override def registerClasses(k: Kryo) {
      // Thread.currentThread.getContextClassLoader returns the context class loader of the
      // current thread, which has the application jar on its classpath here.
      val classLoader = Thread.currentThread.getContextClassLoader
      // scalastyle:off classforname
      k.register(Class.forName(AppJarRegistrator.customClassName, true, classLoader))
      // scalastyle:on classforname
    }
  }

  object AppJarRegistrator {
    val customClassName = "KryoSerializerDistributedSuiteCustomClass"
  }
}
| tophua/spark1.52 | core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala | Scala | apache-2.0 | 3,316 |
// Compiler pattern-matching (exhaustiveness) test fixture — the code below is test data
// for the pattern matcher and must not be "fixed" or simplified.
sealed abstract class FoundNode[T]
case class A[T](x: T) extends FoundNode[T]
object Foo {
  // Matching on a tuple whose components involve existential types exercises the
  // exhaustiveness checker (only the first tuple component is matched).
  val v: (Some[_], FoundNode[_]) = (???, ???)
  v match {
    case (x: Some[t], _) =>
  }
}
| som-snytt/dotty | tests/patmat/t6450.scala | Scala | apache-2.0 | 184 |
package sbt
package inc
import java.io.File
import org.scalacheck._
import Arbitrary._
import Gen._
import sbt.Relation
import xsbti.api._
import xsbti.SafeLazy
import xsbti.DependencyContext._
/**
* Scalacheck generators for Analysis objects and their substructures.
* Fairly complex, as Analysis has interconnected state that can't be
* independently generated.
*/
object TestCaseGenerators {
  // We restrict sizes, otherwise the generated Analysis objects get huge and the tests take a long time.
  val maxSources = 10 // Max number of source files.
  val maxRelatives = 10 // Max number of things that a source x can relate to in a single Relation.
  val maxPathSegmentLen = 10 // Max number of characters in a path segment.
  val maxPathLen = 6 // Max number of path segments in a path.

  // Ensure that we generate unique class names and file paths every time.
  // Using repeated strings may lead to all sorts of undesirable interactions.
  // NOTE: shared mutable state — generators built from `unique` are not independent across runs.
  val used1 = scala.collection.mutable.Set.empty[String]
  val used2 = scala.collection.mutable.Set.empty[String]

  // When using `retryUntil`, the condition is actually tested twice (see implementation in ScalaCheck),
  // which is why we need to insert twice the element.
  // If the element is present in both sets, then it has already been used.
  def unique[T](g: Gen[T]) = g retryUntil { o: T =>
    if (used1.add(o.toString))
      true
    else
      used2.add(o.toString)
  }

  // Identifier generator with a minimum size of 3 characters.
  def identifier: Gen[String] = sized { size =>
    resize(Math.max(size, 3), Gen.identifier)
  }

  // A single path segment: alphabetic first character, alphanumeric rest.
  def genFilePathSegment: Gen[String] = for {
    n <- choose(3, maxPathSegmentLen) // Segments have at least 3 characters.
    c <- alphaChar
    cs <- listOfN(n - 1, alphaNumChar)
  } yield (c :: cs).mkString

  // A relative file path composed of slash-joined segments.
  def genFile: Gen[File] = for {
    n <- choose(2, maxPathLen) // Paths have at least 2 segments.
    path <- listOfN(n, genFilePathSegment)
  } yield new File(path.mkString("/"))

  // A stamp is just an existence flag here.
  def genStamp: Gen[Stamp] = for {
    b <- oneOf(true, false)
  } yield new Exists(b)

  // Pairs up two equally-sized sequences into a Map.
  def zipMap[A, B](a: Seq[A], b: Seq[B]): Map[A, B] = (a zip b).toMap

  // Stamps for every product/source/binary appearing in the given Relations, so the two
  // structures stay consistent.
  def genStamps(rel: Relations): Gen[Stamps] = {
    val prod = rel.allProducts.toList
    val src = rel.allSources.toList
    val bin = rel.allBinaryDeps.toList
    for {
      prodStamps <- listOfN(prod.length, genStamp)
      srcStamps <- listOfN(src.length, genStamp)
      binStamps <- listOfN(bin.length, genStamp)
      binClassNames <- listOfN(bin.length, unique(identifier))
    } yield Stamps(zipMap(prod, prodStamps), zipMap(src, srcStamps), zipMap(bin, binStamps), zipMap(bin, binClassNames))
  }

  // We need "proper" definitions with specific class names, as groupBy use these to pick a representative top-level class when splitting.
  private[this] def makeDefinition(name: String): Definition =
    new ClassLike(DefinitionType.ClassDef, lzy(new EmptyType()),
      lzy(new Structure(lzy(Array()), lzy(Array()), lzy(Array()))), Array(), Array(),
      name, new Public(), new Modifiers(false, false, false, false, false, false, false), Array())

  // Shortcut for a strictly-evaluated xsbti lazy value.
  private[this] def lzy[T <: AnyRef](x: T) = SafeLazy.strict(x)

  // Deterministic name hash for a definition name.
  def genNameHash(defn: String): Gen[xsbti.api._internalOnly_NameHash] =
    const(new xsbti.api._internalOnly_NameHash(defn, defn.hashCode()))

  // Splits the name hashes of `defns` randomly between regular and implicit members.
  def genNameHashes(defns: Seq[String]): Gen[xsbti.api._internalOnly_NameHashes] = {
    def partitionAccordingToMask[T](mask: List[Boolean], xs: List[T]): (List[T], List[T]) = {
      val (p1, p2) = (mask zip xs).partition(_._1)
      (p1.map(_._2), p2.map(_._2))
    }
    // NOTE(review): `pairsOfGenerators` is never used below — confirm whether it is dead code.
    val pairsOfGenerators = for (defn <- defns) yield {
      for {
        isRegularMember <- arbitrary[Boolean]
        nameHash <- genNameHash(defn)
      } yield (isRegularMember, nameHash)
    }
    val genNameHashesList = Gen.sequence[List, xsbti.api._internalOnly_NameHash](defns.map(genNameHash))
    val genTwoListOfNameHashes = for {
      nameHashesList <- genNameHashesList
      isRegularMemberList <- listOfN(nameHashesList.length, arbitrary[Boolean])
    } yield partitionAccordingToMask(isRegularMemberList, nameHashesList)
    for {
      (regularMemberNameHashes, implicitMemberNameHashes) <- genTwoListOfNameHashes
    } yield new xsbti.api._internalOnly_NameHashes(regularMemberNameHashes.toArray, implicitMemberNameHashes.toArray)
  }

  // A Source whose API contains one ClassLike per definition name.
  def genSource(defns: Seq[String]): Gen[Source] = for {
    startTime <- arbitrary[Long]
    hashLen <- choose(10, 20) // Requred by SameAPI to be > 0.
    hash <- Gen.containerOfN[Array, Byte](hashLen, arbitrary[Byte])
    apiHash <- arbitrary[Int]
    hasMacro <- arbitrary[Boolean]
    nameHashes <- genNameHashes(defns)
  } yield new Source(new Compilation(startTime, Array()), hash, new SourceAPI(Array(), Array(defns map makeDefinition: _*)), apiHash, nameHashes, hasMacro)

  def genSources(all_defns: Seq[Seq[String]]): Gen[Seq[Source]] = Gen.sequence[List, Source](all_defns.map(genSource))

  // APIs consistent with the dependencies recorded in `rel`.
  def genAPIs(rel: Relations): Gen[APIs] = {
    val internal = rel.allInternalSrcDeps.toList.sorted
    val external = rel.allExternalDeps.toList.sorted
    for {
      internalSources <- genSources(internal map { f: File => rel.classNames(f).toList.sorted })
      externalSources <- genSources(external map { s: String => s :: Nil })
    } yield APIs(zipMap(internal, internalSources), zipMap(external, externalSources))
  }

  // A Relation mapping every source file to 1..maxRelatives generated values.
  def genRelation[T](g: Gen[T])(srcs: List[File]): Gen[Relation[File, T]] = for {
    n <- choose(1, maxRelatives)
    entries <- listOfN(srcs.length, containerOfN[Set, T](n, g))
  } yield Relation.reconstruct(zipMap(srcs, entries))

  val genFileRelation = genRelation[File](unique(genFile)) _
  val genStringRelation = genRelation[String](unique(identifier)) _

  def genRSource(srcs: List[File]): Gen[Relations.Source] = for {
    internal <- listOfN(srcs.length, someOf(srcs)) // Internal dep targets must come from list of sources.
    external <- genStringRelation(srcs)
  } yield Relations.makeSource( // Ensure that we don't generate a dep of some file on itself.
    Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap),
    external)

  // A sub-relation of `src`: arbitrary subsets of its internal and external pairs.
  def genSubRSource(src: Relations.Source): Gen[Relations.Source] = for {
    internal <- someOf(src.internal.all.toList)
    external <- someOf(src.external.all.toList)
  } yield Relations.makeSource(Relation.empty ++ internal, Relation.empty ++ external)

  def genRSourceDependencies(srcs: List[File]): Gen[Relations.SourceDependencies] = for {
    internal <- listOfN(srcs.length, someOf(srcs))
    external <- genStringRelation(srcs)
  } yield Relations.makeSourceDependencies(
    Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap),
    external)

  // A sub-relation of `src` for the name-hashing dependency representation.
  def genSubRSourceDependencies(src: Relations.SourceDependencies): Gen[Relations.SourceDependencies] = for {
    internal <- someOf(src.internal.all.toList)
    external <- someOf(src.external.all.toList)
  } yield Relations.makeSourceDependencies(Relation.empty ++ internal, Relation.empty ++ external)

  // Relations in the legacy (direct / publicInherited) representation.
  def genRelations: Gen[Relations] = for {
    numSrcs <- choose(0, maxSources)
    srcs <- listOfN(numSrcs, genFile)
    srcProd <- genFileRelation(srcs)
    binaryDep <- genFileRelation(srcs)
    direct <- genRSource(srcs)
    publicInherited <- genSubRSource(direct)
    classes <- genStringRelation(srcs)
  } yield Relations.make(srcProd, binaryDep, direct, publicInherited, classes)

  // Relations in the name-hashing (memberRef / inheritance) representation.
  // NOTE(review): `InternalDependencies(...)` / `ExternalDependencies(...)` appear on the
  // right of `<-` although they do not look like Gen values — confirm how this compiles.
  def genRelationsNameHashing: Gen[Relations] = for {
    numSrcs <- choose(0, maxSources)
    srcs <- listOfN(numSrcs, genFile)
    srcProd <- genFileRelation(srcs)
    binaryDep <- genFileRelation(srcs)
    memberRef <- genRSourceDependencies(srcs)
    inheritance <- genSubRSourceDependencies(memberRef)
    classes <- genStringRelation(srcs)
    names <- genStringRelation(srcs)
    internal <- InternalDependencies(Map(DependencyByMemberRef -> memberRef.internal, DependencyByInheritance -> inheritance.internal))
    external <- ExternalDependencies(Map(DependencyByMemberRef -> memberRef.external, DependencyByInheritance -> inheritance.external))
  } yield Relations.make(srcProd, binaryDep, internal, external, classes, names)

  // A full Analysis whose stamps, APIs and relations are mutually consistent.
  def genAnalysis(nameHashing: Boolean): Gen[Analysis] = for {
    rels <- if (nameHashing) genRelationsNameHashing else genRelations
    stamps <- genStamps(rels)
    apis <- genAPIs(rels)
  } yield new MAnalysis(stamps, apis, rels, SourceInfos.empty, Compilations.empty)
}
| jasonchaffee/sbt | compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala | Scala | bsd-3-clause | 8,430 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v2
import uk.gov.hmrc.ct.box.CtBoxIdentifier
abstract class J6 extends CtBoxIdentifier(name = "Tax Avoidance 6 Reference Number")
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600j/v2/J6.scala | Scala | apache-2.0 | 768 |
/*
* Copyright 2021 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.ppml.base
import com.intel.analytics.bigdl.ppml.common.{FLDataType, FLPhase, Storage}
import com.intel.analytics.bigdl.ppml.generated.FGBoostServiceProto._
import com.intel.analytics.bigdl.ppml.generated.FlBaseProto.TensorMap
import org.apache.logging.log4j.LogManager
/**
 * The storage holder holding all types of Storage.
 * Aggregator could use this generic holder type to aggregate.
 * Exactly one of the four typed storages is non-null, selected by `flDataType`.
 * @param flDataType the instance of storage of this holder, could be one of [[FLDataType]]
 */
class StorageHolder(flDataType: FLDataType) {
  private val logger = LogManager.getLogger(getClass)
  // Number of clients that have uploaded data for the current round.
  private var clientDataSize: Int = 0
  // Only the storage matching `flDataType` is instantiated; the rest stay null.
  private var tensorMapStorage: Storage[TensorMap] = null
  private var treeSplitStorage: Storage[DataSplit] = null
  private var treeLeafStorage: Storage[TreeLeaf] = null
  private var treeEvalStorage: Storage[java.util.List[BoostEval]] = null

  flDataType match {
    case FLDataType.TENSOR_MAP => tensorMapStorage = new Storage[TensorMap](flDataType.toString)
    case FLDataType.TREE_SPLIT => treeSplitStorage = new Storage[DataSplit](flDataType.toString)
    case FLDataType.TREE_LEAF => treeLeafStorage = new Storage[TreeLeaf](flDataType.toString)
    case FLDataType.TREE_EVAL => treeEvalStorage =
      new Storage[java.util.List[BoostEval]](flDataType.toString)
    case _ => throw new NotImplementedError()
  }

  /** Returns the version of whichever storage this holder owns. */
  def getVersion(): Int = {
    if (tensorMapStorage != null) tensorMapStorage.version
    else if (treeSplitStorage != null) treeSplitStorage.version
    else if (treeLeafStorage != null) treeLeafStorage.version
    else if (treeEvalStorage != null) treeEvalStorage.version
    else throw new NotImplementedError()
  }

  /** Number of clients whose data is currently stored. */
  def getClientDataSize() = this.clientDataSize

  /**
   * Stores one client's upload into the matching typed storage and refreshes
   * the client count. Exactly one field of `dataHolder` is expected to be set;
   * throws IllegalArgumentException when all are empty.
   */
  def putClientData(clientID: String, dataHolder: DataHolder) = {
    if (dataHolder.tensorMap != null) {
      tensorMapStorage.clientData.put(clientID, dataHolder.tensorMap)
      clientDataSize = tensorMapStorage.clientData.size()
    } else if (dataHolder.split != null) {
      treeSplitStorage.clientData.put(clientID, dataHolder.split)
      clientDataSize = treeSplitStorage.clientData.size()
    } else if (dataHolder.treeLeaf != null) {
      treeLeafStorage.clientData.put(clientID, dataHolder.treeLeaf)
      clientDataSize = treeLeafStorage.clientData.size()
    } else if (dataHolder.boostEval != null) {
      treeEvalStorage.clientData.put(clientID, dataHolder.boostEval)
      clientDataSize = treeEvalStorage.clientData.size()
    } else {
      throw new IllegalArgumentException("Data is empty, could not uploaded to server.")
    }
  }

  def getTensorMapStorage() = this.tensorMapStorage
  def getSplitStorage() = this.treeSplitStorage
  def getLeafStorage() = this.treeLeafStorage
  // NOTE(review): name says "branch" but this returns the tree-eval storage — confirm intent.
  def getBranchStorage() = this.treeEvalStorage
}
| intel-analytics/BigDL | scala/ppml/src/main/scala/com/intel/analytics/bigdl/ppml/base/StorageHolder.scala | Scala | apache-2.0 | 3,405 |
package lila.lobby
import org.joda.time.DateTime
import lila.game.Pov
import lila.user.UserRepo
/**
 * Reacts to an aborted game: re-creates the correspondence seek (if any) and
 * rebalances the colour counters of both players.
 */
private[lobby] final class AbortListener(seekApi: SeekApi) {

  // `??` / `>>-` are presumably lila's option/funit combinators (empty => funit/zero) —
  // behavior assumed from usage; confirm against lila's common package.
  def apply(pov: Pov): Funit =
    (pov.game.isCorrespondence ?? recreateSeek(pov)) >>-
      cancelColorIncrement(pov)

  // Undo the colour increments applied when the game was created: the first player
  // gets -1 and the second +1. Any other number of players is ignored.
  private def cancelColorIncrement(pov: Pov): Unit = pov.game.userIds match {
    case List(u1, u2) =>
      UserRepo.incColor(u1, -1)
      UserRepo.incColor(u2, 1)
    case _ =>
  }

  // Re-insert the archived seek, but only when the aborter is not the seek's owner
  // (the owner aborting their own game should not resurrect their seek).
  private def recreateSeek(pov: Pov): Funit = pov.player.userId ?? { aborterId =>
    seekApi.findArchived(pov.game.id) flatMap {
      _ ?? { seek =>
        (seek.user.id != aborterId) ?? seekApi.insert(Seek renew seek)
      }
    }
  }
}
| clarkerubber/lila | modules/lobby/src/main/AbortListener.scala | Scala | agpl-3.0 | 709 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.net.{URI, URISyntaxException}
import java.text.{BreakIterator, DecimalFormat, DecimalFormatSymbols}
import java.util.{HashMap, Locale, Map => JMap}
import java.util.regex.Pattern
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.codec.binary.{Base64 => CommonsBase64}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData, TypeUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.UTF8StringBuilder
import org.apache.spark.unsafe.types.{ByteArray, UTF8String}
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines expressions for string operations.
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * An expression that concatenates multiple input strings or array of strings into a single string,
 * using a given separator (the first child).
 *
 * Returns null if the separator is null. Otherwise, concat_ws skips all null values.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(sep, [str | array(str)]+) - Returns the concatenation of the strings separated by `sep`.",
  examples = """
    Examples:
      > SELECT _FUNC_(' ', 'Spark', 'SQL');
       Spark SQL
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class ConcatWs(children: Seq[Expression])
  extends Expression with ImplicitCastInputTypes {

  require(children.nonEmpty, s"$prettyName requires at least one argument.")

  override def prettyName: String = "concat_ws"

  /** The 1st child (separator) is str, and rest are either str or array of str. */
  override def inputTypes: Seq[AbstractDataType] = {
    val arrayOrStr = TypeCollection(ArrayType(StringType), StringType)
    StringType +: Seq.fill(children.size - 1)(arrayOrStr)
  }

  override def dataType: DataType = StringType

  // Nullable only when the separator (first child) can be null; other nulls are skipped.
  override def nullable: Boolean = children.head.nullable
  override def foldable: Boolean = children.forall(_.foldable)

  // Interpreted path: flatten string and array-of-string children into one UTF8String stream
  // and delegate to UTF8String.concatWs, which skips nulls and returns null for a null separator.
  override def eval(input: InternalRow): Any = {
    val flatInputs = children.flatMap { child =>
      child.eval(input) match {
        case s: UTF8String => Iterator(s)
        case arr: ArrayData => arr.toArray[UTF8String](StringType)
        case null => Iterator(null.asInstanceOf[UTF8String])
      }
    }
    UTF8String.concatWs(flatInputs.head, flatInputs.tail : _*)
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    if (children.forall(_.dataType == StringType)) {
      // All children are strings. In that case we can construct a fixed size array.
      val evals = children.map(_.genCode(ctx))
      val separator = evals.head
      val strings = evals.tail
      val numArgs = strings.length
      val args = ctx.freshName("args")

      // Fill args[i] only for non-null inputs; nulls stay as Java null and are skipped by concatWs.
      val inputs = strings.zipWithIndex.map { case (eval, index) =>
        if (eval.isNull != TrueLiteral) {
          s"""
             ${eval.code}
             if (!${eval.isNull}) {
               $args[$index] = ${eval.value};
             }
           """
        } else {
          ""
        }
      }
      val codes = ctx.splitExpressionsWithCurrentInputs(
          expressions = inputs,
          funcName = "valueConcatWs",
          extraArguments = ("UTF8String[]", args) :: Nil)
      ev.copy(code"""
        UTF8String[] $args = new UTF8String[$numArgs];
        ${separator.code}
        $codes
        UTF8String ${ev.value} = UTF8String.concatWs(${separator.value}, $args);
        boolean ${ev.isNull} = ${ev.value} == null;
      """)
    } else {
      // Mixed string / array-of-string children: the total argument count is only known at
      // runtime, so first count the varargs, then allocate and fill the array.
      val array = ctx.freshName("array")
      val varargNum = ctx.freshName("varargNum")
      val idxVararg = ctx.freshName("idxInVararg")

      val evals = children.map(_.genCode(ctx))
      val (varargCount, varargBuild) = children.tail.zip(evals.tail).map { case (child, eval) =>
        child.dataType match {
          case StringType =>
            ("", // we count all the StringType arguments num at once below.
              if (eval.isNull == TrueLiteral) {
                ""
              } else {
                s"$array[$idxVararg ++] = ${eval.isNull} ? (UTF8String) null : ${eval.value};"
              })
          case _: ArrayType =>
            val size = ctx.freshName("n")
            if (eval.isNull == TrueLiteral) {
              ("", "")
            } else {
              // Each array child contributes numElements() entries.
              (s"""
                if (!${eval.isNull}) {
                  $varargNum += ${eval.value}.numElements();
                }
                """,
                s"""
                if (!${eval.isNull}) {
                  final int $size = ${eval.value}.numElements();
                  for (int j = 0; j < $size; j ++) {
                    $array[$idxVararg ++] = ${CodeGenerator.getValue(eval.value, StringType, "j")};
                  }
                }
                """)
            }
        }
      }.unzip

      val codes = ctx.splitExpressionsWithCurrentInputs(evals.map(_.code.toString))

      // Pass 1: count the total number of vararg entries.
      val varargCounts = ctx.splitExpressionsWithCurrentInputs(
        expressions = varargCount,
        funcName = "varargCountsConcatWs",
        returnType = "int",
        makeSplitFunction = body =>
          s"""
             |int $varargNum = 0;
             |$body
             |return $varargNum;
           """.stripMargin,
        foldFunctions = _.map(funcCall => s"$varargNum += $funcCall;").mkString("\n"))

      // Pass 2: fill the array, threading the running index through the split functions.
      val varargBuilds = ctx.splitExpressionsWithCurrentInputs(
        expressions = varargBuild,
        funcName = "varargBuildsConcatWs",
        extraArguments = ("UTF8String []", array) :: ("int", idxVararg) :: Nil,
        returnType = "int",
        makeSplitFunction = body =>
          s"""
             |$body
             |return $idxVararg;
           """.stripMargin,
        foldFunctions = _.map(funcCall => s"$idxVararg = $funcCall;").mkString("\n"))
      ev.copy(
        code"""
        $codes
        int $varargNum = ${children.count(_.dataType == StringType) - 1};
        int $idxVararg = 0;
        $varargCounts
        UTF8String[] $array = new UTF8String[$varargNum];
        $varargBuilds
        UTF8String ${ev.value} = UTF8String.concatWs(${evals.head.value}, $array);
        boolean ${ev.isNull} = ${ev.value} == null;
      """)
    }
  }
}
/**
 * An expression that returns the `n`-th input in given inputs.
 * If all inputs are binary, `elt` returns an output as binary. Otherwise, it returns as string.
 * If any input is null, `elt` returns null.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(n, input1, input2, ...) - Returns the `n`-th input, e.g., returns `input2` when `n` is 2.",
  examples = """
    Examples:
      > SELECT _FUNC_(1, 'scala', 'java');
       scala
  """,
  since = "2.0.0")
// scalastyle:on line.size.limit
case class Elt(children: Seq[Expression]) extends Expression {

  // First child is the 1-based index; the remaining children are the candidate inputs.
  private lazy val indexExpr = children.head
  private lazy val inputExprs = children.tail.toArray

  /** This expression is always nullable because it returns null if index is out of range. */
  override def nullable: Boolean = true

  // checkInputDataTypes guarantees all inputs share one type, so the first one's type is the
  // result type (string by default when there are no inputs yet, e.g. during analysis).
  override def dataType: DataType = inputExprs.map(_.dataType).headOption.getOrElse(StringType)

  override def checkInputDataTypes(): TypeCheckResult = {
    if (children.size < 2) {
      TypeCheckResult.TypeCheckFailure("elt function requires at least two arguments")
    } else {
      val (indexType, inputTypes) = (indexExpr.dataType, inputExprs.map(_.dataType))
      // The index must be a plain integer.
      if (indexType != IntegerType) {
        return TypeCheckResult.TypeCheckFailure(s"first input to function $prettyName should " +
          s"have ${IntegerType.catalogString}, but it's ${indexType.catalogString}")
      }
      // Every candidate input must be string or binary ...
      if (inputTypes.exists(tpe => !Seq(StringType, BinaryType).contains(tpe))) {
        return TypeCheckResult.TypeCheckFailure(
          s"input to function $prettyName should have ${StringType.catalogString} or " +
            s"${BinaryType.catalogString}, but it's " +
            inputTypes.map(_.catalogString).mkString("[", ", ", "]"))
      }
      // ... and they must all agree on a single type.
      TypeUtils.checkForSameTypeInputExpr(inputTypes, s"function $prettyName")
    }
  }

  override def eval(input: InternalRow): Any = {
    val indexObj = indexExpr.eval(input)
    if (indexObj == null) {
      // A null index yields null.
      null
    } else {
      val index = indexObj.asInstanceOf[Int]
      if (index <= 0 || index > inputExprs.length) {
        // Out-of-range (1-based) index yields null rather than an error.
        null
      } else {
        // Only the selected input is evaluated.
        inputExprs(index - 1).eval(input)
      }
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val index = indexExpr.genCode(ctx)
    val inputs = inputExprs.map(_.genCode(ctx))
    val indexVal = ctx.freshName("index")
    val indexMatched = ctx.freshName("eltIndexMatched")

    // Mutable state so split-out helper methods can publish the selected value to the caller.
    val inputVal = ctx.addMutableState(CodeGenerator.javaType(dataType), "inputVal")

    // One guarded assignment per candidate input; `continue` jumps out of the enclosing
    // do { ... } while (false) once the index matches, so at most one input is evaluated.
    val assignInputValue = inputs.zipWithIndex.map { case (eval, index) =>
      s"""
         |if ($indexVal == ${index + 1}) {
         |  ${eval.code}
         |  $inputVal = ${eval.isNull} ? null : ${eval.value};
         |  $indexMatched = true;
         |  continue;
         |}
       """.stripMargin
    }

    // The assignments may be split across generated methods (to stay under JVM method size
    // limits); each split function signals via its boolean return whether the index matched,
    // and the fold re-establishes the `continue`-based early exit in the caller.
    val codes = ctx.splitExpressionsWithCurrentInputs(
      expressions = assignInputValue,
      funcName = "eltFunc",
      extraArguments = ("int", indexVal) :: Nil,
      returnType = CodeGenerator.JAVA_BOOLEAN,
      makeSplitFunction = body =>
        s"""
           |${CodeGenerator.JAVA_BOOLEAN} $indexMatched = false;
           |do {
           |  $body
           |} while (false);
           |return $indexMatched;
         """.stripMargin,
      foldFunctions = _.map { funcCall =>
        s"""
           |$indexMatched = $funcCall;
           |if ($indexMatched) {
           |  continue;
           |}
         """.stripMargin
      }.mkString)

    ev.copy(
      code"""
         |${index.code}
         |final int $indexVal = ${index.value};
         |${CodeGenerator.JAVA_BOOLEAN} $indexMatched = false;
         |$inputVal = null;
         |do {
         |  $codes
         |} while (false);
         |final ${CodeGenerator.javaType(dataType)} ${ev.value} = $inputVal;
         |final boolean ${ev.isNull} = ${ev.value} == null;
       """.stripMargin)
  }
}
/**
 * Base trait for unary expressions that map one string to another (e.g. case conversion).
 * Implementors only provide [[convert]]; null propagation comes from the host
 * [[UnaryExpression]]'s null-safe evaluation.
 */
trait String2StringExpression extends ImplicitCastInputTypes {
  self: UnaryExpression =>

  /** Transforms the (non-null) input string into the result string. */
  def convert(v: UTF8String): UTF8String

  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType)

  protected override def nullSafeEval(input: Any): Any =
    convert(input.asInstanceOf[UTF8String])
}
/**
 * Uppercases every character of the child string expression.
 */
@ExpressionDescription(
  usage = "_FUNC_(str) - Returns `str` with all characters changed to uppercase.",
  examples = """
    Examples:
      > SELECT _FUNC_('SparkSql');
       SPARKSQL
  """,
  since = "1.0.1")
case class Upper(child: Expression)
  extends UnaryExpression with String2StringExpression {

  // scalastyle:off caselocale
  override def convert(v: UTF8String): UTF8String = v.toUpperCase
  // scalastyle:on caselocale

  // Generated Java delegates straight to UTF8String.toUpperCase() on the evaluated child.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, input => s"($input).toUpperCase()")
  }
}
/**
 * Lowercases every character of the child string expression.
 */
@ExpressionDescription(
  usage = "_FUNC_(str) - Returns `str` with all characters changed to lowercase.",
  examples = """
    Examples:
      > SELECT _FUNC_('SparkSql');
       sparksql
  """,
  since = "1.0.1")
case class Lower(child: Expression) extends UnaryExpression with String2StringExpression {

  // scalastyle:off caselocale
  override def convert(v: UTF8String): UTF8String = v.toLowerCase
  // scalastyle:on caselocale

  // Generated Java delegates straight to UTF8String.toLowerCase() on the evaluated child.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, input => s"($input).toLowerCase()")
  }
}
/** A base trait for functions that compare two strings, returning a boolean. */
abstract class StringPredicate extends BinaryExpression
  with Predicate with ImplicitCastInputTypes with NullIntolerant {

  /** Compares the two non-null operands; null inputs yield null via NullIntolerant. */
  def compare(l: UTF8String, r: UTF8String): Boolean

  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)

  protected override def nullSafeEval(input1: Any, input2: Any): Any =
    compare(input1.asInstanceOf[UTF8String], input2.asInstanceOf[UTF8String])

  override def toString: String = s"$nodeName($left, $right)"
}
/**
 * Boolean predicate: true iff `left` contains `right` as a substring.
 */
case class Contains(left: Expression, right: Expression) extends StringPredicate {

  override def compare(l: UTF8String, r: UTF8String): Boolean = l.contains(r)

  // Generated Java calls UTF8String.contains on the two evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (lhs, rhs) => s"($lhs).contains($rhs)")
  }
}
/**
 * Boolean predicate: true iff `left` starts with the string `right`.
 */
case class StartsWith(left: Expression, right: Expression) extends StringPredicate {

  override def compare(l: UTF8String, r: UTF8String): Boolean = l.startsWith(r)

  // Generated Java calls UTF8String.startsWith on the two evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (lhs, rhs) => s"($lhs).startsWith($rhs)")
  }
}
/**
 * Boolean predicate: true iff `left` ends with the string `right`.
 */
case class EndsWith(left: Expression, right: Expression) extends StringPredicate {

  override def compare(l: UTF8String, r: UTF8String): Boolean = l.endsWith(r)

  // Generated Java calls UTF8String.endsWith on the two evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (lhs, rhs) => s"($lhs).endsWith($rhs)")
  }
}
/**
 * Replaces every occurrence of a search string within the source string.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(str, search[, replace]) - Replaces all occurrences of `search` with `replace`.",
  arguments = """
    Arguments:
      * str - a string expression
      * search - a string expression. If `search` is not found in `str`, `str` is returned unchanged.
      * replace - a string expression. If `replace` is not specified or is an empty string, nothing replaces
          the string that is removed from `str`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('ABCabc', 'abc', 'DEF');
       ABCDEF
  """,
  since = "2.3.0")
// scalastyle:on line.size.limit
case class StringReplace(srcExpr: Expression, searchExpr: Expression, replaceExpr: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // Two-argument form: an omitted `replace` means "delete the matches".
  def this(srcExpr: Expression, searchExpr: Expression) = {
    this(srcExpr, searchExpr, Literal(""))
  }

  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType, StringType)
  override def children: Seq[Expression] = srcExpr :: searchExpr :: replaceExpr :: Nil
  override def prettyName: String = "replace"

  override def nullSafeEval(srcEval: Any, searchEval: Any, replaceEval: Any): Any = {
    val source = srcEval.asInstanceOf[UTF8String]
    val search = searchEval.asInstanceOf[UTF8String]
    val replacement = replaceEval.asInstanceOf[UTF8String]
    source.replace(search, replacement)
  }

  // Generated Java delegates to UTF8String.replace on the three evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (source, search, replacement) => {
      s"""${ev.value} = $source.replace($search, $replacement);"""
    })
  }
}
object Overlay {

  // A negative `len` means "not specified": fall back to the replacement's own length.
  // (If you specify length, it must be a positive whole number or zero.)
  private def resolveLength(len: Int, replaceLen: Int): Int =
    if (len >= 0) len else replaceLen

  /** String variant: splices `replace` into `input` at 1-based position `pos`. */
  def calculate(input: UTF8String, replace: UTF8String, pos: Int, len: Int): UTF8String = {
    val length = resolveLength(len, replace.numChars)
    val builder = new UTF8StringBuilder
    builder.append(input.substringSQL(1, pos - 1))   // prefix before the overlay
    builder.append(replace)                          // the replacement itself
    builder.append(input.substringSQL(pos + length, Int.MaxValue)) // remaining suffix
    builder.build()
  }

  /** Binary variant: same splice semantics over raw bytes. */
  def calculate(input: Array[Byte], replace: Array[Byte], pos: Int, len: Int): Array[Byte] = {
    val length = resolveLength(len, replace.length)
    ByteArray.concat(
      ByteArray.subStringSQL(input, 1, pos - 1),
      replace,
      ByteArray.subStringSQL(input, pos + length, Int.MaxValue))
  }
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(input, replace, pos[, len]) - Replace `input` with `replace` that starts at `pos` and is of length `len`.",
  examples = """
    Examples:
      > SELECT _FUNC_('Spark SQL' PLACING '_' FROM 6);
       Spark_SQL
      > SELECT _FUNC_('Spark SQL' PLACING 'CORE' FROM 7);
       Spark CORE
      > SELECT _FUNC_('Spark SQL' PLACING 'ANSI ' FROM 7 FOR 0);
       Spark ANSI SQL
      > SELECT _FUNC_('Spark SQL' PLACING 'tructured' FROM 2 FOR 4);
       Structured SQL
      > SELECT _FUNC_(encode('Spark SQL', 'utf-8') PLACING encode('_', 'utf-8') FROM 6);
       Spark_SQL
      > SELECT _FUNC_(encode('Spark SQL', 'utf-8') PLACING encode('CORE', 'utf-8') FROM 7);
       Spark CORE
      > SELECT _FUNC_(encode('Spark SQL', 'utf-8') PLACING encode('ANSI ', 'utf-8') FROM 7 FOR 0);
       Spark ANSI SQL
      > SELECT _FUNC_(encode('Spark SQL', 'utf-8') PLACING encode('tructured', 'utf-8') FROM 2 FOR 4);
       Structured SQL
  """)
// scalastyle:on line.size.limit
case class Overlay(input: Expression, replace: Expression, pos: Expression, len: Expression)
  extends QuaternaryExpression with ImplicitCastInputTypes with NullIntolerant {

  /** Three-argument form: -1 is the sentinel for "use the replacement's own length". */
  def this(str: Expression, replace: Expression, pos: Expression) = {
    this(str, replace, pos, Literal.create(-1, IntegerType))
  }

  // The result type mirrors the input: binary overlay for binary input, string otherwise.
  override def dataType: DataType = input.dataType

  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType),
    TypeCollection(StringType, BinaryType), IntegerType, IntegerType)

  override def children: Seq[Expression] = input :: replace :: pos :: len :: Nil

  override def checkInputDataTypes(): TypeCheckResult = {
    val inputTypeCheck = super.checkInputDataTypes()
    if (inputTypeCheck.isSuccess) {
      // `input` and `replace` must both be string, or both be binary.
      TypeUtils.checkForSameTypeInputExpr(
        input.dataType :: replace.dataType :: Nil, s"function $prettyName")
    } else {
      inputTypeCheck
    }
  }

  // Resolved once per dataType: dispatches to the string or the binary implementation
  // of Overlay.calculate without re-matching on every row.
  private lazy val replaceFunc = input.dataType match {
    case StringType =>
      (inputEval: Any, replaceEval: Any, posEval: Int, lenEval: Int) => {
        Overlay.calculate(
          inputEval.asInstanceOf[UTF8String],
          replaceEval.asInstanceOf[UTF8String],
          posEval, lenEval)
      }
    case BinaryType =>
      (inputEval: Any, replaceEval: Any, posEval: Int, lenEval: Int) => {
        Overlay.calculate(
          inputEval.asInstanceOf[Array[Byte]],
          replaceEval.asInstanceOf[Array[Byte]],
          posEval, lenEval)
      }
  }

  override def nullSafeEval(inputEval: Any, replaceEval: Any, posEval: Any, lenEval: Any): Any = {
    replaceFunc(inputEval, replaceEval, posEval.asInstanceOf[Int], lenEval.asInstanceOf[Int])
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Fix: the expression handed to defineCodeGen previously ended with ";". defineCodeGen
    // already terminates the generated assignment, so the extra semicolon emitted a redundant
    // empty statement ("...);;") in the generated Java. Dropped here.
    defineCodeGen(ctx, ev, (input, replace, pos, len) =>
      "org.apache.spark.sql.catalyst.expressions.Overlay" +
        s".calculate($input, $replace, $pos, $len)")
  }
}
object StringTranslate {

  /**
   * Builds the per-character translation table used by `translate`.
   *
   * Characters of `matchingString` beyond the length of `replaceString` map to '\u0000'
   * (the deletion marker). If a character appears more than once in `matchingString`,
   * only its first occurrence defines the mapping.
   */
  def buildDict(matchingString: UTF8String, replaceString: UTF8String)
    : JMap[Character, Character] = {
    val matching = matchingString.toString()
    val replace = replaceString.toString()
    val dict = new HashMap[Character, Character]()
    for (idx <- 0 until matching.length()) {
      val key = matching.charAt(idx)
      if (dict.get(key) == null) {
        val mapped = if (idx < replace.length()) replace.charAt(idx) else '\u0000'
        dict.put(key, mapped)
      }
    }
    dict
  }
}
/**
 * A function translate any character in the `srcExpr` by a character in `replaceExpr`.
 * The characters in `replaceExpr` is corresponding to the characters in `matchingExpr`.
 * The translate will happen when any character in the string matching with the character
 * in the `matchingExpr`.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(input, from, to) - Translates the `input` string by replacing the characters present in the `from` string with the corresponding characters in the `to` string.",
  examples = """
    Examples:
      > SELECT _FUNC_('AaBbCc', 'abc', '123');
       A1B2C3
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class StringTranslate(srcExpr: Expression, matchingExpr: Expression, replaceExpr: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // Cache of the last-seen matching/replace pair and the dictionary built from them, so the
  // (typically constant) dictionary is not rebuilt on every row. Transient: not serialized
  // with the expression tree.
  @transient private var lastMatching: UTF8String = _
  @transient private var lastReplace: UTF8String = _
  @transient private var dict: JMap[Character, Character] = _

  override def nullSafeEval(srcEval: Any, matchingEval: Any, replaceEval: Any): Any = {
    // Rebuild the dictionary only when matching/replace differ from the cached pair.
    if (matchingEval != lastMatching || replaceEval != lastReplace) {
      // clone() the inputs: the incoming UTF8Strings may point into reused row buffers.
      lastMatching = matchingEval.asInstanceOf[UTF8String].clone()
      lastReplace = replaceEval.asInstanceOf[UTF8String].clone()
      dict = StringTranslate.buildDict(lastMatching, lastReplace)
    }
    srcEval.asInstanceOf[UTF8String].translate(dict)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val classNameDict = classOf[JMap[Character, Character]].getCanonicalName

    // Same caching scheme as nullSafeEval, held in generated-class mutable state.
    val termLastMatching = ctx.addMutableState("UTF8String", "lastMatching")
    val termLastReplace = ctx.addMutableState("UTF8String", "lastReplace")
    val termDict = ctx.addMutableState(classNameDict, "dict")

    nullSafeCodeGen(ctx, ev, (src, matching, replace) => {
      // When both arguments are foldable they never change, so a null dict is the only
      // rebuild trigger; otherwise compare against the cached values on every row.
      val check = if (matchingExpr.foldable && replaceExpr.foldable) {
        s"$termDict == null"
      } else {
        s"!$matching.equals($termLastMatching) || !$replace.equals($termLastReplace)"
      }
      s"""if ($check) {
        // Not all of them is literal or matching or replace value changed
        $termLastMatching = $matching.clone();
        $termLastReplace = $replace.clone();
        $termDict = org.apache.spark.sql.catalyst.expressions.StringTranslate
          .buildDict($termLastMatching, $termLastReplace);
      }
      ${ev.value} = $src.translate($termDict);
      """
    })
  }

  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType, StringType)
  override def children: Seq[Expression] = srcExpr :: matchingExpr :: replaceExpr :: Nil
  override def prettyName: String = "translate"
}
/**
 * Returns the 1-based index of `left` within the comma-delimited list `right`,
 * or 0 when it is absent or itself contains a comma.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """
    _FUNC_(str, str_array) - Returns the index (1-based) of the given string (`str`) in the comma-delimited list (`str_array`).
      Returns 0, if the string was not found or if the given string (`str`) contains a comma.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('ab','abc,b,ab,c,def');
       3
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class FindInSet(left: Expression, right: Expression) extends BinaryExpression
  with ImplicitCastInputTypes {

  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType)
  override def dataType: DataType = IntegerType
  override def prettyName: String = "find_in_set"

  override protected def nullSafeEval(word: Any, set: Any): Any = {
    val commaList = set.asInstanceOf[UTF8String]
    commaList.findInSet(word.asInstanceOf[UTF8String])
  }

  // Generated Java delegates to UTF8String.findInSet on the evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (needle, csv) =>
      s"${ev.value} = $csv.findInSet($needle);"
    )
  }
}
/**
 * Base trait for the trim family (trim/ltrim/rtrim): string result, all-string inputs,
 * nullability and foldability derived from the children.
 */
trait String2TrimExpression extends Expression with ImplicitCastInputTypes {

  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq.fill(children.size)(StringType)

  // Null when any operand can be null; constant-foldable only when every operand is.
  override def nullable: Boolean = children.exists(_.nullable)
  override def foldable: Boolean = children.forall(_.foldable)
}
object StringTrim {
  /** Builds a trim of `str` using the default trim character (space). */
  def apply(str: Expression): StringTrim = StringTrim(str, None)

  /** Builds a trim of `str` that removes characters found in `trimStr` from both ends. */
  def apply(str: Expression, trimStr: Expression): StringTrim = StringTrim(str, Some(trimStr))
}
/**
 * A function that takes a character string, removes the leading and trailing characters matching
 * with any character in the trim string, returns the new string.
 * If BOTH and trimStr keywords are not specified, it defaults to remove space character from both
 * ends. The trim function will have one argument, which contains the source string.
 * If BOTH and trimStr keywords are specified, it trims the characters from both ends, and the trim
 * function will have two arguments, the first argument contains trimStr, the second argument
 * contains the source string.
 * trimStr: A character string to be trimmed from the source string, if it has multiple characters,
 * the function searches for each character in the source string, removes the characters from the
 * source string until it encounters the first non-match character.
 * BOTH: removes any character from both ends of the source string that matches characters in the
 * trim string.
 */
@ExpressionDescription(
  usage = """
    _FUNC_(str) - Removes the leading and trailing space characters from `str`.
    _FUNC_(BOTH FROM str) - Removes the leading and trailing space characters from `str`.
    _FUNC_(LEADING FROM str) - Removes the leading space characters from `str`.
    _FUNC_(TRAILING FROM str) - Removes the trailing space characters from `str`.
    _FUNC_(trimStr FROM str) - Remove the leading and trailing `trimStr` characters from `str`.
    _FUNC_(BOTH trimStr FROM str) - Remove the leading and trailing `trimStr` characters from `str`.
    _FUNC_(LEADING trimStr FROM str) - Remove the leading `trimStr` characters from `str`.
    _FUNC_(TRAILING trimStr FROM str) - Remove the trailing `trimStr` characters from `str`.
  """,
  arguments = """
    Arguments:
      * str - a string expression
      * trimStr - the trim string characters to trim, the default value is a single space
      * BOTH, FROM - these are keywords to specify trimming string characters from both ends of
          the string
      * LEADING, FROM - these are keywords to specify trimming string characters from the left
          end of the string
      * TRAILING, FROM - these are keywords to specify trimming string characters from the right
          end of the string
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('    SparkSQL   ');
       SparkSQL
      > SELECT _FUNC_(BOTH FROM '    SparkSQL   ');
       SparkSQL
      > SELECT _FUNC_(LEADING FROM '    SparkSQL   ');
       SparkSQL
      > SELECT _FUNC_(TRAILING FROM '    SparkSQL   ');
       SparkSQL
      > SELECT _FUNC_('SL' FROM 'SSparkSQLS');
       parkSQ
      > SELECT _FUNC_(BOTH 'SL' FROM 'SSparkSQLS');
       parkSQ
      > SELECT _FUNC_(LEADING 'SL' FROM 'SSparkSQLS');
       parkSQLS
      > SELECT _FUNC_(TRAILING 'SL' FROM 'SSparkSQLS');
       SSparkSQ
  """,
  since = "1.5.0")
case class StringTrim(
    srcStr: Expression,
    trimStr: Option[Expression] = None)
  extends String2TrimExpression {

  // Parser-facing constructor for TRIM(trimStr FROM srcStr): the trim string is bound first.
  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))

  def this(srcStr: Expression) = this(srcStr, None)

  override def prettyName: String = "trim"

  // The optional trim string, when present, is the second child.
  override def children: Seq[Expression] = if (trimStr.isDefined) {
    srcStr :: trimStr.get :: Nil
  } else {
    srcStr :: Nil
  }

  override def eval(input: InternalRow): Any = {
    val srcString = srcStr.eval(input).asInstanceOf[UTF8String]
    if (srcString == null) {
      null
    } else {
      if (trimStr.isDefined) {
        // NOTE(review): a null trim string is passed straight into UTF8String.trim(...);
        // the codegen path below returns null in that case — presumably trim(...) does
        // the same for a null argument. Confirm against UTF8String.
        srcString.trim(trimStr.get.eval(input).asInstanceOf[UTF8String])
      } else {
        srcString.trim()
      }
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val evals = children.map(_.genCode(ctx))
    val srcString = evals(0)

    if (evals.length == 1) {
      // No explicit trim string: trim spaces from both ends.
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trim();
        }""")
    } else {
      // Explicit trim string: a null trim string makes the whole result null.
      val trimString = evals(1)
      val getTrimFunction =
        s"""
        if (${trimString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trim(${trimString.value});
        }"""
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          $getTrimFunction
        }""")
    }
  }
}
object StringTrimLeft {
  /** Builds an ltrim of `str` using the default trim character (space). */
  def apply(str: Expression): StringTrimLeft = StringTrimLeft(str, None)

  /** Builds an ltrim of `str` that removes characters found in `trimStr` from the left end. */
  def apply(str: Expression, trimStr: Expression): StringTrimLeft =
    StringTrimLeft(str, Some(trimStr))
}
/**
 * A function that trims the characters from left end for a given string.
 * If LEADING and trimStr keywords are not specified, it defaults to remove space character from
 * the left end. The ltrim function will have one argument, which contains the source string.
 * If LEADING and trimStr keywords are not specified, it trims the characters from left end. The
 * ltrim function will have two arguments, the first argument contains trimStr, the second argument
 * contains the source string.
 * trimStr: the function removes any character from the left end of the source string which matches
 * with the characters from trimStr, it stops at the first non-match character.
 * LEADING: removes any character from the left end of the source string that matches characters in
 * the trim string.
 */
@ExpressionDescription(
  usage = """
    _FUNC_(str) - Removes the leading space characters from `str`.
  """,
  arguments = """
    Arguments:
      * str - a string expression
      * trimStr - the trim string characters to trim, the default value is a single space
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('    SparkSQL   ');
       SparkSQL
  """,
  since = "1.5.0")
case class StringTrimLeft(
    srcStr: Expression,
    trimStr: Option[Expression] = None)
  extends String2TrimExpression {

  // Parser-facing constructor for TRIM(LEADING trimStr FROM srcStr).
  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))

  def this(srcStr: Expression) = this(srcStr, None)

  override def prettyName: String = "ltrim"

  // The optional trim string, when present, is the second child.
  override def children: Seq[Expression] = if (trimStr.isDefined) {
    srcStr :: trimStr.get :: Nil
  } else {
    srcStr :: Nil
  }

  override def eval(input: InternalRow): Any = {
    val srcString = srcStr.eval(input).asInstanceOf[UTF8String]
    if (srcString == null) {
      null
    } else {
      if (trimStr.isDefined) {
        // NOTE(review): a null trim string is handed to UTF8String.trimLeft(...); the codegen
        // path below returns null for that case — presumably trimLeft(...) matches. Confirm.
        srcString.trimLeft(trimStr.get.eval(input).asInstanceOf[UTF8String])
      } else {
        srcString.trimLeft()
      }
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val evals = children.map(_.genCode(ctx))
    val srcString = evals(0)

    if (evals.length == 1) {
      // No explicit trim string: trim spaces from the left end only.
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trimLeft();
        }""")
    } else {
      // Explicit trim string: a null trim string makes the whole result null.
      val trimString = evals(1)
      val getTrimLeftFunction =
        s"""
        if (${trimString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trimLeft(${trimString.value});
        }"""
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          $getTrimLeftFunction
        }""")
    }
  }
}
object StringTrimRight {
  /** Builds an rtrim of `str` using the default trim character (space). */
  def apply(str: Expression): StringTrimRight = StringTrimRight(str, None)

  /** Builds an rtrim of `str` that removes characters found in `trimStr` from the right end. */
  def apply(str: Expression, trimStr: Expression): StringTrimRight =
    StringTrimRight(str, Some(trimStr))
}
/**
 * A function that trims the characters from right end for a given string.
 * If TRAILING and trimStr keywords are not specified, it defaults to remove space character
 * from the right end. The rtrim function will have one argument, which contains the source string.
 * If TRAILING and trimStr keywords are specified, it trims the characters from right end. The
 * rtrim function will have two arguments, the first argument contains trimStr, the second argument
 * contains the source string.
 * trimStr: the function removes any character from the right end of source string which matches
 * with the characters from trimStr, it stops at the first non-match character.
 * TRAILING: removes any character from the right end of the source string that matches characters
 * in the trim string.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """
    _FUNC_(str) - Removes the trailing space characters from `str`.
  """,
  arguments = """
    Arguments:
      * str - a string expression
      * trimStr - the trim string characters to trim, the default value is a single space
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('    SparkSQL   ');
       SparkSQL
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class StringTrimRight(
    srcStr: Expression,
    trimStr: Option[Expression] = None)
  extends String2TrimExpression {

  // Parser-facing constructor for TRIM(TRAILING trimStr FROM srcStr).
  def this(trimStr: Expression, srcStr: Expression) = this(srcStr, Option(trimStr))

  def this(srcStr: Expression) = this(srcStr, None)

  override def prettyName: String = "rtrim"

  // The optional trim string, when present, is the second child.
  override def children: Seq[Expression] = if (trimStr.isDefined) {
    srcStr :: trimStr.get :: Nil
  } else {
    srcStr :: Nil
  }

  override def eval(input: InternalRow): Any = {
    val srcString = srcStr.eval(input).asInstanceOf[UTF8String]
    if (srcString == null) {
      null
    } else {
      if (trimStr.isDefined) {
        // NOTE(review): a null trim string is handed to UTF8String.trimRight(...); the codegen
        // path below returns null for that case — presumably trimRight(...) matches. Confirm.
        srcString.trimRight(trimStr.get.eval(input).asInstanceOf[UTF8String])
      } else {
        srcString.trimRight()
      }
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val evals = children.map(_.genCode(ctx))
    val srcString = evals(0)

    if (evals.length == 1) {
      // No explicit trim string: trim spaces from the right end only.
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trimRight();
        }""")
    } else {
      // Explicit trim string: a null trim string makes the whole result null.
      val trimString = evals(1)
      val getTrimRightFunction =
        s"""
        if (${trimString.isNull}) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = ${srcString.value}.trimRight(${trimString.value});
        }"""
      ev.copy(evals.map(_.code) :+ code"""
        boolean ${ev.isNull} = false;
        UTF8String ${ev.value} = null;
        if (${srcString.isNull}) {
          ${ev.isNull} = true;
        } else {
          $getTrimRightFunction
        }""")
    }
  }
}
/**
 * A function that returns the position of the first occurrence of substr in the given string.
 * Returns null if either of the arguments are null and
 * returns 0 if substr could not be found in str.
 *
 * NOTE: that this is not zero based, but 1-based index. The first character in str has index 1.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(str, substr) - Returns the (1-based) index of the first occurrence of `substr` in `str`.",
  examples = """
    Examples:
      > SELECT _FUNC_('SparkSQL', 'SQL');
       6
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class StringInstr(str: Expression, substr: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  override def left: Expression = str
  override def right: Expression = substr
  override def dataType: DataType = IntegerType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
  override def prettyName: String = "instr"

  override def nullSafeEval(string: Any, sub: Any): Any = {
    val haystack = string.asInstanceOf[UTF8String]
    val needle = sub.asInstanceOf[UTF8String]
    // indexOf is 0-based (-1 when absent); +1 yields the SQL convention of 1-based / 0-absent.
    haystack.indexOf(needle, 0) + 1
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (strCode, subCode) => s"($strCode).indexOf($subCode, 0) + 1")
  }
}
/**
 * Returns the substring from string str before count occurrences of the delimiter delim.
 * If count is positive, everything the left of the final delimiter (counting from left) is
 * returned. If count is negative, every to the right of the final delimiter (counting from the
 * right) is returned. substring_index performs a case-sensitive match when searching for delim.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """
    _FUNC_(str, delim, count) - Returns the substring from `str` before `count` occurrences of the delimiter `delim`.
      If `count` is positive, everything to the left of the final delimiter (counting from the
      left) is returned. If `count` is negative, everything to the right of the final delimiter
      (counting from the right) is returned. The function substring_index performs a case-sensitive match
      when searching for `delim`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('www.apache.org', '.', 2);
       www.apache
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class SubstringIndex(strExpr: Expression, delimExpr: Expression, countExpr: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType, IntegerType)
  override def children: Seq[Expression] = Seq(strExpr, delimExpr, countExpr)
  override def prettyName: String = "substring_index"

  override def nullSafeEval(str: Any, delim: Any, count: Any): Any = {
    val source = str.asInstanceOf[UTF8String]
    val delimiter = delim.asInstanceOf[UTF8String]
    source.subStringIndex(delimiter, count.asInstanceOf[Int])
  }

  // Generated Java delegates to UTF8String.subStringIndex on the evaluated operands.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (strCode, delimCode, countCode) =>
      s"$strCode.subStringIndex($delimCode, $countCode)")
  }
}
/**
 * A function that returns the position of the first occurrence of substr
 * in given string after position pos.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """
    _FUNC_(substr, str[, pos]) - Returns the position of the first occurrence of `substr` in `str` after position `pos`.
      The given `pos` and return value are 1-based.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('bar', 'foobarbar');
       4
      > SELECT _FUNC_('bar', 'foobarbar', 5);
       7
      > SELECT POSITION('bar' IN 'foobarbar');
       4
  """,
  since = "1.5.0")
// scalastyle:on line.size.limit
case class StringLocate(substr: Expression, str: Expression, start: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // Two-argument form: search from the beginning (position 1).
  def this(substr: Expression, str: Expression) = {
    this(substr, str, Literal(1))
  }

  override def children: Seq[Expression] = substr :: str :: start :: Nil

  // Deliberately excludes `start`: a null start yields 0, not null (see eval below).
  override def nullable: Boolean = substr.nullable || str.nullable
  override def dataType: DataType = IntegerType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType, IntegerType)

  override def eval(input: InternalRow): Any = {
    val s = start.eval(input)
    if (s == null) {
      // if the start position is null, we need to return 0, (conform to Hive)
      0
    } else {
      val r = substr.eval(input)
      if (r == null) {
        null
      } else {
        val l = str.eval(input)
        if (l == null) {
          null
        } else {
          val sVal = s.asInstanceOf[Int]
          if (sVal < 1) {
            // Non-positive start positions are treated as "not found".
            0
          } else {
            // indexOf is 0-based; convert the 1-based SQL position both ways.
            l.asInstanceOf[UTF8String].indexOf(
              r.asInstanceOf[UTF8String],
              s.asInstanceOf[Int] - 1) + 1
          }
        }
      }
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val substrGen = substr.genCode(ctx)
    val strGen = str.genCode(ctx)
    val startGen = start.genCode(ctx)
    // Mirrors eval(): start is evaluated first and short-circuits to 0 when null; the other
    // operands propagate null; value defaults to 0 when start <= 0 or substr is absent.
    ev.copy(code = code"""
      int ${ev.value} = 0;
      boolean ${ev.isNull} = false;
      ${startGen.code}
      if (!${startGen.isNull}) {
        ${substrGen.code}
        if (!${substrGen.isNull}) {
          ${strGen.code}
          if (!${strGen.isNull}) {
            if (${startGen.value} > 0) {
              ${ev.value} = ${strGen.value}.indexOf(${substrGen.value},
                ${startGen.value} - 1) + 1;
            }
          } else {
            ${ev.isNull} = true;
          }
        } else {
          ${ev.isNull} = true;
        }
      }
     """)
  }

  override def prettyName: String = "locate"
}
/**
 * Returns str, left-padded with pad to a length of len.
 */
@ExpressionDescription(
  usage = """
    _FUNC_(str, len[, pad]) - Returns `str`, left-padded with `pad` to a length of `len`.
      If `str` is longer than `len`, the return value is shortened to `len` characters.
      If `pad` is not specified, `str` will be padded to the left with space characters.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('hi', 5, '??');
       ???hi
      > SELECT _FUNC_('hi', 1, '??');
       h
      > SELECT _FUNC_('hi', 5);
       hi
  """,
  since = "1.5.0")
case class StringLPad(str: Expression, len: Expression, pad: Expression = Literal(" "))
  extends TernaryExpression with ImplicitCastInputTypes {

  // Two-argument form: pad with a single space.
  def this(str: Expression, len: Expression) = {
    this(str, len, Literal(" "))
  }

  override def children: Seq[Expression] = str :: len :: pad :: Nil
  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(StringType, IntegerType, StringType)
  override def prettyName: String = "lpad"

  override def nullSafeEval(str: Any, len: Any, pad: Any): Any = {
    val source = str.asInstanceOf[UTF8String]
    source.lpad(len.asInstanceOf[Int], pad.asInstanceOf[UTF8String])
  }

  // Generated Java delegates to UTF8String.lpad on the evaluated operands.
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (strCode, lenCode, padCode) => s"$strCode.lpad($lenCode, $padCode)")
  }
}
/**
* Returns str, right-padded with pad to a length of len.
*/
@ExpressionDescription(
usage = """
_FUNC_(str, len[, pad]) - Returns `str`, right-padded with `pad` to a length of `len`.
If `str` is longer than `len`, the return value is shortened to `len` characters.
If `pad` is not specified, `str` will be padded to the right with space characters.
""",
examples = """
Examples:
> SELECT _FUNC_('hi', 5, '??');
hi???
> SELECT _FUNC_('hi', 1, '??');
h
> SELECT _FUNC_('hi', 5);
hi
""",
since = "1.5.0")
/**
 * rpad(str, len, pad): `str` right-padded with `pad` up to `len` characters;
 * inputs longer than `len` are truncated to `len`. Padding/truncation is
 * delegated entirely to [[UTF8String.rpad]].
 */
case class StringRPad(str: Expression, len: Expression, pad: Expression = Literal(" "))
  extends TernaryExpression with ImplicitCastInputTypes {

  // Two-argument SQL form: pad with a single space.
  def this(str: Expression, len: Expression) = this(str, len, Literal(" "))

  override def children: Seq[Expression] = Seq(str, len, pad)

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = StringType :: IntegerType :: StringType :: Nil

  override def nullSafeEval(str: Any, len: Any, pad: Any): Any = {
    val subject = str.asInstanceOf[UTF8String]
    val targetLength = len.asInstanceOf[Int]
    val padding = pad.asInstanceOf[UTF8String]
    subject.rpad(targetLength, padding)
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, (s, l, p) => s"$s.rpad($l, $p)")

  override def prettyName: String = "rpad"
}
/** Constants shared by the parse_url expression. */
object ParseUrl {
// Recognized values of the partToExtract argument (compared case-sensitively).
private val HOST = UTF8String.fromString("HOST")
private val PATH = UTF8String.fromString("PATH")
private val QUERY = UTF8String.fromString("QUERY")
private val REF = UTF8String.fromString("REF")
private val PROTOCOL = UTF8String.fromString("PROTOCOL")
private val FILE = UTF8String.fromString("FILE")
private val AUTHORITY = UTF8String.fromString("AUTHORITY")
private val USERINFO = UTF8String.fromString("USERINFO")
// Regex fragments for extracting one key from a query string: the key must
// sit at the start of the query or directly after '&', and the captured
// value runs until the next '&'.
private val REGEXPREFIX = "(&|^)"
private val REGEXSUBFIX = "=([^&]*)"
}
/**
* Extracts a part from a URL
*/
@ExpressionDescription(
usage = "_FUNC_(url, partToExtract[, key]) - Extracts a part from a URL.",
examples = """
Examples:
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'HOST');
spark.apache.org
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'QUERY');
query=1
> SELECT _FUNC_('http://spark.apache.org/path?query=1', 'QUERY', 'query');
1
""",
since = "2.0.0")
/**
 * Implementation notes:
 *  - children are (url, partToExtract[, key]); all arguments are strings.
 *  - Constant (Literal) arguments are pre-parsed once via the lazy cached*
 *    fields instead of being re-parsed for every row.
 *  - A malformed URL, an unknown part name, a three-argument call whose part
 *    is not QUERY, or a key that is absent from the query all yield null.
 *  - Interpreted-only: CodegenFallback, no doGenCode.
 */
case class ParseUrl(children: Seq[Expression])
extends Expression with ExpectsInputTypes with CodegenFallback {
override def nullable: Boolean = true
override def inputTypes: Seq[DataType] = Seq.fill(children.size)(StringType)
override def dataType: DataType = StringType
override def prettyName: String = "parse_url"
// If the url is a constant, cache the URL object so that we don't need to convert url
// from UTF8String to String to URL for every row.
@transient private lazy val cachedUrl = children(0) match {
case Literal(url: UTF8String, _) if url ne null => getUrl(url)
case _ => null
}
// If the key is a constant, cache the Pattern object so that we don't need to convert key
// from UTF8String to String to StringBuilder to String to Pattern for every row.
// Lazy: children(2) is only accessed on the three-argument path.
@transient private lazy val cachedPattern = children(2) match {
case Literal(key: UTF8String, _) if key ne null => getPattern(key)
case _ => null
}
// If the partToExtract is a constant, cache the Extract part function so that we don't need
// to check the partToExtract for every row.
@transient private lazy val cachedExtractPartFunc = children(1) match {
case Literal(part: UTF8String, _) => getExtractPartFunc(part)
case _ => null
}
import ParseUrl._
// parse_url accepts only two or three arguments; anything else fails analysis.
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size > 3 || children.size < 2) {
TypeCheckResult.TypeCheckFailure(s"$prettyName function requires two or three arguments")
} else {
super[ExpectsInputTypes].checkInputDataTypes()
}
}
// Builds the per-key query regex: key anchored at '^' or after '&', value
// captured (group 2) up to the next '&'.
private def getPattern(key: UTF8String): Pattern = {
Pattern.compile(REGEXPREFIX + key.toString + REGEXSUBFIX)
}
// Returns null for syntactically invalid URLs instead of throwing.
private def getUrl(url: UTF8String): URI = {
try {
new URI(url.toString)
} catch {
case e: URISyntaxException => null
}
}
// Maps a part name to the matching URI accessor. The raw getters
// (getRawPath/getRawQuery/...) are used deliberately: URI's plain accessors
// would percent-decode the component first, whereas parse_url returns it
// still encoded. Unknown part names map to a constant-null function.
private def getExtractPartFunc(partToExtract: UTF8String): URI => String = {
partToExtract match {
case HOST => _.getHost
case PATH => _.getRawPath
case QUERY => _.getRawQuery
case REF => _.getRawFragment
case PROTOCOL => _.getScheme
case FILE =>
// FILE = path plus ("?" + query) when a query is present.
(url: URI) =>
if (url.getRawQuery ne null) {
url.getRawPath + "?" + url.getRawQuery
} else {
url.getRawPath
}
case AUTHORITY => _.getRawAuthority
case USERINFO => _.getRawUserInfo
case _ => (url: URI) => null
}
}
// Runs the key regex over the query string; group(2) holds the value.
private def extractValueFromQuery(query: UTF8String, pattern: Pattern): UTF8String = {
val m = pattern.matcher(query.toString)
if (m.find()) {
UTF8String.fromString(m.group(2))
} else {
null
}
}
// Prefers the cached extractor built from a constant partToExtract.
private def extractFromUrl(url: URI, partToExtract: UTF8String): UTF8String = {
if (cachedExtractPartFunc ne null) {
UTF8String.fromString(cachedExtractPartFunc.apply(url))
} else {
UTF8String.fromString(getExtractPartFunc(partToExtract).apply(url))
}
}
// Two-argument path: parse (or reuse the cached) URI and extract the part.
private def parseUrlWithoutKey(url: UTF8String, partToExtract: UTF8String): UTF8String = {
if (cachedUrl ne null) {
extractFromUrl(cachedUrl, partToExtract)
} else {
val currentUrl = getUrl(url)
if (currentUrl ne null) {
extractFromUrl(currentUrl, partToExtract)
} else {
null
}
}
}
override def eval(input: InternalRow): Any = {
// Any null argument makes the whole result null.
val evaluated = children.map{e => e.eval(input).asInstanceOf[UTF8String]}
if (evaluated.contains(null)) return null
if (evaluated.size == 2) {
parseUrlWithoutKey(evaluated(0), evaluated(1))
} else {
// 3-arg, i.e. QUERY with key
assert(evaluated.size == 3)
if (evaluated(1) != QUERY) {
return null
}
val query = parseUrlWithoutKey(evaluated(0), evaluated(1))
if (query eq null) {
return null
}
if (cachedPattern ne null) {
extractValueFromQuery(query, cachedPattern)
} else {
extractValueFromQuery(query, getPattern(evaluated(2)))
}
}
}
}
/**
* Returns the input formatted according do printf-style format strings
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(strfmt, obj, ...) - Returns a formatted string from printf-style format strings.",
examples = """
Examples:
> SELECT _FUNC_("Hello World %d %s", 100, "days");
Hello World 100 days
""",
since = "1.5.0")
// scalastyle:on line.size.limit
case class FormatString(children: Expression*) extends Expression with ImplicitCastInputTypes {
require(children.nonEmpty, "format_string() should take at least 1 argument")
override def foldable: Boolean = children.forall(_.foldable)
// Only a null format string makes the result null; null *arguments* are
// handed to java.util.Formatter as-is.
override def nullable: Boolean = children(0).nullable
override def dataType: DataType = StringType
// First child is the printf-style pattern; the rest may be of any type.
override def inputTypes: Seq[AbstractDataType] =
StringType :: List.fill(children.size - 1)(AnyDataType)
override def eval(input: InternalRow): Any = {
val pattern = children(0).eval(input)
if (pattern == null) {
null
} else {
val sb = new StringBuffer()
// Locale.US pins number formatting (decimal separator etc.) so results
// do not depend on the JVM's default locale.
val formatter = new java.util.Formatter(sb, Locale.US)
val arglist = children.tail.map(_.eval(input).asInstanceOf[AnyRef])
formatter.format(pattern.asInstanceOf[UTF8String].toString, arglist: _*)
UTF8String.fromString(sb.toString)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val pattern = children.head.genCode(ctx)
// Each remaining child is evaluated into an Object[] slot for Formatter.
val argListGen = children.tail.map(x => (x.dataType, x.genCode(ctx)))
val argList = ctx.freshName("argLists")
val numArgLists = argListGen.length
val argListCode = argListGen.zipWithIndex.map { case(v, index) =>
val value =
if (CodeGenerator.boxedType(v._1) != CodeGenerator.javaType(v._1)) {
// Java primitives get boxed in order to allow null values.
s"(${v._2.isNull}) ? (${CodeGenerator.boxedType(v._1)}) null : " +
s"new ${CodeGenerator.boxedType(v._1)}(${v._2.value})"
} else {
s"(${v._2.isNull}) ? null : ${v._2.value}"
}
s"""
${v._2.code}
$argList[$index] = $value;
"""
}
// Split the per-argument assignments into helper methods so a call with
// many arguments does not exceed the JVM's 64KB method limit.
val argListCodes = ctx.splitExpressionsWithCurrentInputs(
expressions = argListCode,
funcName = "valueFormatString",
extraArguments = ("Object[]", argList) :: Nil)
val form = ctx.freshName("formatter")
val formatter = classOf[java.util.Formatter].getName
val sb = ctx.freshName("sb")
val stringBuffer = classOf[StringBuffer].getName
ev.copy(code = code"""
${pattern.code}
boolean ${ev.isNull} = ${pattern.isNull};
${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (!${ev.isNull}) {
$stringBuffer $sb = new $stringBuffer();
$formatter $form = new $formatter($sb, ${classOf[Locale].getName}.US);
Object[] $argList = new Object[$numArgLists];
$argListCodes
$form.format(${pattern.value}.toString(), $argList);
${ev.value} = UTF8String.fromString($sb.toString());
}""")
}
override def prettyName: String = "format_string"
}
/**
* Returns string, with the first letter of each word in uppercase, all other letters in lowercase.
* Words are delimited by whitespace.
*/
@ExpressionDescription(
usage = """
_FUNC_(str) - Returns `str` with the first letter of each word in uppercase.
All other letters are in lowercase. Words are delimited by white space.
""",
examples = """
Examples:
> SELECT _FUNC_('sPark sql');
Spark Sql
""",
since = "1.5.0")
/**
 * initcap(str): lower-cases the whole input, then upper-cases the first
 * letter of each whitespace-delimited word (UTF8String.toTitleCase).
 */
case class InitCap(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def inputTypes: Seq[DataType] = StringType :: Nil

  override def dataType: DataType = StringType

  override def nullSafeEval(string: Any): Any = {
    // scalastyle:off caselocale
    val lowered = string.asInstanceOf[UTF8String].toLowerCase
    lowered.toTitleCase
    // scalastyle:on caselocale
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, input => s"$input.toLowerCase().toTitleCase()")
}
/**
* Returns the string which repeat the given string value n times.
*/
@ExpressionDescription(
usage = "_FUNC_(str, n) - Returns the string which repeats the given string value n times.",
examples = """
Examples:
> SELECT _FUNC_('123', 2);
123123
""",
since = "1.5.0")
/** repeat(str, n): the input string concatenated with itself `n` times. */
case class StringRepeat(str: Expression, times: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  override def left: Expression = str

  override def right: Expression = times

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = StringType :: IntegerType :: Nil

  override def nullSafeEval(string: Any, n: Any): Any =
    string.asInstanceOf[UTF8String].repeat(n.asInstanceOf[Integer])

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, (s, n) => s"($s).repeat($n)")

  override def prettyName: String = "repeat"
}
/**
* Returns a string consisting of n spaces.
*/
@ExpressionDescription(
usage = "_FUNC_(n) - Returns a string consisting of `n` spaces.",
examples = """
Examples:
> SELECT concat(_FUNC_(2), '1');
1
""",
since = "1.5.0")
/** space(n): a string of `n` ASCII spaces; negative counts yield ''. */
case class StringSpace(child: Expression)
  extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = IntegerType :: Nil

  override def nullSafeEval(s: Any): Any = {
    // Clamp negative lengths to zero instead of failing.
    val n = s.asInstanceOf[Int]
    UTF8String.blankString(math.max(n, 0))
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    nullSafeCodeGen(ctx, ev, n =>
      s"${ev.value} = UTF8String.blankString(($n < 0) ? 0 : $n);")

  override def prettyName: String = "space"
}
/**
* A function that takes a substring of its first argument starting at a given position.
* Defined for String and Binary types.
*
* NOTE: that this is not zero based, but 1-based index. The first character in str has index 1.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL', 5);
k SQL
> SELECT _FUNC_('Spark SQL', -3);
SQL
> SELECT _FUNC_('Spark SQL', 5, 1);
k
""",
since = "1.5.0")
// scalastyle:on line.size.limit
/**
 * substring(str, pos[, len]): a slice of a string or byte array using SQL's
 * 1-based indexing; negative `pos` counts from the end. The two-argument
 * form runs to the end of the input.
 */
case class Substring(str: Expression, pos: Expression, len: Expression)
  extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant {

  // Two-argument form: take everything from `pos` onwards.
  def this(str: Expression, pos: Expression) = this(str, pos, Literal(Integer.MAX_VALUE))

  override def dataType: DataType = str.dataType

  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(StringType, BinaryType), IntegerType, IntegerType)

  override def children: Seq[Expression] = Seq(str, pos, len)

  override def nullSafeEval(string: Any, pos: Any, len: Any): Any = {
    val start = pos.asInstanceOf[Int]
    val count = len.asInstanceOf[Int]
    str.dataType match {
      case StringType =>
        string.asInstanceOf[UTF8String].substringSQL(start, count)
      case BinaryType =>
        ByteArray.subStringSQL(string.asInstanceOf[Array[Byte]], start, count)
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val byteArrayCls = classOf[ByteArray].getName
    defineCodeGen(ctx, ev, (s, p, l) => str.dataType match {
      case StringType => s"$s.substringSQL($p, $l)"
      case BinaryType => s"$byteArrayCls.subStringSQL($s, $p, $l)"
    })
  }
}
/**
* Returns the rightmost n characters from the string.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, len) - Returns the rightmost `len`(`len` can be string type) characters from the string `str`,if `len` is less or equal than 0 the result is an empty string.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL', 3);
SQL
""",
since = "2.3.0")
// scalastyle:on line.size.limit
/**
 * right(str, len): the rightmost `len` characters of `str`, expressed as a
 * rewrite to If/Substring (RuntimeReplaceable). A null `str` yields null and
 * len <= 0 yields the empty string.
 */
case class Right(str: Expression, len: Expression, child: Expression) extends RuntimeReplaceable {
  def this(str: Expression, len: Expression) = this(str, len, {
    // Same rewrite as always, built from named pieces:
    // null str -> null; len <= 0 -> ''; otherwise the substring from -len.
    val nullResult = Literal(null, StringType)
    val emptyResult = Literal(UTF8String.EMPTY_UTF8, StringType)
    val suffix = new Substring(str, UnaryMinus(len))
    If(IsNull(str), nullResult, If(LessThanOrEqual(len, Literal(0)), emptyResult, suffix))
  })

  override def flatArguments: Iterator[Any] = Iterator(str, len)

  override def sql: String = s"$prettyName(${str.sql}, ${len.sql})"
}
/**
* Returns the leftmost n characters from the string.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, len) - Returns the leftmost `len`(`len` can be string type) characters from the string `str`,if `len` is less or equal than 0 the result is an empty string.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL', 3);
Spa
""",
since = "2.3.0")
// scalastyle:on line.size.limit
/**
 * left(str, len): the leftmost `len` characters of `str`, expressed as a
 * rewrite to Substring starting at position 1 (RuntimeReplaceable).
 */
case class Left(str: Expression, len: Expression, child: Expression) extends RuntimeReplaceable {
  def this(str: Expression, len: Expression) = this(str, len, Substring(str, Literal(1), len))

  override def flatArguments: Iterator[Any] = Iterator(str, len)

  override def sql: String = s"$prettyName(${str.sql}, ${len.sql})"
}
/**
* A function that returns the char length of the given string expression or
* number of bytes of the given binary expression.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the character length of string data or number of bytes of binary data. The length of string data includes the trailing spaces. The length of binary data includes binary zeros.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL ');
10
> SELECT CHAR_LENGTH('Spark SQL ');
10
> SELECT CHARACTER_LENGTH('Spark SQL ');
10
""",
since = "1.5.0")
// scalastyle:on line.size.limit
/**
 * length(expr): character count for string input, byte count for binary
 * input. Trailing spaces and binary zeros are counted.
 */
case class Length(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = IntegerType

  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType))

  protected override def nullSafeEval(value: Any): Any = child.dataType match {
    case StringType => value.asInstanceOf[UTF8String].numChars
    case BinaryType => value.asInstanceOf[Array[Byte]].length
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Pick the per-type length expression once, then hand it to defineCodeGen.
    val lengthOf: String => String = child.dataType match {
      case StringType => input => s"($input).numChars()"
      case BinaryType => input => s"($input).length"
    }
    defineCodeGen(ctx, ev, lengthOf)
  }
}
/**
* A function that returns the bit length of the given string or binary expression.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the bit length of string data or number of bits of binary data.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL');
72
""",
since = "2.3.0")
/** bit_length(expr): number of bits (bytes * 8) of string or binary input. */
case class BitLength(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = IntegerType

  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType))

  protected override def nullSafeEval(value: Any): Any = child.dataType match {
    case StringType => value.asInstanceOf[UTF8String].numBytes * 8
    case BinaryType => value.asInstanceOf[Array[Byte]].length * 8
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Pick the per-type byte-count expression once, then scale to bits.
    val bitsOf: String => String = child.dataType match {
      case StringType => input => s"($input).numBytes() * 8"
      case BinaryType => input => s"($input).length * 8"
    }
    defineCodeGen(ctx, ev, bitsOf)
  }

  override def prettyName: String = "bit_length"
}
/**
* A function that returns the byte length of the given string or binary expression.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the byte length of string data or number of bytes of binary " +
"data.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL');
9
""",
since = "2.3.0")
/** octet_length(expr): number of bytes of string or binary input. */
case class OctetLength(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = IntegerType

  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType))

  protected override def nullSafeEval(value: Any): Any = child.dataType match {
    case StringType => value.asInstanceOf[UTF8String].numBytes
    case BinaryType => value.asInstanceOf[Array[Byte]].length
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Pick the per-type byte-count expression once, then hand it to defineCodeGen.
    val bytesOf: String => String = child.dataType match {
      case StringType => input => s"($input).numBytes()"
      case BinaryType => input => s"($input).length"
    }
    defineCodeGen(ctx, ev, bytesOf)
  }

  override def prettyName: String = "octet_length"
}
/**
* A function that return the Levenshtein distance between the two given strings.
*/
@ExpressionDescription(
usage = "_FUNC_(str1, str2) - Returns the Levenshtein distance between the two given strings.",
examples = """
Examples:
> SELECT _FUNC_('kitten', 'sitting');
3
""",
since = "1.5.0")
/** levenshtein(str1, str2): edit distance between the two input strings. */
case class Levenshtein(left: Expression, right: Expression) extends BinaryExpression
  with ImplicitCastInputTypes {

  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType)

  override def dataType: DataType = IntegerType

  protected override def nullSafeEval(leftValue: Any, rightValue: Any): Any = {
    val a = leftValue.asInstanceOf[UTF8String]
    val b = rightValue.asInstanceOf[UTF8String]
    a.levenshteinDistance(b)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    nullSafeCodeGen(ctx, ev, (a, b) => s"${ev.value} = $a.levenshteinDistance($b);")
}
/**
* A function that return Soundex code of the given string expression.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns Soundex code of the string.",
examples = """
Examples:
> SELECT _FUNC_('Miller');
M460
""",
since = "1.5.0")
/** soundex(str): the Soundex phonetic code of the input string. */
case class SoundEx(child: Expression) extends UnaryExpression with ExpectsInputTypes {

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = StringType :: Nil

  override def nullSafeEval(input: Any): Any = input.asInstanceOf[UTF8String].soundex()

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, in => s"$in.soundex()")
}
/**
* Returns the numeric value of the first character of str.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Returns the numeric value of the first character of `str`.",
examples = """
Examples:
> SELECT _FUNC_('222');
50
> SELECT _FUNC_(2);
50
""",
since = "1.5.0")
/**
 * ascii(str): the first byte of the input's UTF-8 encoding as an int, or 0
 * for the empty string.
 */
case class Ascii(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = IntegerType

  override def inputTypes: Seq[DataType] = StringType :: Nil

  protected override def nullSafeEval(string: Any): Any = {
    val bytes = string.asInstanceOf[UTF8String].getBytes
    if (bytes.isEmpty) 0 else bytes(0).toInt
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, input => {
      val buf = ctx.freshName("bytes")
      // Same empty-string guard as the interpreted path, as a conditional.
      s"""
        byte[] $buf = $input.getBytes();
        ${ev.value} = ($buf.length > 0) ? (int) $buf[0] : 0;
      """
    })
  }
}
/**
* Returns the ASCII character having the binary equivalent to n.
* If n is larger than 256 the result is equivalent to chr(n % 256)
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the ASCII character having the binary equivalent to `expr`. If n is larger than 256 the result is equivalent to chr(n % 256)",
examples = """
Examples:
> SELECT _FUNC_(65);
A
""",
since = "2.3.0")
// scalastyle:on line.size.limit
/**
 * chr(n): the character with code point `n & 0xFF`. Negative input gives the
 * empty string; a masked value of 0 gives the NUL character (U+0000).
 */
case class Chr(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = LongType :: Nil

  protected override def nullSafeEval(lon: Any): Any = {
    val n = lon.asInstanceOf[Long]
    if (n < 0) {
      UTF8String.EMPTY_UTF8
    } else {
      val masked = n & 0xFF
      if (masked == 0) {
        UTF8String.fromString(Character.MIN_VALUE.toString)
      } else {
        UTF8String.fromString(masked.toChar.toString)
      }
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, n => {
      s"""
        if ($n < 0) {
          ${ev.value} = UTF8String.EMPTY_UTF8;
        } else if (($n & 0xFF) == 0) {
          ${ev.value} = UTF8String.fromString(String.valueOf(Character.MIN_VALUE));
        } else {
          ${ev.value} = UTF8String.fromString(String.valueOf((char) ($n & 0xFF)));
        }
      """
    })
  }
}
/**
* Converts the argument from binary to a base 64 string.
*/
@ExpressionDescription(
usage = "_FUNC_(bin) - Converts the argument from a binary `bin` to a base 64 string.",
examples = """
Examples:
> SELECT _FUNC_('Spark SQL');
U3BhcmsgU1FM
""",
since = "1.5.0")
/** base64(bin): Base64-encodes a binary value into a string. */
case class Base64(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = BinaryType :: Nil

  protected override def nullSafeEval(bytes: Any): Any = {
    val encoded = CommonsBase64.encodeBase64(bytes.asInstanceOf[Array[Byte]])
    UTF8String.fromBytes(encoded)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, input => {
      s"""${ev.value} = UTF8String.fromBytes(
            ${classOf[CommonsBase64].getName}.encodeBase64($input));
      """
    })
  }
}
/**
* Converts the argument from a base 64 string to BINARY.
*/
@ExpressionDescription(
usage = "_FUNC_(str) - Converts the argument from a base 64 string `str` to a binary.",
examples = """
Examples:
> SELECT _FUNC_('U3BhcmsgU1FM');
Spark SQL
""",
since = "1.5.0")
/** unbase64(str): decodes a Base64 string back into a binary value. */
case class UnBase64(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = BinaryType

  override def inputTypes: Seq[DataType] = StringType :: Nil

  protected override def nullSafeEval(string: Any): Any = {
    val encoded = string.asInstanceOf[UTF8String].toString
    CommonsBase64.decodeBase64(encoded)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, input => {
      s"""
        ${ev.value} = ${classOf[CommonsBase64].getName}.decodeBase64($input.toString());
      """
    })
  }
}
/**
* Decodes the first argument into a String using the provided character set
* (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
* If either argument is null, the result will also be null.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(bin, charset) - Decodes the first argument using the second argument character set.",
examples = """
Examples:
> SELECT _FUNC_(encode('abc', 'utf-8'), 'utf-8');
abc
""",
since = "1.5.0")
// scalastyle:on line.size.limit
/**
 * decode(bin, charset): decodes a binary value into a string using the named
 * character set (e.g. 'UTF-8', 'ISO-8859-1').
 */
case class Decode(bin: Expression, charset: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  override def left: Expression = bin

  override def right: Expression = charset

  override def dataType: DataType = StringType

  override def inputTypes: Seq[DataType] = BinaryType :: StringType :: Nil

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val charsetName = input2.asInstanceOf[UTF8String].toString
    val decoded = new String(input1.asInstanceOf[Array[Byte]], charsetName)
    UTF8String.fromString(decoded)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (bytes, charsetName) => {
      // new String(byte[], String) declares the checked
      // UnsupportedEncodingException, which generated Java must handle.
      s"""
        try {
          ${ev.value} = UTF8String.fromString(new String($bytes, $charsetName.toString()));
        } catch (java.io.UnsupportedEncodingException e) {
          org.apache.spark.unsafe.Platform.throwException(e);
        }
      """
    })
  }
}
/**
* Encodes the first argument into a BINARY using the provided character set
* (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16').
* If either argument is null, the result will also be null.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, charset) - Encodes the first argument using the second argument character set.",
examples = """
Examples:
> SELECT _FUNC_('abc', 'utf-8');
abc
""",
since = "1.5.0")
// scalastyle:on line.size.limit
/**
 * encode(str, charset): encodes a string into a binary value using the named
 * character set (e.g. 'UTF-8', 'ISO-8859-1').
 */
case class Encode(value: Expression, charset: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  override def left: Expression = value

  override def right: Expression = charset

  override def dataType: DataType = BinaryType

  override def inputTypes: Seq[DataType] = StringType :: StringType :: Nil

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val charsetName = input2.asInstanceOf[UTF8String].toString
    input1.asInstanceOf[UTF8String].toString.getBytes(charsetName)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (string, charsetName) => {
      // String.getBytes(String) declares the checked
      // UnsupportedEncodingException, which generated Java must handle.
      s"""
        try {
          ${ev.value} = $string.toString().getBytes($charsetName.toString());
        } catch (java.io.UnsupportedEncodingException e) {
          org.apache.spark.unsafe.Platform.throwException(e);
        }
      """
    })
  }
}
/**
* Formats the number X to a format like '#,###,###.##', rounded to D decimal places,
* and returns the result as a string. If D is 0, the result has no decimal point or
* fractional part.
*/
@ExpressionDescription(
usage = """
_FUNC_(expr1, expr2) - Formats the number `expr1` like '#,###,###.##', rounded to `expr2`
decimal places. If `expr2` is 0, the result has no decimal point or fractional part.
`expr2` also accept a user specified format.
This is supposed to function like MySQL's FORMAT.
""",
examples = """
Examples:
> SELECT _FUNC_(12332.123456, 4);
12,332.1235
> SELECT _FUNC_(12332.123456, '##################.###');
12332.123
""",
since = "1.5.0")
case class FormatNumber(x: Expression, d: Expression)
extends BinaryExpression with ExpectsInputTypes {
override def left: Expression = x
override def right: Expression = d
override def dataType: DataType = StringType
// Always nullable: a negative integer `d` produces null regardless of inputs.
override def nullable: Boolean = true
override def inputTypes: Seq[AbstractDataType] =
Seq(NumericType, TypeCollection(IntegerType, StringType))
// Base pattern: grouped thousands, no fractional digits.
private val defaultFormat = "#,###,###,###,###,###,##0"
// Associated with the pattern, for the last d value, and we will update the
// pattern (DecimalFormat) once the new coming d value differ with the last one.
// This is an Option to distinguish between 0 (numberFormat is valid) and uninitialized after
// serialization (numberFormat has not been updated for dValue = 0).
@transient
private var lastDIntValue: Option[Int] = None
@transient
private var lastDStringValue: Option[String] = None
// A cached DecimalFormat, for performance concern, we will change it
// only if the d value changed.
// NOTE(review): this mutable cache assumes a single thread evaluates a given
// expression instance at a time — TODO confirm against the execution model.
@transient
private lazy val pattern: StringBuffer = new StringBuffer()
// SPARK-13515: US Locale configures the DecimalFormat object to use a dot ('.')
// as a decimal separator.
@transient
private lazy val numberFormat = new DecimalFormat("", new DecimalFormatSymbols(Locale.US))
override protected def nullSafeEval(xObject: Any, dObject: Any): Any = {
right.dataType match {
case IntegerType =>
// Integer `d` = number of decimal places; negative is not an error but null.
val dValue = dObject.asInstanceOf[Int]
if (dValue < 0) {
return null
}
lastDIntValue match {
case Some(last) if last == dValue =>
// use the current pattern
case _ =>
// construct a new DecimalFormat only if a new dValue
pattern.delete(0, pattern.length)
pattern.append(defaultFormat)
// decimal place
if (dValue > 0) {
pattern.append(".")
var i = 0
while (i < dValue) {
i += 1
pattern.append("0")
}
}
lastDIntValue = Some(dValue)
numberFormat.applyLocalizedPattern(pattern.toString)
}
case StringType =>
// String `d` is itself a DecimalFormat pattern; empty means the default.
val dValue = dObject.asInstanceOf[UTF8String].toString
lastDStringValue match {
case Some(last) if last == dValue =>
case _ =>
pattern.delete(0, pattern.length)
lastDStringValue = Some(dValue)
if (dValue.isEmpty) {
numberFormat.applyLocalizedPattern(defaultFormat)
} else {
numberFormat.applyLocalizedPattern(dValue)
}
}
}
// Dispatch on the numeric input type; DecimalFormat performs the rounding.
x.dataType match {
case ByteType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Byte]))
case ShortType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Short]))
case FloatType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Float]))
case IntegerType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Int]))
case LongType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Long]))
case DoubleType => UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Double]))
case _: DecimalType =>
UTF8String.fromString(numberFormat.format(xObject.asInstanceOf[Decimal].toJavaBigDecimal))
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (num, d) => {
// Decimal inputs must be converted to java.math.BigDecimal before formatting.
def typeHelper(p: String): String = {
x.dataType match {
case _ : DecimalType => s"""$p.toJavaBigDecimal()"""
case _ => s"$p"
}
}
val sb = classOf[StringBuffer].getName
val df = classOf[DecimalFormat].getName
val dfs = classOf[DecimalFormatSymbols].getName
val l = classOf[Locale].getName
// SPARK-13515: US Locale configures the DecimalFormat object to use a dot ('.')
// as a decimal separator.
val usLocale = "US"
val numberFormat = ctx.addMutableState(df, "numberFormat",
v => s"""$v = new $df("", new $dfs($l.$usLocale));""")
right.dataType match {
case IntegerType =>
val pattern = ctx.addMutableState(sb, "pattern", v => s"$v = new $sb();")
val i = ctx.freshName("i")
// -100 is a sentinel meaning "no pattern has been applied yet".
val lastDValue =
ctx.addMutableState(CodeGenerator.JAVA_INT, "lastDValue", v => s"$v = -100;")
s"""
if ($d >= 0) {
$pattern.delete(0, $pattern.length());
if ($d != $lastDValue) {
$pattern.append("$defaultFormat");
if ($d > 0) {
$pattern.append(".");
for (int $i = 0; $i < $d; $i++) {
$pattern.append("0");
}
}
$lastDValue = $d;
$numberFormat.applyLocalizedPattern($pattern.toString());
}
${ev.value} = UTF8String.fromString($numberFormat.format(${typeHelper(num)}));
} else {
${ev.value} = null;
${ev.isNull} = true;
}
"""
case StringType =>
val lastDValue = ctx.addMutableState("String", "lastDValue", v => s"""$v = null;""")
val dValue = ctx.freshName("dValue")
s"""
String $dValue = $d.toString();
if (!$dValue.equals($lastDValue)) {
$lastDValue = $dValue;
if ($dValue.isEmpty()) {
$numberFormat.applyLocalizedPattern("$defaultFormat");
} else {
$numberFormat.applyLocalizedPattern($dValue);
}
}
${ev.value} = UTF8String.fromString($numberFormat.format(${typeHelper(num)}));
"""
}
})
}
override def prettyName: String = "format_number"
}
/**
 * Splits a string into arrays of sentences, where each sentence is an array of words.
 * The 'lang' and 'country' arguments are optional; when omitted they default to empty
 * strings (a neutral Locale), and if either evaluates to null, Locale.US is used.
 */
@ExpressionDescription(
usage = "_FUNC_(str[, lang, country]) - Splits `str` into an array of array of words.",
examples = """
Examples:
> SELECT _FUNC_('Hi there! Good morning.');
[["Hi","there"],["Good","morning"]]
""",
since = "2.0.0")
case class Sentences(
str: Expression,
language: Expression = Literal(""),
country: Expression = Literal(""))
extends Expression with ImplicitCastInputTypes with CodegenFallback {
// One- and two-argument SQL forms default the remaining locale parts to "".
def this(str: Expression) = this(str, Literal(""), Literal(""))
def this(str: Expression, language: Expression) = this(str, language, Literal(""))
override def nullable: Boolean = true
// array<array<string>>: outer = sentences, inner = words of one sentence.
override def dataType: DataType =
ArrayType(ArrayType(StringType, containsNull = false), containsNull = false)
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
override def children: Seq[Expression] = str :: language :: country :: Nil
override def eval(input: InternalRow): Any = {
val string = str.eval(input)
if (string == null) {
null
} else {
val languageStr = language.eval(input).asInstanceOf[UTF8String]
val countryStr = country.eval(input).asInstanceOf[UTF8String]
// Omitted arguments arrive as empty strings and build a neutral
// Locale("", ""); only a null language/country falls back to Locale.US.
val locale = if (languageStr != null && countryStr != null) {
new Locale(languageStr.toString, countryStr.toString)
} else {
Locale.US
}
getSentences(string.asInstanceOf[UTF8String].toString, locale)
}
}
// Splits `sentences` with java.text.BreakIterator: the outer loop walks
// sentence boundaries, the inner loop walks word boundaries within each
// sentence. `idx`/`widx` track the start of the current token. Tokens whose
// first character is not a letter or digit (punctuation, whitespace runs)
// are dropped.
private def getSentences(sentences: String, locale: Locale) = {
val bi = BreakIterator.getSentenceInstance(locale)
bi.setText(sentences)
var idx = 0
val result = new ArrayBuffer[GenericArrayData]
while (bi.next != BreakIterator.DONE) {
val sentence = sentences.substring(idx, bi.current)
idx = bi.current
val wi = BreakIterator.getWordInstance(locale)
var widx = 0
wi.setText(sentence)
val words = new ArrayBuffer[UTF8String]
while (wi.next != BreakIterator.DONE) {
val word = sentence.substring(widx, wi.current)
widx = wi.current
if (Character.isLetterOrDigit(word.charAt(0))) words += UTF8String.fromString(word)
}
result += new GenericArrayData(words)
}
new GenericArrayData(result)
}
}
| goldmedal/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala | Scala | apache-2.0 | 81,350 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs._
import com.intellij.util.ArrayUtil
import com.intellij.util.io.StringRef
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil._
/**
* @author adkozlov
*/
package object elements {

  /** Read-side helpers for deserializing stub data from a [[StubInputStream]]. */
  implicit class StubInputStreamExt(val dataStream: StubInputStream) extends AnyVal {

    /** Reads a boolean presence flag, followed by a name only when the flag is set. */
    def readOptionName: Option[StringRef] =
      if (dataStream.readBoolean) Some(dataStream.readName)
      else None

    /** Reads a length-prefixed sequence of names, in stream order. */
    def readNames: Array[StringRef] = {
      val length = dataStream.readInt
      Array.fill(length)(dataStream.readName)
    }
  }

  /** Write-side helpers mirroring [[StubInputStreamExt]]. */
  implicit class StubOutputStreamExt(val dataStream: StubOutputStream) extends AnyVal {

    /** Writes a boolean presence flag, followed by the name only when it is present. */
    def writeOptionName(maybeName: Option[String]): Unit =
      maybeName match {
        case Some(name) =>
          dataStream.writeBoolean(true)
          dataStream.writeName(name)
        case None =>
          dataStream.writeBoolean(false)
      }

    /** Writes a length prefix followed by every name, in array order. */
    def writeNames(names: Array[String]): Unit = {
      dataStream.writeInt(names.length)
      for (name <- names) {
        dataStream.writeName(name)
      }
    }
  }

  /** Converts an optional [[StringRef]] to an optional, non-empty string. */
  implicit class MaybeStringRefExt(val maybeStringRef: Option[StringRef]) extends AnyVal {
    def asString: Option[String] =
      maybeStringRef.map(StringRef.toString).filter(_.nonEmpty)
  }

  /** Converts an optional string to a [[StringRef]], treating empty strings as absent. */
  implicit class MaybeStringExt(val maybeString: Option[String]) extends AnyVal {
    def asReference: Option[StringRef] =
      maybeString.collect {
        case string if string.nonEmpty => StringRef.fromString(string)
      }
  }

  /** Converts an array of [[StringRef]]s to non-empty strings, sharing the empty array. */
  implicit class StringRefArrayExt(val stringRefs: Array[StringRef]) extends AnyVal {
    def asStrings: Array[String] = {
      val strings = stringRefs.map(StringRef.toString).filter(_.nonEmpty)
      // Reuse the canonical empty array instead of allocating a fresh one.
      if (strings.isEmpty) ArrayUtil.EMPTY_STRING_ARRAY else strings
    }
  }

  /** Converts PSI elements to [[StringRef]]s of their (optionally transformed) text. */
  implicit class PsiElementsExt(val elements: Seq[PsiElement]) extends AnyVal {
    def asReferences(transformText: String => String = identity): Array[StringRef] =
      if (elements.isEmpty) StringRef.EMPTY_ARRAY
      else elements.map(element => StringRef.fromString(transformText(element.getText))).toArray
  }

  /** Converts non-empty strings to [[StringRef]]s, sharing the empty array. */
  implicit class StringsExt(val strings: Iterable[String]) extends AnyVal {
    def asReferences: Array[StringRef] = {
      val nonEmptyStrings = strings.filter(_.nonEmpty)
      if (nonEmptyStrings.isEmpty) StringRef.EMPTY_ARRAY
      else nonEmptyStrings.map(StringRef.fromString).toArray
    }
  }

  /** Index-sink helpers shared by the stub element serializers. */
  implicit class SerializerExt[S <: StubElement[T], T <: PsiElement](val serializer: ScStubElementType[S, T]) extends AnyVal {

    /** Records every non-null name, cleaned via `cleanFqn`, under the given index key. */
    def indexStub(names: Array[String], sink: IndexSink, key: StubIndexKey[String, _ <: T]): Unit =
      for {
        name <- names
        if name != null
        cleaned = cleanFqn(name)
        if cleaned.nonEmpty
      } sink.occurrence(key, cleaned)

    /** Records the element under the dedicated implicits index. */
    def indexImplicit(sink: IndexSink): Unit =
      sink.occurrence(index.ScalaIndexKeys.IMPLICITS_KEY, "implicit")
  }
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/package.scala | Scala | apache-2.0 | 3,042 |
/* Copyright 2012 Christian Douven
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package almhirt.common
import java.util.concurrent.TimeoutException
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.Builder
import scalaz.syntax.validation._
import scalaz.Validation.FlatMap._
import scala.concurrent.{ Future, Promise, Await, ExecutionContext }
import scala.concurrent.duration.Duration
import scalaz.Validation.FlatMap
import almhirt.common._
import almhirt.almfuture.all.akkaFutureToAlmhirtFuture
import almhirt.problem.CauseIsThrowable
import almhirt.problem.HasAThrowable
/**
* A future based on [[akka.dispatch.Future]].
*
* The intention is to have a future that doesn't rely on the Either type where Left[Throwable] identifies an error.
* Instead a result should always be in a [[almhirt.validation.AlmValidation]] which is in fact a [[scalaz.Validation]]
* based on [[almhirt.validation.Problem]] as the error type
*
* Errors which would end in a Throwable end in a Problem .
*/
final class AlmFuture[+R](val underlying: Future[AlmValidation[R]]) {
  import almhirt.almfuture.all._
  /** Map the contents to this Future to another content */
  def map[T](compute: R ⇒ T)(implicit executionContext: ExecutionContext): AlmFuture[T] =
    new AlmFuture[T](underlying.map(validation ⇒ validation map compute))
  /** Map the underlying validation. A failure will cause the Future to fail */
  def mapV[T](compute: R ⇒ AlmValidation[T])(implicit executionContext: ExecutionContext): AlmFuture[T] =
    new AlmFuture[T](underlying.map { validation ⇒ validation flatMap compute })
  /** Map the underlying Problem in case of a failure */
  def leftMap(withFailure: Problem ⇒ Problem)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    val p = Promise[AlmValidation[R]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        p complete (scala.util.Success(withFailure(handleThrowable(exn)).failure))
      case scala.util.Success(validation) ⇒
        // NOTE: the inner `p` here is the Problem bound by fold; it shadows the Promise `p` above.
        p complete (scala.util.Success(validation fold (p ⇒ withFailure(p).failure, success ⇒ success.success)))
    }
    new AlmFuture(p.future)
  }
  /** Alias for leftMap */
  def mapFailure(withFailure: Problem ⇒ Problem)(implicit executionContext: ExecutionContext): AlmFuture[R] =
    leftMap(withFailure)
  /** Map the underlying Problem in case it is a Timeout */
  def mapTimeout(withTimeout: Problem ⇒ Problem)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    val p = Promise[AlmValidation[R]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        handleThrowable(exn) match {
          case OperationTimedOutProblem(prob) ⇒ p complete (scala.util.Success(withTimeout(prob).failure))
          // Non-timeout throwables are propagated unchanged as a failed future.
          case prob ⇒ p failure (exn)
        }
      case scala.util.Success(validation) ⇒
        validation fold (
          fail ⇒ fail match {
            case OperationTimedOutProblem(prob) ⇒ p complete (scala.util.Success(withTimeout(prob).failure))
            case prob ⇒ p complete scala.util.Success(prob.failure)
          },
          succ ⇒ p complete scala.util.Success(succ.success))
    }
    new AlmFuture(p.future)
  }
  /** Change the message of a timeout */
  def mapTimeoutMessage(newMessage: String ⇒ String)(implicit executionContext: ExecutionContext): AlmFuture[R] =
    new AlmFuture[R](underlying.map { validation ⇒
      validation leftMap {
        case OperationTimedOutProblem(p) ⇒
          p.withMessage(newMessage(p.message))
        case p ⇒
          p
      }
    })
  /** Monadic bind: continue with another AlmFuture; a failure short-circuits. */
  def flatMap[T](compute: R ⇒ AlmFuture[T])(implicit executionContext: ExecutionContext): AlmFuture[T] =
    new AlmFuture(underlying.flatMap { validation ⇒
      validation fold (
        f ⇒ Future.successful(f.failure[T]),
        r ⇒ compute(r).underlying)
    })
  /** This has no real usage, but is necessary for for-comprehensions */
  def filter(pred: R ⇒ Boolean)(implicit executor: ExecutionContext): AlmFuture[R] =
    mapV {
      r ⇒ if (pred(r)) r.success else NoSuchElementProblem("AlmFuture.filter predicate is not satisfied").failure
    }
  final def withFilter(p: R ⇒ Boolean)(implicit executor: ExecutionContext): AlmFuture[R] = filter(p)(executor)
  /** Make the result or problem something else but stay in the future context */
  def fold[T](failure: Problem ⇒ T, success: R ⇒ T)(implicit executionContext: ExecutionContext): AlmFuture[T] = {
    val p = Promise[AlmValidation[T]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        p complete (scala.util.Success(failure(handleThrowable(exn)).success))
      case scala.util.Success(validation) ⇒
        p complete (scala.util.Success((validation fold (failure, success)).success))
    }
    new AlmFuture(p.future)
  }
  /** Make the result or problem something else but stay in the future context */
  def foldV[T](failure: Problem ⇒ AlmValidation[T], success: R ⇒ AlmValidation[T])(implicit executionContext: ExecutionContext): AlmFuture[T] = {
    val p = Promise[AlmValidation[T]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        p complete (scala.util.Success(failure(handleThrowable(exn))))
      case scala.util.Success(validation) ⇒
        p complete (scala.util.Success((validation fold (failure, success))))
    }
    new AlmFuture(p.future)
  }
  /** Fold the content in the future context */
  def foldF[T](failure: Problem ⇒ AlmFuture[T], success: R ⇒ AlmFuture[T])(implicit executionContext: ExecutionContext): AlmFuture[T] = {
    val p = Promise[AlmValidation[T]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        p completeWith failure(handleThrowable(exn)).underlying
      case scala.util.Success(validation) ⇒
        p completeWith (validation fold (failure, success)).underlying
    }
    new AlmFuture(p.future)
  }
  /** Map the success through a partial function; an undefined input throws inside the future. */
  def collect[T](pf: PartialFunction[R, T])(implicit executionContext: ExecutionContext): AlmFuture[T] =
    new AlmFuture(
      underlying.map(validation ⇒
        validation map (v ⇒ pf(v))))
  /** Like collect, but the partial function yields a validation that is flattened. */
  def collectV[T](pf: PartialFunction[R, AlmValidation[T]])(implicit executionContext: ExecutionContext): AlmFuture[T] =
    new AlmFuture(
      underlying.map(validation ⇒
        validation flatMap (v ⇒ pf(v))))
  /** Like collect, but the partial function yields another AlmFuture to continue with. */
  def collectF[T](pf: PartialFunction[R, AlmFuture[T]])(implicit executionContext: ExecutionContext): AlmFuture[T] = {
    val p = Promise[AlmValidation[T]]
    underlying.onComplete {
      case scala.util.Success(validation) ⇒
        validation fold (
          fail ⇒ p complete (scala.util.Success(fail.failure)),
          succ ⇒ p completeWith (pf(succ).underlying))
      case scala.util.Failure(exn) ⇒
        p failure (exn)
    }
    new AlmFuture(p.future)
  }
  /** Cast the success to U; a failed cast becomes a failed validation, not an exception. */
  def mapCast[U: scala.reflect.ClassTag](implicit executionContext: ExecutionContext): AlmFuture[U] = this.mapV { almhirt.almvalidation.all.almCast[U] }
  /** Act on completion */
  def onComplete(handler: AlmValidation[R] ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    underlying.onComplete {
      case scala.util.Success(validation) ⇒ handler(validation)
      case scala.util.Failure(err) ⇒ handler(handleThrowable(err).failure)
    }
    this
  }
  /** Act on completion */
  def onComplete(fail: Problem ⇒ Unit, succ: R ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    underlying.onComplete {
      case scala.util.Success(validation) ⇒ validation fold (fail, succ)
      case scala.util.Failure(err) ⇒ fail(handleThrowable(err))
    }
    this
  }
  /** Use when only interested in a success and a failure result doesn't matter */
  def onSuccess(onSucc: R ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    onComplete(_ fold (_ ⇒ (), onSucc))
    this
  }
  /** As soon as a success is known, schedule the effect */
  @deprecated(message = "Use onSuccess", since = "0.7.6")
  def successEffect(effect: R ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] =
    andThen { _.fold(_ ⇒ (), succ ⇒ effect(succ)) }
  @deprecated(message = "Use onSuccessWithRecoveredFailure", since = "0.7.1")
  def onSuccessWithRejoinedFailure[U >: R](rejoin: Problem ⇒ U, onRes: U ⇒ Unit)(implicit executionContext: ExecutionContext): Unit =
    this.recover(rejoin).onSuccess(onRes)
  /** Use when only interested in a success and a failure can be converted to a success to rejoin with the happy path */
  def onSuccessWithRecoveredFailure[U >: R](rejoin: Problem ⇒ U, onRes: U ⇒ Unit)(implicit executionContext: ExecutionContext): Unit =
    this.recover(rejoin).onSuccess(onRes)
  /** Use when only interested in a failure and a successful result doesn't matter */
  def onFailure(onProb: Problem ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    onComplete(_ fold (onProb, _ ⇒ ()))
    this
  }
  @deprecated(message = "Use onComplete", since = "0.7.6")
  def andThen(effect: AlmValidation[R] ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    new AlmFuture(underlying.andThen {
      case scala.util.Success(r) ⇒ effect(r)
      case scala.util.Failure(err) ⇒ effect(handleThrowable(err).failure)
    })
  }
  @deprecated(message = "Use onComplete", since = "0.7.6")
  def andThen(fail: Problem ⇒ Unit, succ: R ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    new AlmFuture(underlying.andThen {
      case scala.util.Success(r) ⇒ r.fold(fail, succ)
      case scala.util.Failure(err) ⇒ fail(handleThrowable(err))
    })
  }
  @deprecated(message = "Use onFailure", since = "0.5.210")
  def withFailure(effect: Problem ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] =
    failureEffect(effect)
  /** As soon as a failure is known, schedule the effect */
  @deprecated(message = "Use onFailure", since = "0.5.210")
  def failureEffect(effect: Problem ⇒ Unit)(implicit executionContext: ExecutionContext): AlmFuture[R] =
    andThen { _.fold(effect, succ ⇒ ()) }
  @deprecated(message = "Use recover", since = "0.7.1")
  def rejoinFailure[U >: R](rejoin: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      rejoin,
      succ ⇒ succ)
  }
  /** In case of a failure, rejoin with the happy path */
  def recover[U >: R](recover: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      recover,
      succ ⇒ succ)
  }
  /** In case of a failure, rejoin with the happy path */
  @deprecated(message = "Use mapOrRecover", since = "0.7.6")
  def mapRecover[U](map: R ⇒ U, recover: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      recover,
      succ ⇒ map(succ))
  }
  /** In case of a failure, rejoin with the happy path */
  def mapOrRecover[U](map: R ⇒ U, recover: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      recover,
      succ ⇒ map(succ))
  }
  /** extract an U from the success. In case of a failure, rejoin with the happy path */
  @deprecated(message = "Use collectOrRecover", since = "0.7.6")
  def collectRecover[U](collect: PartialFunction[R, U], recover: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      recover,
      succ ⇒ collect(succ))
  }
  /** extract an U from the success. In case of a failure, rejoin with the happy path */
  def collectOrRecover[U](collect: PartialFunction[R, U], recover: Problem ⇒ U)(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.fold[U](
      recover,
      succ ⇒ collect(succ))
  }
  /** A success becomes a failure */
  def divertToFailure(divert: PartialFunction[R, Problem])(implicit executionContext: ExecutionContext): AlmFuture[R] = {
    this.foldV(
      fail ⇒ fail.failure,
      succ ⇒ if (divert.isDefinedAt(succ)) {
        divert(succ).failure
      } else {
        succ.success
      })
  }
  /** Either extract a value or divert the success into a failure; `divert` wins when both apply. */
  def extractDivert[U](extract: PartialFunction[R, U], divert: PartialFunction[R, Problem])(implicit executionContext: ExecutionContext): AlmFuture[U] = {
    this.foldV(
      fail ⇒ fail.failure,
      succ ⇒ if (divert.isDefinedAt(succ)) {
        divert(succ).failure
      } else if (extract.isDefinedAt(succ)) {
        extract(succ).success
      } else {
        UnspecifiedProblem(s"""${succ} is neither handled by extract nor by divert.""").failure
      })
  }
  def isCompleted = underlying.isCompleted
  /** Blocks the current thread for at most `atMost`; exceptions become failed validations. */
  def awaitResult(atMost: Duration): AlmValidation[R] =
    try {
      Await.result(underlying, atMost)
    } catch {
      case exn: Exception ⇒ launderException(exn).failure
    }
  /** Blocks like awaitResult, but escalates a failure by throwing. */
  def awaitResultOrEscalate(atMost: Duration): R = {
    import almhirt.syntax.almvalidation._
    awaitResult(atMost).resultOrEscalate
  }
  /** Fails this future with a timeout problem unless it completes within `after`. */
  def timeout[S: almhirt.almfuture.ActionSchedulingMagnet](after: scala.concurrent.duration.FiniteDuration, scheduler: S)(implicit executor: ExecutionContext): AlmFuture[R] = {
    AlmFuture.timeout(this)(after, scheduler)
  }
  /** Convert this future to a successful future containing the underlying validation that might also be a failure */
  def materializedValidation(implicit executionContext: ExecutionContext): AlmFuture[AlmValidation[R]] = {
    val p = Promise[AlmValidation[AlmValidation[R]]]
    underlying.onComplete {
      case scala.util.Failure(exn) ⇒
        p complete (scala.util.Success(handleThrowable(exn).failure.success))
      case scala.util.Success(validation) ⇒
        p complete (scala.util.Success(validation.success))
    }
    new AlmFuture(p.future)
  }
  /** Convert this future to a future of the std lib */
  def std(implicit executionContext: ExecutionContext): Future[R] = {
    val p = Promise[R]
    onComplete(
      fail ⇒ {
        // A Problem that originally wrapped a Throwable is unwrapped back to that
        // Throwable; everything else is escalated via EscalatedProblemException.
        val res: Throwable =
          fail match {
            case sp: SingleProblem ⇒
              sp match {
                case ExceptionCaughtProblem(_) ⇒
                  sp.cause match {
                    case Some(CauseIsThrowable(HasAThrowable(exn))) ⇒ exn
                    case _ ⇒ new EscalatedProblemException(sp)
                  }
                case _ ⇒ new EscalatedProblemException(sp)
              }
            case pr ⇒ new EscalatedProblemException(pr)
          }
        p.complete(scala.util.Failure(res))
      },
      succ ⇒ p.complete(scala.util.Success(succ)))
    p.future
  }
  /** Alias for std. */
  def toStdFuture(implicit executionContext: ExecutionContext): Future[R] = this.std
}
object AlmFuture {
  import scala.language.higherKinds

  /**
   * Completes the given future with an [[OperationTimedOutProblem]] unless it
   * completes within `after`. The underlying computation is NOT cancelled and
   * may still be running after the timeout fired.
   */
  def timeout[T, S: almhirt.almfuture.ActionSchedulingMagnet](f: AlmFuture[T])(after: scala.concurrent.duration.FiniteDuration, scheduler: S)(implicit executor: ExecutionContext): AlmFuture[T] = {
    val schedulerMagnet = implicitly[almhirt.almfuture.ActionSchedulingMagnet[S]]
    val p = Promise[AlmValidation[T]]
    f.underlying.onComplete(r ⇒ p.tryComplete(r))
    schedulerMagnet.schedule(
      scheduler,
      // Fixed message: the parenthesis was previously never closed ("timely(after X. Be careful...").
      p.tryComplete(scala.util.Success(OperationTimedOutProblem(s"A future did not complete timely (after ${after.defaultUnitString}). Be careful, timed out future might run forever.").failure)),
      after,
      executor)
    new AlmFuture(p.future)
  }

  /** Start a computation which can fail */
  def apply[T](compute: ⇒ AlmValidation[T])(implicit executionContext: ExecutionContext) = new AlmFuture[T](Future { compute }(executionContext))

  /**
   * Take an M of futures and get a future that completes as a whole sequence, once all futures in M completed
   *
   */
  def sequenceAkka[A, M[_] <: Traversable[_]](in: M[AlmFuture[A]])(implicit cbf: CanBuildFrom[M[AlmFuture[A]], AlmValidation[A], M[AlmValidation[A]]], executionContext: ExecutionContext): Future[M[AlmValidation[A]]] = {
    // The cast is unavoidable: `M[_] <: Traversable[_]` erases the element type during the fold.
    in.foldLeft(Future.successful(cbf(in)): Future[Builder[AlmValidation[A], M[AlmValidation[A]]]])((futAcc, futElem) ⇒ for (acc ← futAcc; a ← futElem.asInstanceOf[AlmFuture[A]].underlying) yield (acc += a)).map(_.result)
  }

  /**
   * Take a sequence of futures and get a future that completes as a whole sequence, once all futures in the sequence completed
   *
   */
  def sequence[A](in: Seq[AlmFuture[A]])(implicit executionContext: ExecutionContext): AlmFuture[Seq[A]] = {
    import almhirt.almvalidation.kit._
    import scalaz._, Scalaz._
    val underlyings = in.map(x ⇒ x.underlying).toVector
    val fut = Future.sequence(underlyings).map(seq ⇒ seq.map(_.toAgg).sequence)
    new AlmFuture(fut)
  }

  /** Start a computation that is not expected to fail */
  def compute[T](computation: ⇒ T)(implicit executionContext: ExecutionContext) = new AlmFuture[T](Future { inTryCatch(computation) })

  /** Return a future where the result is already known */
  def completed[T](what: ⇒ AlmValidation[T]) = new AlmFuture[T](Future.successful { unsafe(what) })

  /** Return a future where the successful result is already known */
  def successful[T](result: ⇒ T) = new AlmFuture[T](Future.successful { inTryCatch(result) })

  /** Return a future where a failure is already known */
  def failed[T](prob: ⇒ Problem) = new AlmFuture[T](Future.successful {
    try {
      prob.failure
    } catch {
      case scala.util.control.NonFatal(exn) ⇒ launderException(exn).failure
    }
  })

  /** Returns the result after the given duration */
  @deprecated("Use delayedComputation.", since = "0.7.6")
  def delayed[T](duration: scala.concurrent.duration.FiniteDuration)(result: ⇒ AlmValidation[T]): AlmFuture[T] = {
    val p = Promise[AlmValidation[T]]
    // Use a daemon timer and cancel it after firing. The previous implementation
    // created a non-daemon Timer per call that was never cancelled, leaking a
    // thread each time and keeping the JVM alive.
    val timer = new java.util.Timer(true)
    val r = new java.util.TimerTask() {
      def run() {
        try p.complete(scala.util.Success(result))
        finally timer.cancel()
      }
    }
    timer.schedule(r, duration.toMillis)
    new AlmFuture(p.future)
  }

  /** Returns the value after the given duration */
  @deprecated("Use the other delayedSuccess.", since = "0.7.6")
  def delayedSuccess[T](duration: scala.concurrent.duration.FiniteDuration)(result: ⇒ T): AlmFuture[T] = {
    delayed(duration)(result.success)
  }

  /** Returns the failure with the given Problem after the given duration */
  @deprecated("Use the other delayedFailure.", since = "0.7.6")
  def delayedFailure[T](duration: scala.concurrent.duration.FiniteDuration)(problem: ⇒ Problem): AlmFuture[Nothing] = {
    delayed(duration)(problem.failure)
  }

  /** Returns the result after the given duration */
  def delayedResult[T, S: almhirt.almfuture.ActionSchedulingMagnet](duration: scala.concurrent.duration.FiniteDuration, scheduler: S)(result: AlmValidation[T])(implicit executor: ExecutionContext): AlmFuture[T] = {
    implicit val schedulerMagnet = implicitly[almhirt.almfuture.ActionSchedulingMagnet[S]]
    val p = Promise[AlmValidation[T]]
    schedulerMagnet.schedule(scheduler, () ⇒ p.complete(scala.util.Success(result)), duration, executor)
    new AlmFuture(p.future)
  }

  /** Starts computing the result after the given duration */
  def delayedComputation[T, S: almhirt.almfuture.ActionSchedulingMagnet](duration: scala.concurrent.duration.FiniteDuration, scheduler: S)(result: ⇒ AlmValidation[T])(implicit executor: ExecutionContext): AlmFuture[T] = {
    implicit val schedulerMagnet = implicitly[almhirt.almfuture.ActionSchedulingMagnet[S]]
    val p = Promise[AlmValidation[T]]
    schedulerMagnet.schedule(scheduler, () ⇒ p.complete(scala.util.Success(result)), duration, executor)
    new AlmFuture(p.future)
  }

  /** Returns the failure with the given Problem after the given duration */
  def delayedFailure[T, S: almhirt.almfuture.ActionSchedulingMagnet](duration: scala.concurrent.duration.FiniteDuration, scheduler: S)(problem: Problem)(implicit executor: ExecutionContext): AlmFuture[T] = {
    delayedResult(duration, scheduler)(problem.failure)
  }

  /** Returns the value after the given duration */
  def delayedSuccess[T, S: almhirt.almfuture.ActionSchedulingMagnet](duration: scala.concurrent.duration.FiniteDuration, scheduler: S)(result: T)(implicit executor: ExecutionContext): AlmFuture[T] = {
    delayedResult(duration, scheduler)(result.success)
  }

  /** Retries `f` according to `policy` until it succeeds or the retries are exhausted. */
  def retry[T, S: almhirt.almfuture.ActionSchedulingMagnet](policy: almhirt.configuration.RetryPolicy, scheduler: S)(f: ⇒ AlmFuture[T])(implicit executor: ExecutionContext): AlmFuture[T] =
    retryScaffolding(f, policy, executor, scheduler, None)

  /**
   * Retries `f` according to `settings`; `beforeRetry` (retries left, next delay, last problem)
   * is invoked before each retry attempt.
   */
  def retryScaffolding[T, S: almhirt.almfuture.ActionSchedulingMagnet](
    f: ⇒ AlmFuture[T],
    settings: almhirt.configuration.RetryPolicy,
    executor: ExecutionContext,
    actionScheduler: S,
    beforeRetry: Option[(almhirt.configuration.NumberOfRetries, scala.concurrent.duration.FiniteDuration, Problem) ⇒ Unit]): AlmFuture[T] = {
    val scheduler = implicitly[almhirt.almfuture.ActionSchedulingMagnet[S]]
    def scheduleAction(action: () ⇒ Unit, in: scala.concurrent.duration.FiniteDuration) =
      scheduler.schedule(actionScheduler, action(), in, executor)
    val p = Promise[AlmValidation[T]]
    innerRetry(f, beforeRetry, None, p, settings.numberOfRetries, settings.delay.calculator, scheduleAction, executor)
    new AlmFuture(p.future)
  }

  // Recursive worker for retryScaffolding. `lastProblem` is None on the very first
  // attempt, so the first run is never delayed and never counted as a retry.
  private def innerRetry[T](
    f: ⇒ AlmFuture[T],
    beforeRetry: Option[(almhirt.configuration.NumberOfRetries, scala.concurrent.duration.FiniteDuration, Problem) ⇒ Unit],
    lastProblem: Option[Problem],
    promise: Promise[AlmValidation[T]],
    retries: almhirt.configuration.NumberOfRetries,
    delayCalculator: almhirt.configuration.RetryDelayCalculator,
    scheduleAction: (() ⇒ Unit, scala.concurrent.duration.FiniteDuration) ⇒ Unit,
    executor: ExecutionContext): Unit = {
    if (lastProblem.isDefined && !retries.hasRetriesLeft) {
      promise.complete(scala.util.Success(lastProblem.get.failure))
    } else {
      val (nextDelay, newCalculator) =
        if (lastProblem.isDefined)
          delayCalculator.next
        else
          (Duration.Zero, delayCalculator)
      beforeRetry.flatMap(reportAction ⇒ lastProblem.map((reportAction, _))).foreach { case (reportAction, lp) ⇒ reportAction(retries, nextDelay, lp) }
      if (nextDelay == Duration.Zero) {
        f.onComplete(
          fail ⇒ innerRetry(f, beforeRetry, Some(fail), promise, retries.oneLess, newCalculator, scheduleAction, executor),
          succ ⇒ promise.complete(scala.util.Success(succ.success)))(executor)
      } else {
        scheduleAction(() ⇒ f.onComplete(
          fail ⇒ innerRetry(f, beforeRetry, Some(fail), promise, retries.oneLess, newCalculator, scheduleAction, executor),
          succ ⇒ promise.complete(scala.util.Success(succ.success)))(executor), nextDelay)
      }
    }
  }
}
| chridou/almhirt | almhirt-common/src/main/scala/almhirt/common/AlmFuture.scala | Scala | apache-2.0 | 23,400 |
import com.github.paulp.optional
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
// Stub application that records what the optional.Application framework parsed.
class InvalidSwitchStubApp extends optional.Application {
  // Stays None when parsing fails; otherwise (remaining args, compulsory, optional).
  var parsedCommandLine: Option[(List[String], String, Option[String])] = None

  def main(compulsory: String, optional: Option[String]) {
    val parsed = (getArgs(), compulsory, optional)
    parsedCommandLine = Some(parsed)
  }
}
// Verifies that the framework rejects command lines containing unknown switches
// or lacking the compulsory one, and accepts valid combinations.
class InvalidSwitchTestSuite extends FunSuite with ShouldMatchers {

  // Renders an optional switch as zero or two tokens ("--name value").
  def switch(name: String, value: Option[String]): Array[String] =
    value.fold(Array.empty[String])(v => Array(name, v))

  // Builds a full command line: positional "begin", the switches, positional "end".
  def commandLine(compulsory: Option[String], optional: Option[String], banned: Option[String]): Array[String] = {
    val switches =
      switch("--compulsory", compulsory) ++
        switch("--optional", optional) ++
        switch("--banned", banned)
    Array("begin") ++ switches ++ Array("end")
  }

  // Runs the stub app and asserts that the command line was rejected (nothing recorded).
  def disallow(compulsory: Option[String], optional: Option[String], banned: Option[String]) = {
    val application = new InvalidSwitchStubApp
    application.main(commandLine(compulsory, optional, banned))
    application.parsedCommandLine should equal (None)
  }

  // Runs the stub app and asserts that the command line was parsed as expected.
  def allow(compulsory: String, optional: Option[String]) = {
    val application = new InvalidSwitchStubApp
    application.main(commandLine(Some(compulsory), optional, None))
    application.parsedCommandLine should equal (Some((List("begin", "end"), compulsory, optional)))
  }

  test ("no switches is error") {
    disallow(None, None, None)
  }
  test ("optional switch only is error") {
    disallow(None, Some("OPTIONAL"), None)
  }
  test ("no compulsory switch is error") {
    disallow(None, Some("OPTIONAL"), Some("BANNED"))
  }
  test ("banned switch only is error") {
    disallow(None, None, Some("BANNED"))
  }
  test ("optional switch only missing switch is error") {
    disallow(Some("COMPULSORY"), None, Some("BANNED"))
  }
  test ("all switches is error") {
    disallow(Some("COMPULSORY"), Some("OPTIONAL"), Some("BANNED"))
  }
  test ("banned switch only missing switch is allowed") {
    allow("COMPULSORY", Some("OPTIONAL"))
  }
  test ("compulsory switch only is allowed") {
    allow("COMPULSORY", None)
  }
}
| ornicar/optional | src/test/scala/error-on-invalid-switch.scala | Scala | bsd-3-clause | 2,364 |
/*
* Copyright (c) 2011 ScalaStuff.org (joint venture of Alexander Dvorkovyy and Ruud Diterwich)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalastuff.proto
import org.scalastuff.scalabeans.Enum
import com.dyuproject.protostuff.{LinkedBuffer, ProtobufIOUtil}
import org.junit.{Assert, Test}
// Round-trips each test bean through the mirror schema formats provided by TestFormat.
class MirrorSchemaTest {
  // Scratch buffer available to the serialization helpers.
  val linkedBuffer = LinkedBuffer.allocate(512)

  import TestFormat._

  @Test
  def testEmptyBean: Unit = {
    checkSerDeserFormats(new EmptyTestBean) { (left, right) => left == right }
  }

  @Test
  def testPrimitiveTypes: Unit = {
    checkFormats(() => new PrimitiveTypesBean)
  }

  @Test
  def testSimpleTypes: Unit = {
    checkFormats(() => new SimpleTypesBean)
  }

  @Test
  def testOption: Unit = {
    checkFormats(() => new OptionTestBean)
  }

  @Test
  def testEnum: Unit = {
    checkFormats(() => new EnumTestBean)
  }

  @Test
  def testJavaEnum: Unit = {
    checkFormats(() => new JavaEnumTestBean)
  }

  @Test
  def testOptional: Unit = {
    checkFormats(() => new OptionalTestBean)
  }

  @Test
  def testCompositeBean: Unit = {
    checkFormats(() => new CompositeTestBean)
  }
}
class EmptyTestBean
// Bean covering every JVM primitive field type. `set1` moves each field away
// from its default so a round trip has non-trivial values to verify.
class PrimitiveTypesBean extends TestBean[PrimitiveTypesBean] {
  var bt: Byte = _
  var s: Short = _
  var i: Int = _
  var l: Long = _
  var bool: Boolean = _
  var f: Float = _
  var d: Double = _
  var c: Char = _
  def set1() = {
    bt = 1
    s = 2
    i = 3
    l = 4
    bool = true
    f = 5.0f
    d = 6.0
    c = 'A'
    this
  }
  // Field-by-field comparison; floats and doubles use a 0.1 tolerance.
  def assertEquals(other: PrimitiveTypesBean) {
    Assert.assertEquals(bt, other.bt)
    Assert.assertEquals(s, other.s)
    Assert.assertEquals(i, other.i)
    Assert.assertEquals(l, other.l)
    Assert.assertEquals(bool, other.bool)
    Assert.assertEquals(f, other.f, 0.1)
    Assert.assertEquals(d, other.d, 0.1)
    Assert.assertEquals(c, other.c)
  }
}
// Bean covering common non-primitive value types (String, BigDecimal, BigInt).
class SimpleTypesBean extends TestBean[SimpleTypesBean] {
  var s:String = ""
  var bd:BigDecimal = 0
  var bi:BigInt = 0
  def set1() = {
    s = "whatever"
    bd = 1.0
    bi = 2
    this
  }
  def assertEquals(other: SimpleTypesBean) {
    Assert.assertEquals(s, other.s)
    Assert.assertEquals(bd, other.bd)
    Assert.assertEquals(bi, other.bi)
  }
}
// Bean covering Option fields: a primitive payload, a non-empty string, and an
// empty string (r2) — the latter checks that Some("") survives a round trip.
class OptionTestBean extends TestBean[OptionTestBean] {
  var p: Option[Int] = None
  var r1: Option[String] = None
  var r2: Option[String] = None
  def set1() = {
    p = Some(1)
    r1 = Some("whatever")
    r2 = Some("")
    this
  }
  def assertEquals(other: OptionTestBean) {
    Assert.assertEquals(p, other.p)
    Assert.assertEquals(r1, other.r1)
    Assert.assertEquals(r2, other.r2)
  }
}
// Private constructor: the only instances are the enumeration values below.
class Gender private ()
// Scala-side enumeration (scalabeans Enum) used to test enum serialization.
object Gender extends Enum[Gender] {
  val Unknown = new Gender
  val M = new Gender
  val F = new Gender
}
// Bean holding a scalabeans-Enum field; set1 switches it away from its default.
class EnumTestBean extends TestBean[EnumTestBean] {
  var e: Gender = Gender.Unknown
  def set1() = {
    e = Gender.M
    this
  }
  def assertEquals(other: EnumTestBean) {
    Assert.assertEquals(e, other.e)
  }
}
// Bean holding a plain Java enum field (RetentionPolicy serves as an arbitrary example).
class JavaEnumTestBean extends TestBean[JavaEnumTestBean] {
  import java.lang.annotation.RetentionPolicy
  var e: RetentionPolicy = RetentionPolicy.CLASS
  def set1() = {
    e = RetentionPolicy.RUNTIME
    this
  }
  def assertEquals(other: JavaEnumTestBean) {
    Assert.assertEquals(e, other.e)
  }
}
// Inverse of the other beans: every field STARTS with a non-default value and
// `set1` resets them to defaults/empties — presumably to exercise how absent or
// default values are encoded on the wire (TODO confirm against TestFormat).
class OptionalTestBean extends TestBean[OptionalTestBean] {
  var bt: Byte = 1
  var s: Short = 2
  var i: Int = 3
  var l: Long = 4
  var bool: Boolean = true
  var f: Float = 6.0f
  var d: Double = 7.0
  var c: Char = 'A'
  var str: String = "whatever"
  var bd:BigDecimal = 8.0
  var bi:BigInt = 9
  var timestamp: java.util.Date = new java.util.Date()
  var dateTime: java.sql.Timestamp = java.sql.Timestamp.valueOf("2011-01-30 10:30:59")
  var date: java.sql.Date = java.sql.Date.valueOf("2010-10-25")
  var op: Option[Long] = Some(10)
  var or1: Option[String] = Some("11")
  var or2: Option[String] = Some("12")
  def set1() = {
    bt = 0
    s = 0
    i = 0
    l = 0
    bool = false
    f = 0f
    d = 0.0
    c = 0
    str = ""
    bd = 0
    bi = 0
    timestamp = new java.util.Date(0)
    dateTime = new java.sql.Timestamp(0)
    date = new java.sql.Date(0)
    op = None
    or1 = None
    // or2 keeps a Some but with an empty payload, distinguishing Some("") from None.
    or2 = Some("")
    this
  }
  // Field-by-field comparison; floats and doubles use a 0.1 tolerance.
  def assertEquals(other: OptionalTestBean) {
    Assert.assertEquals(bt, other.bt)
    Assert.assertEquals(s, other.s)
    Assert.assertEquals(i, other.i)
    Assert.assertEquals(l, other.l)
    Assert.assertEquals(bool, other.bool)
    Assert.assertEquals(f, other.f, 0.1)
    Assert.assertEquals(d, other.d, 0.1)
    Assert.assertEquals(c, other.c)
    Assert.assertEquals(str, other.str)
    Assert.assertEquals(bd, other.bd)
    Assert.assertEquals(bi, other.bi)
    Assert.assertEquals(timestamp, other.timestamp)
    Assert.assertEquals(dateTime, other.dateTime)
    Assert.assertEquals(date, other.date)
    Assert.assertEquals(op, other.op)
    Assert.assertEquals(or1, other.or1)
    Assert.assertEquals(or2, other.or2)
  }
}
/** Aggregates one instance of each other test bean to exercise nested
  * (composite) serialisation in a single round trip. */
class CompositeTestBean extends TestBean[CompositeTestBean] {
  var e: EmptyTestBean = new EmptyTestBean
  var pbt: PrimitiveTypesBean = new PrimitiveTypesBean
  var stp: SimpleTypesBean = new SimpleTypesBean
  var opt: OptionTestBean = new OptionTestBean
  var optl: OptionalTestBean = new OptionalTestBean
  var enumt: EnumTestBean = new EnumTestBean

  /** Puts each nested bean into its "set1" state (EmptyTestBean carries no
    * state and is left untouched); returns this for chaining. */
  def set1() = {
    pbt.set1()
    stp.set1()
    opt.set1()
    optl.set1()
    enumt.set1()
    this
  }

  /** Delegates the equality check to every stateful nested bean. */
  def assertEquals(other: CompositeTestBean) {
    pbt.assertEquals(other.pbt)
    stp.assertEquals(other.stp)
    opt.assertEquals(other.opt)
    optl.assertEquals(other.optl)
    enumt.assertEquals(other.enumt)
  }
} | scalastuff/scalabeans | src/test/scala/org/scalastuff/proto/MirrorSchemaTest.scala | Scala | apache-2.0 | 6,389 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl
import builder.{RouteBuilder, RouteBuilderSupport}
import org.apache.camel.processor.routingslip.RoutingSlipTest
/**
* Scala DSL equivalent for the org.apache.camel.processor.routingslip.RoutingSlipTest
*/
class SRoutingSlipTest extends RoutingSlipTest with RouteBuilderSupport {
  // Re-creates the Java test's routes using the Scala DSL.
  override def createRouteBuilder = new RouteBuilder {
    // slip taken from a fixed header, then forwarded to a mock endpoint
    "direct:a" routingSlip("myHeader") to "mock:end"
    // block-style route definition with the default slip header
    "direct:b" ==> {
      routingSlip("aRoutingSlipHeader")
    }
    // custom URI delimiter ("#") for the slip header
    "direct:c" routingSlip("aRoutingSlipHeader", "#")
    // slip computed dynamically from the in-message body
    "direct:d" routingSlip(_.in[String])
  }
}
| logzio/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/SRoutingSlipTest.scala | Scala | apache-2.0 | 1,408 |
package dk.tennis.dbn.clustergraph
import dk.bayes.factor.Var
import dk.bayes.clustergraph.ClusterGraph
/** Builder/query interface for a cluster graph modelling tennis players'
  * skills over time (dynamic Bayesian network). */
trait TennisClusterGraph {
  // Current evidence keyed by variable id. NOTE(review): the meaning of the
  // Int value is defined by the implementation — presumably a match outcome
  // index; confirm against implementors.
  def getEvidenceMap(): Map[Int, Int]
  // Records match-outcome evidence for the given variable.
  def setEvidence(varId: Int, won: Boolean)
  // Adds a prior skill cluster for the given time slice and returns its variable.
  def addPriorSkillCluster(timeSlice: Int): TennisVar
  // Adds skill-transition cluster(s) from `varFrom` up to `timeSliceTo`.
  def addTransitionSkillCluster(varFrom: TennisVar, timeSliceTo: Int): Seq[TennisVar]
  // Adds a match (emission) cluster linking two players' skill variables.
  def addMatchCluster(playerVar1: TennisVar, playerVar2: TennisVar): TennisVar
  // Underlying graph structure built so far.
  def getClusterGraph(): ClusterGraph
  // Raw CPT parameters for prior, emission and transition factors.
  def getPriorParameter(): Array[Double]
  def getEmissionParameter(): Array[Double]
  def getTransitionParameter(): Array[Double]
} | danielkorzekwa/tennis-rating-dbn-em-scala | src/main/scala/dk/tennis/dbn/clustergraph/TennisClusterGraph.scala | Scala | bsd-2-clause | 629 |
package amora.backend.indexer
import java.io.ByteArrayOutputStream
import java.io.PrintWriter
import java.net.URI
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import amora.backend.Logger
import javax.tools.FileObject
import javax.tools.JavaFileManager
import javax.tools.JavaFileObject
import javax.tools.SimpleJavaFileObject
import javax.tools.StandardJavaFileManager
import javax.tools.ToolProvider
import amora.converter.ClassfileConverter
import amora.converter.protocol.Hierarchy
/**
* Provides functionality to extract information out of Java bytecode.
*/
final class JavaBytecodeIndexer(logger: Logger) {
  /**
   * Converts pairs of file names and Java sources to pairs of file names and
   * hierarchies.
   *
   * Each source is compiled fully in memory with the system javac; the
   * resulting class files (one per generated class, nested classes included)
   * are then fed to the ClassfileConverter.
   */
  def bytecodeToHierarchy(data: Seq[(String, String)]): Try[Seq[(String, Seq[Hierarchy])]] = Try {
    val c = ToolProvider.getSystemJavaCompiler
    // Intercepts compiler output so generated bytecode ends up in byte buffers.
    val vfm = new VirtualJavaFileManager(c.getStandardFileManager(null, null, null))
    data flatMap {
      case (filename, src) ⇒
        require(filename.endsWith(".java"), "Only Java files are currently supported")
        // Compiler diagnostics are captured here instead of going to stderr.
        val s = new ByteArrayOutputStream
        val pw = new PrintWriter(s)
        c.getTask(
          pw, vfm, null,
          /* options */ java.util.Arrays.asList("-parameters"),
          null,
          /* compilationUnits */ java.util.Arrays.asList(new VirtualJavaFile(filename, src))
        ).call()
        val errorMsgs = new String(s.toByteArray(), "UTF-8")
        if (errorMsgs.nonEmpty)
          throw new IllegalStateException(s"Errors occurred during compilation of file `$filename`:\\n$errorMsgs")
        // Convert every generated class file; any conversion failure aborts
        // the whole Try with its cause.
        vfm.buffers.map {
          case (_, bytecode) ⇒
            new ClassfileConverter(_ ⇒ ()).convert(bytecode) match {
              case Success(res) ⇒
                filename → res
              case Failure(f) ⇒
                throw f
            }
        }
    }
  }

  /**
   * Represents an in memory Java file by its name and its source code.
   */
  private final class VirtualJavaFile(name: String, code: String)
      extends SimpleJavaFileObject(URI.create(s"string:///$name"), JavaFileObject.Kind.SOURCE) {
    override def getCharContent(ignoreEncodingErrors: Boolean): CharSequence = code
  }

  /**
   * Wraps a `StandardJavaFileManager` in order to allow us to access the
   * bytecode that is generated by the Java compiler.
   */
  private final class VirtualJavaFileManager(underlying: StandardJavaFileManager) extends JavaFileManager {
    import javax.tools.JavaFileManager._
    import javax.tools.JavaFileObject._

    // className -> captured output stream with the class file bytes.
    private var bs = Map[String, ByteArrayOutputStream]()

    // Snapshot of all captured class files as (className, bytes) pairs.
    def buffers = bs.map { case (k, v) ⇒ k → v.toByteArray() }.toList

    // Redirects compiler output into an in-memory buffer per class.
    override def getJavaFileForOutput(location: Location, className: String, kind: Kind, sibling: FileObject): JavaFileObject = {
      new SimpleJavaFileObject(URI.create(className), kind) {
        override def openOutputStream = {
          val s = new ByteArrayOutputStream
          bs += className → s
          s
        }
      }
    }

    // the following methods forward to the default implementation
    override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject = underlying.getJavaFileForInput(location, className, kind)
    override def getClassLoader(location: Location): ClassLoader = underlying.getClassLoader(location)
    override def close(): Unit = underlying.close()
    override def flush(): Unit = underlying.flush()
    override def getFileForInput(x$1: Location, x$2: String, x$3: String): FileObject = underlying.getFileForInput(x$1, x$2, x$3)
    override def getFileForOutput(x$1: Location, x$2: String, x$3: String, x$4: FileObject): FileObject = underlying.getFileForOutput(x$1, x$2, x$3, x$4)
    override def handleOption(current: String, remaining: java.util.Iterator[String]): Boolean = underlying.handleOption(current, remaining)
    override def hasLocation(x$1: Location): Boolean = underlying.hasLocation(x$1)
    override def inferBinaryName(x$1: Location, x$2: JavaFileObject): String = underlying.inferBinaryName(x$1, x$2)
    override def isSameFile(a: FileObject, b: FileObject): Boolean = underlying.isSameFile(a, b)
    override def isSupportedOption(option: String): Int = underlying.isSupportedOption(option)
    override def list(x$1: Location, x$2: String, x$3: java.util.Set[Kind], x$4: Boolean): java.lang.Iterable[JavaFileObject] = underlying.list(x$1, x$2, x$3, x$4)
  }
}
| sschaef/scalajs-test | backend/src/main/scala/amora/backend/indexer/JavaBytecodeIndexer.scala | Scala | mit | 4,518 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.ws
import akka.stream.scaladsl.Source
import akka.util.ByteString
import play.api.mvc.MultipartFormData
import play.core.formatters.Multipart
/**
* JSON, XML and Multipart Form Data Writables used for Play-WS bodies.
*/
trait WSBodyWritables extends DefaultBodyWritables with JsonBodyWritables with XMLBodyWritables {
  // BodyWritable for streamed multipart/form-data bodies.
  implicit val bodyWritableOf_Multipart: BodyWritable[Source[MultipartFormData.Part[Source[ByteString, _]], _]] = {
    // NOTE(review): the boundary is generated once when this val is
    // initialised (per trait instantiation), so every request built from the
    // same instance shares it — confirm this is intended.
    val boundary = Multipart.randomBoundary()
    val contentType = s"multipart/form-data; boundary=$boundary"
    BodyWritable(b => SourceBody(Multipart.transform(b, boundary)), contentType)
  }
}

// Ready-made instance for `import play.api.libs.ws.WSBodyWritables._` style use.
object WSBodyWritables extends WSBodyWritables
| Shenker93/playframework | framework/src/play-ws/src/main/scala/play/api/libs/ws/WSBodyWritables.scala | Scala | apache-2.0 | 786 |
package io.simao.util
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Try, Success, Failure}
/** Implicit converters bridging Either-based and Try-based error handling. */
object TryOps {
  /** A Left becomes a Failure, a Right becomes a Success. */
  implicit def eitherToTry[T](either: Either[Throwable, T]): Try[T] =
    either.fold(Failure(_), Success(_))

  /** Lifts the conversion over a Future, mapping on the caller's context. */
  implicit def futureEitherToTry[T](futureEither: Future[Either[Throwable, T]])(implicit ec: ExecutionContext): Future[Try[T]] =
    futureEither.map(eitherToTry)
}
| simao/lobsters | src/main/scala/io/simao/util/TryOps.scala | Scala | mit | 459 |
package codecheck.github.models
import org.json4s.JValue
import org.json4s.JNothing
import org.json4s.JNull
import org.json4s.JObject
import org.json4s.JArray
import org.json4s.Formats
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods
import codecheck.github.utils.Json4s.formats
import org.joda.time.DateTime
/**
 * Thin convenience wrapper around a json4s JValue offering dot-separated
 * path access ("a.b.c") with typed extraction. Missing paths and JSON nulls
 * are treated as absent values.
 */
class AbstractJson(value: JValue) {

  // Walks a dot-separated path down the JSON tree.
  private def resolve(path: String): JValue =
    path.split("\\.").foldLeft(value) { (v, s) =>
      v \ s
    }

  // Extracts the value at `path` as T, mapping JNothing/JNull to None.
  // Uses the implicit json4s Formats imported at the top of this file.
  private def typedOpt[T](path: String)(implicit mf: Manifest[T]): Option[T] =
    resolve(path) match {
      case JNothing => None
      case JNull => None
      case v: JValue => Some(v.extract[T])
    }

  /** String at `path`, or None when missing/null. */
  def opt(path: String): Option[String] = typedOpt[String](path)

  /** String at `path`, or the empty string when missing/null. */
  def get(path: String) = opt(path).getOrElse("")

  /** DateTime at `path`, or None when missing/null. */
  def dateOpt(path: String): Option[DateTime] = typedOpt[DateTime](path)

  /** DateTime at `path`; throws when missing/null. */
  def getDate(path: String): DateTime = dateOpt(path).get

  /** Boolean at `path`, or None when missing/null. */
  def booleanOpt(path: String): Option[Boolean] = typedOpt[Boolean](path)

  /** Boolean at `path`; throws when missing/null. */
  def boolean(path: String): Boolean = booleanOpt(path).get

  /** Applies `f` to the JObject at `path`; None when it is not an object. */
  def objectOpt[T](path: String)(f: JValue => T): Option[T] =
    resolve(path) match {
      case x: JObject => Some(f(x))
      case _ => None
    }

  override def toString = JsonMethods.pretty(value)

  /** Values at `path` as a Seq: an array is unwrapped element-wise, a scalar
    * becomes a one-element list, missing/null becomes Nil.
    * NOTE(review): the casts to T are erased at runtime — in the scalar
    * branch the raw JValue (not its unwrapped value) is returned, surfacing
    * as a ClassCastException only at the use site; confirm callers expect
    * this before changing it. */
  def seqOpt[T](path: String): Seq[T] =
    resolve(path) match {
      case JNothing => Nil
      case JNull => Nil
      case v: JArray => v.values.map(_.asInstanceOf[T])
      case v: JValue => List(v.asInstanceOf[T])
    }

  def seq(path: String): Seq[String] = seqOpt(path)
}
| code-check/github-api-scala | src/main/scala/codecheck/github/models/AbstractJson.scala | Scala | mit | 1,854 |
/*
* Copyright 2014 Lars Edenbrandt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.nimsa.sbx.app
object GeneralProtocol {

  /** Version string reported by the service. */
  case class SystemInformation(version: String)

  /** Origin kind of incoming images; rendered to a short string key. */
  sealed trait SourceType {
    override def toString: String = this match {
      //SourceType string used as db key of type VARCHAR(64) so don't do over 64 chars...
      case SourceType.SCP => "scp"
      case SourceType.DIRECTORY => "directory"
      case SourceType.BOX => "box"
      case SourceType.USER => "user"
      case SourceType.IMPORT => "import"
      case _ => "unknown"
    }
  }

  object SourceType {
    case object SCP extends SourceType
    case object DIRECTORY extends SourceType
    case object BOX extends SourceType
    case object USER extends SourceType
    case object UNKNOWN extends SourceType
    case object IMPORT extends SourceType

    // Inverse of toString; unrecognised keys map to UNKNOWN.
    def withName(string: String): SourceType = string match {
      case "scp" => SCP
      case "directory" => DIRECTORY
      case "box" => BOX
      case "user" => USER
      case "import" => IMPORT
      case _ => UNKNOWN
    }
  }

  /** Target kind for outgoing images; rendered to a short string key. */
  sealed trait DestinationType {
    override def toString: String = this match {
      case DestinationType.SCU => "scu"
      case DestinationType.BOX => "box"
      case _ => "unknown"
    }
  }

  object DestinationType {
    case object SCU extends DestinationType
    case object BOX extends DestinationType
    case object UNKNOWN extends DestinationType

    // Inverse of toString; unrecognised keys map to UNKNOWN.
    def withName(string: String): DestinationType = string match {
      case "scu" => SCU
      case "box" => BOX
      case _ => UNKNOWN
    }
  }

  /** A concrete source (type + display name + database id). */
  case class Source(sourceType: SourceType, sourceName: String, sourceId: Long) {
    def toSourceRef: SourceRef = SourceRef(sourceType, sourceId)
  }

  /** Lightweight reference to a source (no display name). */
  case class SourceRef(sourceType: SourceType, sourceId: Long)

  // Events published when sources appear or disappear.
  case class SourceAdded(sourceRef: SourceRef)
  case class SourceDeleted(sourceRef: SourceRef)

  /** A concrete destination (type + display name + database id). */
  case class Destination(destinationType: DestinationType, destinationName: String, destinationId: Long)

  // Events describing image lifecycle and transfers.
  case class ImageAdded(imageId: Long, source: Source, overwrite: Boolean)
  case class ImagesDeleted(imageIds: Seq[Long])
  case class ImagesSent(destination: Destination, imageIds: Seq[Long])

  // DICOM dictionary lookup messages.
  case class DicomDictionaryKeyword(keyword: String)
  case class DicomDictionaryTag(tag: Int)
  case class DicomDictionaryKeywords(keywords: List[String])
  case class DicomValueRepresentation(name: String, code: Int)
}
| slicebox/slicebox | src/main/scala/se/nimsa/sbx/app/GeneralProtocol.scala | Scala | apache-2.0 | 2,984 |
package sttp.client3.impl.cats
import cats.effect.kernel.{Sync, Async}
import cats.~>
import sttp.capabilities.Effect
import sttp.client3.monad.{FunctionK, MapEffect}
import sttp.client3.{Identity, Request, Response, SttpBackend}
import sttp.monad.{MonadAsyncError, MonadError}
// Ready-made instance so users can simply `import ...cats.implicits._`.
object implicits extends CatsImplicits

/** Cats-based conveniences: `mapK` syntax on backends and a MonadAsyncError
  * instance for any Async effect. */
trait CatsImplicits extends LowerLevelCatsImplicits {
  implicit final def sttpBackendToCatsMappableSttpBackend[R[_], P](
      sttpBackend: SttpBackend[R, P]
  ): MappableSttpBackend[R, P] = new MappableSttpBackend(sttpBackend)
  implicit final def asyncMonadError[F[_]: Async]: MonadAsyncError[F] = new CatsMonadAsyncError[F]
}
/** Lower-priority fallback: a plain MonadError for effects that are only
  * Sync, kept in a separate trait to avoid implicit ambiguity. */
trait LowerLevelCatsImplicits {
  implicit final def monadError[F[_]: Sync]: MonadError[F] = new CatsMonadError[F]
}
/** Value-class syntax adding `mapK`: translate a backend's effect type from
  * F to G given natural transformations in both directions. */
final class MappableSttpBackend[F[_], P] private[cats] (
    private val sttpBackend: SttpBackend[F, P]
) extends AnyVal {
  def mapK[G[_]: MonadError](f: F ~> G, g: G ~> F): SttpBackend[G, P] =
    new MappedKSttpBackend(sttpBackend, f, g, implicitly)
}
/** A backend whose effect type has been translated from F to G using the
  * natural transformations `f` (F ~> G) and `g` (G ~> F). */
private[cats] final class MappedKSttpBackend[F[_], +P, G[_]](
    wrapped: SttpBackend[F, P],
    f: F ~> G,
    g: G ~> F,
    val responseMonad: MonadError[G]
) extends SttpBackend[G, P] {
  def send[T, R >: P with Effect[G]](request: Request[T, R]): G[Response[T]] =
    f(
      wrapped.send(
        // Rewrites G-effectful parts of the request back to F so the
        // wrapped backend can execute them, then lifts the result to G.
        MapEffect[G, F, Identity, T, P](
          request: Request[T, P with Effect[G]],
          asFunctionK(g),
          asFunctionK(f),
          responseMonad,
          wrapped.responseMonad
        )
      )
    )

  def close(): G[Unit] = f(wrapped.close())

  // Adapts cats' ~> to sttp's internal FunctionK.
  private def asFunctionK[A[_], B[_]](ab: A ~> B) =
    new FunctionK[A, B] {
      override def apply[X](x: A[X]): B[X] = ab(x)
    }
}
| softwaremill/sttp | effects/cats/src/main/scala/sttp/client3/impl/cats/implicits.scala | Scala | apache-2.0 | 1,729 |
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
// Builds the AST of the block with `reify` and evaluates it at runtime via
// the toolbox; the pattern-match binds foo=1, bar=2, so this prints 2.
object Test extends App {
  reify {
    val foo :: bar :: _ = List(1, 2, 3)
    println(foo * bar)
  }.eval
}
| som-snytt/dotty | tests/disabled/macro/run/t5273_2a_oldpatmat.scala | Scala | apache-2.0 | 183 |
import sbt._
// Central definition of library versions and dependency helpers for the build.
object Dependencies {
  // Servlet API is supplied by the container at runtime.
  val servletApiDep = "javax.servlet" % "javax.servlet-api" % "3.1.0" % "provided"

  // NOTE(review): the Scala-version parameter `sv` is currently unused — the
  // same specs2 artifact is returned for every Scala version. Presumably kept
  // for source compatibility with existing callers; confirm before removing.
  def specs2Dep(sv: String) = {
    "org.specs2" %% "specs2-core" % "4.0.2"
  }

  def okHttp = "com.squareup.okhttp3" % "okhttp" % "3.5.0" :: Nil

  // specs2 + OkHttp, all scoped to the test configuration.
  def integrationTestDeps(sv: String) = (specs2Dep(sv) :: okHttp) map { _ % "test" }

  // Version constants shared across modules.
  val commonsCodecVersion = "1.11"
  val scalacheckVersion = "1.13.5"
  val scalaXmlVersion = "1.0.6"
  val commonsIoVersion = "2.6"
  val commonsFileUploadVersion = "1.3.3"
  val jettyVersion = "9.4.8.v20171121"
  val nettyVersion = "4.1.13.Final"
  val scalatestVersion = "3.0.5-M1"
  val json4sVersion = "3.5.3"
  val asyncHttpClientVersion = "1.8.17"
  val scribeJavaVersion = "3.3.0"
}
| hamnis/unfiltered | project/Dependencies.scala | Scala | mit | 745 |
package submission
import play.api.libs.ws.WSResponse
import play.api.libs.ws.ning.NingWSResponse
import scala.concurrent.{ExecutionContext, Future}
import ExecutionContext.Implicits.global
import play.api.{Logger, http}
import models.domain.Claim
/** Test double for FormSubmission: performs no HTTP, instead returning a
  * canned XML response selected by the transaction id. */
class MockWebServiceClient extends FormSubmission {

  /** Returns a completed Future with an HTTP 200 response whose XML body is
    * chosen by `txnId`; the claim content itself is ignored. */
  def submitClaim(claim: Claim, txnId:String): Future[WSResponse] = {
    Logger.info(s"Claim submitting mock transactionId : ${ txnId}")
    // The underlying Ning response is never touched (null) — only the
    // overridden status and body are used by callers.
    val resp =
      new NingWSResponse(null) {
        override def status: Int = http.Status.OK
        override lazy val body: String =
          getBodyString(txnId)
      }
    Future(resp)
  }

  /** Canned XML body for each supported scenario id. */
  def getBodyString(txnId: String): String = {
    txnId match {
      case "GOOD_SUBMIT" =>
        <response>
          <result>response</result>
          <correlationID>correlationID</correlationID>
          <pollEndpoint>pollEndpoint</pollEndpoint>
          <errorCode></errorCode>
        </response>
          .buildString(stripComments = false)
      case "ERROR_SUBMIT" =>
        <response>
          <result>error</result>
          <correlationID>correlationID</correlationID>
          <pollEndpoint>pollEndpoint</pollEndpoint>
          <errorCode>3001</errorCode>
        </response>
          .buildString(stripComments = false)
      case "RECOVER_SUBMIT" =>
        <response>
          <result>acknowledgement</result>
          <correlationID>correlationID</correlationID>
          <pollEndpoint>pollEndpoint</pollEndpoint>
          <errorCode></errorCode>
        </response>
          .buildString(stripComments = false)
      case "UNKNOWN_SUBMIT" =>
        <response>
          <result></result>
          <correlationID>correlationID</correlationID>
          <pollEndpoint>pollEndpoint</pollEndpoint>
          <errorCode></errorCode>
        </response>
          .buildString(stripComments = false)
      case other =>
        // Previously a bare MatchError; fail with an explicit message so a
        // mistyped scenario id in a test is easy to diagnose.
        throw new IllegalArgumentException(s"Unknown mock transactionId: $other")
    }
  }
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/submission/MockWebServiceClient.scala | Scala | mit | 1,886 |
package leo
package modules.phase
import leo.agents._
import leo.datastructures.blackboard._
import leo.datastructures.blackboard.impl.SZSStore
import leo.datastructures.blackboard.scheduler.Scheduler
import leo.modules.interleavingproc.SZSStatus
object Phase {
  // Predefined phase sequences. All are currently disabled (empty); the
  // original compositions are kept in the trailing comments for reference.
  def getStdPhases : Seq[Phase] = List() //List(new LoadPhase(true), SimplificationPhase, ParamodPhase)
  def getHOStdPhase : Seq[Phase] = List() //List(new LoadPhase(true), PreprocessPhase, ParamodPhase)
  def getSplitFirst : Seq[Phase] = List() //List(new LoadPhase(true), PreprocessPhase, ExhaustiveClausificationPhase, SplitPhase, ParamodPhase)
  def getCounterSat : Seq[Phase] =  List() //List(new LoadPhase(false), FiniteHerbrandEnumeratePhase, PreprocessPhase, ParamodPhase)
  def getCounterSatRemote : Seq[Phase] =  List() //List(new LoadPhase(false), FiniteHerbrandEnumeratePhase, RemoteCounterSatPhase)
  def getExternalPhases : Seq[Phase] = List() //List(new LoadPhase(true), PreprocessPhase, ExternalProverPhase)

  /**
   * Creates a complete phase from a List of Agents.
   *
   * @param dname Name of the Phase
   * @param dagents Agents to be used in this phase.
   * @param endBy Endcondition besides no further work, standard constant false
   * @return - A phase executing all agents until nothing is left to do.
   */
  def apply(dname : String, dagents : Seq[Agent])(blackboard: Blackboard, scheduler: Scheduler, endBy : Delta => Boolean = _ => false): Phase = new CompletePhase(blackboard, scheduler, endBy) {
    override protected def agents: Seq[Agent] = dagents
    override def name: String = dname
  }
}
/**
* Trait for a MainPhase in Leo-III
*
* @author Max Wisniewski
* @since 12/1/14
*/
abstract class Phase(val blackboard: Blackboard, val scheduler : Scheduler) {
  /**
   * Executes the Phase.
   *
   * @return true, if the phase was performed successful and the next phase is allowed to commence. false, otherwise
   */
  def execute() : Boolean

  /**
   * Returns the name of the phase.
   *
   * @return the human-readable phase name
   */
  def name : String

  /**
   * Returns a short description and
   * all agents, that were started, for this phase.
   *
   * @return one agent name per line, indented
   */
  lazy val description : String = s"  Agents used:\\n    ${agents.map(_.name).mkString("\\n    ")}"

  /**
   * A list of all agents to be started.
   *
   * @return the agents this phase registers on the blackboard
   */
  protected def agents : Seq[Agent]

  /**
   * Method to start the agents, defined in `agents`
   */
  protected def init() : Unit = {
    agents.foreach{a => blackboard.registerAgent(a)}
  }

  /**
   * Method to finish the agents.
   */
  protected def end() : Unit = {
    // Pause scheduling before deregistering so no agent keeps running.
    scheduler.pause()
    agents.foreach(a => blackboard.unregisterAgent(a))
    scheduler.clear()
  }
}
/**
* Abstract Phase, that implements
* the execute to start the agents and wait for all to finish.
*/
/**
 * A Phase that runs its agents until a termination signal arrives: either a
 * DoneEvent (no work left), a Delta accepted by `endBy`, or an external kill.
 *
 * @param endBy    additional end condition, checked on every Delta
 * @param endTypes data types the internal wait agent subscribes to
 */
abstract class CompletePhase(blackboard: Blackboard, scheduler: Scheduler, val endBy : Delta => Boolean, endTypes : Seq[DataType[Any]] = Nil) extends Phase(blackboard, scheduler) {
  private def getName = name
  protected var waitAgent : CompleteWait = null

  /** Creates and registers the agent that watches for the end condition. */
  def initWait() : Unit = {
    waitAgent = new CompleteWait
    blackboard.registerAgent(waitAgent)
  }

  override def end() : Unit = {
    super.end()
    agents foreach { a => a.kill() }
    blackboard.unregisterAgent(waitAgent)
    waitAgent = null // (was assigned null twice in a row before)
  }

  /**
   * Blocks until the wait agent signals termination.
   *
   * @return true if the phase finished normally, false if it was killed
   */
  def waitTillEnd() : Boolean = {
    scheduler.signal()
    waitAgent.synchronized { while (!waitAgent.finish) waitAgent.wait() }
    !waitAgent.scedKill
  }

  /**
   * Starts all agents, waits for the termination condition and tears the
   * phase down again. Returns false only when the phase was killed.
   */
  override def execute() : Boolean = {
    init()
    initWait()
    if (!waitTillEnd()) {
      Out.info(s"$name will be terminated and program is quitting.")
      agents foreach { a => a.kill() }
      false
    } else {
      end()
      true
    }
  }

  /** Internal agent that flips `finish` (and wakes waiters) when the phase
    * should stop. */
  protected class CompleteWait extends AbstractAgent {
    var finish = false
    var scedKill = false
    override def interest : Option[Seq[DataType[Any]]] = Some(endTypes)
    @inline override val init: Iterable[Task] = Seq()

    override def filter(event: Event): Iterable[Task] = event match {
      case DoneEvent =>
        // No more work anywhere on the blackboard.
        synchronized { finish = true; notifyAll() }; List()
      case r : Delta =>
        // Custom end condition supplied by the phase.
        if (endBy(r)) {
          synchronized { finish = true; notifyAll() }
        }
        List()
      case _ => List()
    }

    override def name: String = s"${getName}Terminator"

    override def kill(): Unit = synchronized {
      Out.info(s"$name was killed.")
      scedKill = true
      finish = true
      notifyAll()
    }
  }
}
| leoprover/Leo-III | oldsrc/main/scala/leo/modules/phase/Phase.scala | Scala | bsd-3-clause | 5,190 |
package com.seanshubin.detangler.scanner
import java.nio.file.Path
import com.seanshubin.detangler.timer.Timer
/** Dispatches byte loading by file type: compressed archives go through the
  * zip scanner (wrapped in a timing measurement), anything else through the
  * class scanner. */
class FileScannerImpl(zipScanner: ZipScanner,
                      classScanner: ClassScanner,
                      timer: Timer) extends FileScanner {
  override def loadBytes(jarOrClass: Path): Iterable[Seq[Byte]] =
    if (!FileTypes.isCompressed(jarOrClass.toString))
      classScanner.loadBytes(jarOrClass)
    else
      timer.measureTime(s"scan compressed file $jarOrClass") {
        zipScanner.loadBytes(jarOrClass)
      }
}
| SeanShubin/detangler | scanner/src/main/scala/com/seanshubin/detangler/scanner/FileScannerImpl.scala | Scala | unlicense | 569 |
/*
* Copyright 2016-2018 SN127.fi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fi.sn127.tackler.parser
import org.scalatest.FlatSpec
import fi.sn127.tackler.core.{Settings, TxnException}
class TacklerTxnsTest extends FlatSpec {

  behavior of "TacklerTxns with String"

  it should "create git commitId by string" in {
    assert(TacklerTxns.gitCommitId("1234567890") === Right[String,String]("1234567890"))
  }

  it should "create git ref by settings" in {
    val settings = Settings()
    assert(TacklerTxns.gitReference(settings) === Left[String,String]("master"))
  }

  it should "create git ref by string" in {
    assert(TacklerTxns.gitReference("unit-test-ref") === Left[String,String]("unit-test-ref"))
  }

  /**
   * test: 52836ff9-94de-4575-bfae-6b5afa971351
   */
  it should "notice unbalanced transaction" in {
    // Postings sum to 2 instead of zero, so parsing must fail.
    val txnStr =
      """2017-01-01 str
        | e  1
        | a  1
        |""".stripMargin

    val ex = intercept[TxnException]{
      val tt = new TacklerTxns(Settings())
      tt.string2Txns(txnStr)
    }
    assert(ex.getMessage === "TXN postings do not zero: 2")
  }

  /**
   * test: 200aad57-9275-4d16-bdad-2f1c484bcf17
   */
  it should "handle multiple txns" in {
    // Input is deliberately out of date order (str3, str1, str2); the
    // assertions below show the parsed result comes back date-ordered.
    val txnStr =
      """2017-01-03 str3
        | e  1
        | a
        |
        |2017-01-01 str1
        | e  1
        | a
        |
        |2017-01-02 str2
        | e  1
        | a
        |
        |""".stripMargin

    val tt = new TacklerTxns(Settings())
    val txnData = tt.string2Txns(txnStr)

    assert(txnData.txns.length === 3)
    assert(txnData.txns.head.header.description.getOrElse("") === "str1")
    assert(txnData.txns.last.header.description.getOrElse("") === "str3")
  }
}
| jaa127/tackler | core/src/test/scala/fi/sn127/tackler/parser/TacklerTxnsTest.scala | Scala | apache-2.0 | 2,243 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples
import java.util.Random
import scala.math.exp
import breeze.linalg.{Vector, DenseVector}
import org.apache.spark._
/**
* Logistic regression based classification.
* Usage: SparkLR [slices]
*/
object SparkLR {
  val N = 10000  // Number of data points
  val D = 10   // Number of dimensions
  val R = 0.7  // Scaling factor
  val ITERATIONS = 5
  val rand = new Random(42)

  case class DataPoint(x: Vector[Double], y: Double)

  // Generates N labelled points: label y alternates between -1 and 1, and
  // features are Gaussians shifted by y * R so the classes are separable.
  def generateData = {
    def generatePoint(i: Int) = {
      val y = if(i % 2 == 0) -1 else 1
      val x = DenseVector.fill(D){rand.nextGaussian + y * R}
      DataPoint(x, y)
    }
    Array.tabulate(N)(generatePoint)
  }

  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("SparkLR")
    val sc = new SparkContext(sparkConf)
    val numSlices = if (args.length > 0) args(0).toInt else 2
    val points = sc.parallelize(generateData, numSlices).cache()

    // Initialize w to a random value
    var w = DenseVector.fill(D){2 * rand.nextDouble - 1}
    println("Initial w: " + w)

    for (i <- 1 to ITERATIONS) {
      println("On iteration " + i)
      // Full-batch gradient of the logistic loss, summed over all points.
      val gradient = points.map { p =>
        p.x * (1 / (1 + exp(-p.y * (w.dot(p.x)))) - 1) * p.y
      }.reduce(_ + _)
      // Fixed unit step size — this example uses no learning-rate schedule.
      w -= gradient
    }

    println("Final w: " + w)
    sc.stop()
  }
}
| adobe-research/spark-cluster-deployment | initial-deployment-puppet/modules/spark/files/spark/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala | Scala | apache-2.0 | 2,151 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import java.io.InputStreamReader
import java.nio._
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
import scala.concurrent.Promise
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
import com.google.common.io.CharStreams
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.mockito.MockitoSugar
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Network
import org.apache.spark.network.{BlockDataManager, BlockTransferService}
import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.shuffle.BlockFetchingListener
import org.apache.spark.storage.{BlockId, ShuffleBlockId}
import org.apache.spark.util.ThreadUtils
class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with Matchers {
test("security default off") {
val conf = new SparkConf()
.set("spark.app.id", "app-id")
testConnection(conf, conf) match {
case Success(_) => // expected
case Failure(t) => fail(t)
}
}
test("security on same password") {
val conf = new SparkConf()
.set(NETWORK_AUTH_ENABLED, true)
.set(AUTH_SECRET, "good")
.set("spark.app.id", "app-id")
testConnection(conf, conf) match {
case Success(_) => // expected
case Failure(t) => fail(t)
}
}
test("security on mismatch password") {
val conf0 = new SparkConf()
.set(NETWORK_AUTH_ENABLED, true)
.set(AUTH_SECRET, "good")
.set("spark.app.id", "app-id")
val conf1 = conf0.clone.set(AUTH_SECRET, "bad")
testConnection(conf0, conf1) match {
case Success(_) => fail("Should have failed")
case Failure(t) => t.getMessage should include ("Mismatched response")
}
}
test("security mismatch auth off on server") {
val conf0 = new SparkConf()
.set(NETWORK_AUTH_ENABLED, true)
.set(AUTH_SECRET, "good")
.set("spark.app.id", "app-id")
val conf1 = conf0.clone.set(NETWORK_AUTH_ENABLED, false)
testConnection(conf0, conf1) match {
case Success(_) => fail("Should have failed")
case Failure(t) => // any funny error may occur, sever will interpret SASL token as RPC
}
}
test("security mismatch auth off on client") {
val conf0 = new SparkConf()
.set(NETWORK_AUTH_ENABLED, false)
.set(AUTH_SECRET, "good")
.set("spark.app.id", "app-id")
val conf1 = conf0.clone.set(NETWORK_AUTH_ENABLED, true)
testConnection(conf0, conf1) match {
case Success(_) => fail("Should have failed")
case Failure(t) => t.getMessage should include ("Expected SaslMessage")
}
}
test("security with aes encryption") {
val conf = new SparkConf()
.set(NETWORK_AUTH_ENABLED, true)
.set(AUTH_SECRET, "good")
.set("spark.app.id", "app-id")
.set(Network.NETWORK_CRYPTO_ENABLED, true)
.set(Network.NETWORK_CRYPTO_SASL_FALLBACK, false)
testConnection(conf, conf) match {
case Success(_) => // expected
case Failure(t) => fail(t)
}
}
/**
 * Creates two transfer services with different configurations and sees if they can talk.
 * Returns Success(()) if they can transfer a block, and Failure(t) if the block transfer
 * failed properly. We will throw an out-of-band exception if something other than that
 * goes wrong.
 *
 * Both services are closed in finally blocks, so they are released even when the fetch
 * or the content assertion throws (the original leaked them on any exception, e.g. a
 * TestFailedException from `should equal`).
 */
private def testConnection(conf0: SparkConf, conf1: SparkConf): Try[Unit] = {
  // Serve one known shuffle block from a mocked BlockDataManager.
  val blockManager = mock[BlockDataManager]
  val blockId = ShuffleBlockId(0, 1, 2)
  val blockString = "Hello, world!"
  val blockBuffer = new NioManagedBuffer(ByteBuffer.wrap(
    blockString.getBytes(StandardCharsets.UTF_8)))
  when(blockManager.getBlockData(blockId)).thenReturn(blockBuffer)

  // exec0 is the fetching side (conf0), exec1 the serving side (conf1).
  val securityManager0 = new SecurityManager(conf0)
  val exec0 = new NettyBlockTransferService(conf0, securityManager0, "localhost", "localhost",
    0, 1)
  try {
    exec0.init(blockManager)
    val securityManager1 = new SecurityManager(conf1)
    val exec1 = new NettyBlockTransferService(conf1, securityManager1, "localhost", "localhost",
      0, 1)
    try {
      exec1.init(blockManager)
      fetchBlock(exec0, exec1, "1", blockId) match {
        case Success(buf) =>
          val actualString = CharStreams.toString(
            new InputStreamReader(buf.createInputStream(), StandardCharsets.UTF_8))
          actualString should equal(blockString)
          buf.release()
          Success(())
        case Failure(t) =>
          Failure(t)
      }
    } finally {
      exec1.close()
    }
  } finally {
    exec0.close()
  }
}
/**
 * Synchronously fetches a single block, acting as the given executor fetching from another.
 *
 * @param self the service doing the fetching.
 * @param from the service being fetched from.
 * @param execId executor id presented to the remote side.
 * @param blockId the block to fetch.
 * @return the fetched buffer (already retain()-ed; caller releases), or the failure.
 */
private def fetchBlock(
    self: BlockTransferService,
    from: BlockTransferService,
    execId: String,
    blockId: BlockId): Try[ManagedBuffer] = {

  val promise = Promise[ManagedBuffer]()

  // Last argument is null — no download-file manager is supplied here;
  // presumably the block stays in memory (TODO confirm parameter semantics).
  self.fetchBlocks(from.hostName, from.port, execId, Array(blockId.toString),
    new BlockFetchingListener {
      override def onBlockFetchFailure(blockId: String, exception: Throwable): Unit = {
        // tryFailure instead of failure: if the listener is ever invoked more
        // than once, completing an already-completed Promise would throw
        // IllegalStateException inside the network callback thread.
        promise.tryFailure(exception)
      }

      override def onBlockFetchSuccess(blockId: String, data: ManagedBuffer): Unit = {
        // retain() bumps the buffer's refcount so it stays valid after this
        // callback returns; testConnection releases it after reading.
        promise.trySuccess(data.retain())
      }
    }, null)

  ThreadUtils.awaitReady(promise.future, FiniteDuration(10, TimeUnit.SECONDS))
  promise.future.value.get
}
}
| pgandhi999/spark | core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala | Scala | apache-2.0 | 6,313 |
// Mixed Java/Scala regression test: S extends the Java class foo.bar.J and a
// function literal captures the enclosing instance via `S.this` while a
// self alias (`sss`) is in scope.
class S extends foo.bar.J {
  sss => // self alias; the closure below still names the instance as `S.this`
  val fn = () => {
    // `foo` is presumably provided by the Java superclass foo.bar.J — TODO confirm
    foo(S.this)
  }
  fn() // run the closure during construction so the capture is exercised by `new S`
}

object Test {
  def main(args: Array[String]): Unit = {
    // Success condition is simply that constructing S completes without error.
    new S
  }
}
| som-snytt/dotty | tests/run/t4119/S.scala | Scala | apache-2.0 | 159 |
package scala.reflect.internal
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import org.openjdk.jmh.runner.IterationType
import benchmark._
import java.util.concurrent.TimeUnit
import scala.reflect.internal.util.BatchSourceFile
// JMH benchmark measuring the cost of Symbol#info lookups at two different
// compiler phases (erasure and typer).
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class SymbolBenchmark {
  import scala.tools.nsc._

  // Compiler instance and the symbol under test; both are created once per
  // trial in setup() and shared by every measure() invocation.
  var g: Global = _
  var symbol: Global#Symbol = _

  @Setup(Level.Trial)
  def setup(): Unit = {
    // Compile a tiny snippet up to (and including) the typer phase so that
    // p1.C.foo exists as a fully attributed method symbol.
    val settings = new Settings()
    settings.usejavacp.value = true
    settings.stopAfter.value = List("typer")
    val global = new Global(settings)
    g = global
    val run = new global.Run()
    val source = g.newSourceFile("package p1; class C { def foo: List[String] = Nil }")
    run.compileSources(source :: Nil)
    // Look up the symbol for method `foo` on class p1.C.
    val foo = global.rootMirror.getClassIfDefined("p1.C").info.decl(global.newTermName("foo"))
    symbol = foo
  }

  @Benchmark def measure(bh: Blackhole): Unit = {
    // g.phase is mutable global compiler state: each assignment changes the
    // phase at which the subsequent symbol.info query is answered, so the
    // statement order here is significant.
    val r = g.currentRun
    g.phase = r.erasurePhase
    bh.consume(symbol.info)
    g.phase = r.typerPhase
    bh.consume(symbol.info)
  }
}
| scala/scala | test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala | Scala | apache-2.0 | 1,218 |
//
// OrcBindings.scala -- Scala class OrcBindings
// Project OrcScala
//
// $Id: OrcBindings.scala 2985 2012-03-17 19:33:26Z laurenyew $
//
// Created by jthywiss on May 26, 2010.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.script
import java.util.Map
import java.io.File
import javax.script.SimpleBindings
import orc.OrcOptions
/** An extended implementation of <code>javax.script.Bindings</code>
  * with type-specific get and put methods.
  *
  * Every value is stored in the underlying bindings map as a String; the
  * typed getters parse that string and fall back to the supplied default
  * when the key is absent, not a string, or unparsable.
  *
  * @author jthywiss
  */
class OrcBindings(m: Map[String, Object]) extends SimpleBindings(m) with OrcOptions {
  import scala.collection.JavaConversions._

  def this() = this(new java.util.HashMap[String, Object])

  def filename: String = getString("javax.script.filename", "")
  def filename_=(newVal: String) = putString("javax.script.filename", newVal)
  def logLevel: String = getString("orc.logLevel", "INFO")
  def logLevel_=(newVal: String) = putString("orc.logLevel", newVal)

  // Compile options
  def usePrelude: Boolean = getBoolean("orc.usePrelude", true)
  def usePrelude_=(newVal: Boolean) = putBoolean("orc.usePrelude", newVal)
  def includePath: java.util.List[String] = getPathList("orc.includePath", List("."))
  def includePath_=(newVal: java.util.List[String]) = putPathList("orc.includePath", newVal)
  def additionalIncludes: java.util.List[String] = getPathList("orc.additionalIncludes", List())
  def additionalIncludes_=(newVal: java.util.List[String]) = putPathList("orc.additionalIncludes", newVal)
  def typecheck: Boolean = getBoolean("orc.typecheck", false)
  def typecheck_=(newVal: Boolean) = putBoolean("orc.typecheck", newVal)
  def securityCheck: Boolean = getBoolean("orc.securityCheck", false) //SL analysis option
  def securityCheck_=(newVal: Boolean) = putBoolean("orc.securityCheck", newVal) //SL analysis option
  def disableRecursionCheck: Boolean = getBoolean("orc.disableRecursionCheck", false)
  def disableRecursionCheck_=(newVal: Boolean) = putBoolean("orc.disableRecursionCheck", newVal)
  def echoOil: Boolean = getBoolean("orc.echoOil", false)
  def echoOil_=(newVal: Boolean) = putBoolean("orc.echoOil", newVal)
  def oilOutputFile: Option[File] = {
    // An empty string means "no output file configured".
    getString("orc.oilOutputFile", "") match {
      case "" => None
      case f => Some(new File(f))
    }
  }
  def oilOutputFile_=(newVal: Option[File]) = putString("orc.oilOutputFile", newVal.map(_.toString).getOrElse(""))
  def compileOnly: Boolean = getBoolean("orc.onlyCompile", false)
  def compileOnly_=(newVal: Boolean) = putBoolean("orc.onlyCompile", newVal)
  def runOil: Boolean = getBoolean("orc.runOil", false)
  def runOil_=(newVal: Boolean) = putBoolean("orc.runOil", newVal)

  // Execution options
  def classPath: java.util.List[String] = getPathList("orc.classPath", List())
  def classPath_=(newVal: java.util.List[String]) = putPathList("orc.classPath", newVal)
  def showJavaStackTrace: Boolean = getBoolean("orc.showJavaStackTrace", false)
  def showJavaStackTrace_=(newVal: Boolean) = putBoolean("orc.showJavaStackTrace", newVal)
  def disableTailCallOpt: Boolean = getBoolean("orc.disableTailCallOpt", false)
  def disableTailCallOpt_=(newVal: Boolean) = putBoolean("orc.disableTailCallOpt", newVal)
  def stackSize: Int = getInt("orc.stackSize", -1)
  def stackSize_=(newVal: Int) = putInt("orc.stackSize", newVal)
  def maxTokens: Int = getInt("orc.maxTokens", -1)
  def maxTokens_=(newVal: Int) = putInt("orc.maxTokens", newVal)
  def maxSiteThreads: Int = getInt("orc.maxSiteThreads", -1)
  def maxSiteThreads_=(newVal: Int) = putInt("orc.maxSiteThreads", newVal)

  // Capability name -> granted?  Kept as a public var for source compatibility.
  var capabilities = new java.util.HashMap[String, Boolean]()

  /** True iff the named capability has been explicitly granted; absent
    * capabilities are treated as denied. */
  def hasRight(rightName: String): Boolean = {
    if (capabilities.containsKey(rightName)) {
      capabilities.get(rightName)
    } else {
      false
    }
  }

  def setRight(capName: String, newVal: Boolean) {
    capabilities.put(capName, newVal)
  }

  /** Shared parse-with-fallback logic for the numeric getters: parses the
    * String stored under `key` with `parse`, returning `default` when the
    * key is absent, not a String, or unparsable.
    * (Deduplicates the previously copy-pasted bodies of getInt/getLong/
    * getFloat/getDouble.)
    */
  private def getParsed[A](key: String, default: A)(parse: String => A): A =
    get(key) match {
      case s: String =>
        try parse(s) catch { case _: NumberFormatException => default }
      case _ => default
    }

  /** Stores a String value under `key`. */
  def putString(key: String, value: String) {
    put(key, value) // value is already a String; no toString needed
  }

  /** Returns the String stored under `key`, or `default` if absent. */
  def getString(key: String, default: String): String = {
    get(key) match {
      case s: String => s
      case _ => default
    }
  }

  def putInt(key: String, value: Int) {
    put(key, value.toString)
  }

  def getInt(key: String, default: Int): Int = getParsed(key, default)(_.toInt)

  def putLong(key: String, value: Long) {
    put(key, value.toString)
  }

  def getLong(key: String, default: Long): Long = getParsed(key, default)(_.toLong)

  def putBoolean(key: String, value: Boolean) {
    put(key, value.toString)
  }

  /** Only the exact (case-insensitive) words "true" and "false" are
    * recognized; any other stored value yields `default`. This is stricter
    * than String#toBoolean, so it is kept hand-written. */
  def getBoolean(key: String, default: Boolean): Boolean = {
    get(key) match {
      case s: String if s.equalsIgnoreCase("true") => true
      case s: String if s.equalsIgnoreCase("false") => false
      case _ => default
    }
  }

  def putFloat(key: String, value: Float) {
    put(key, value.toString)
  }

  def getFloat(key: String, default: Float): Float = getParsed(key, default)(_.toFloat)

  def putDouble(key: String, value: Double) {
    put(key, value.toString)
  }

  def getDouble(key: String, default: Double): Double = getParsed(key, default)(_.toDouble)

  /** Stores a list of paths as a single File.pathSeparator-joined String. */
  def putPathList(key: String, value: java.util.List[String]) {
    // mkString on an empty list yields "", so no empty-list branch is needed.
    put(key, value.mkString(File.pathSeparator))
  }

  /** Splits the stored path string back into a list; an empty string maps to
    * an empty list, and an absent/non-String value maps to `default`. */
  def getPathList(key: String, default: java.util.List[String]): java.util.List[String] = {
    get(key) match {
      case s: String if (s.length == 0) => new java.util.ArrayList[String](0)
      case s: String => s.split(File.pathSeparator).toList
      case _ => default
    }
  }
}
| laurenyew/cOrcS | src/orc/script/OrcBindings.scala | Scala | bsd-3-clause | 7,046 |
package com.mycompany.scalcium.transformers
import java.io.File
import java.util.regex.Pattern
import scala.Array.canBuildFrom
import scala.io.Source
/** Character span [start, end) of a pattern match within a sentence;
  * (-1, -1) is the sentinel for "no match". */
case class Offset(val start: Int, val end: Int) {
  def isNone = start == -1 && end == -1 // sentinel check
  // Sentinel constructor. NOTE(review): an instance method named `None`
  // shadows scala.None inside this class — a companion-object constant
  // would be cleaner, but is kept for interface compatibility.
  def None = Offset(-1,-1)
}
/** Implementation of the NegEx negation-detection algorithm: given a
  * sentence and a phrase occurring in it, decides whether the phrase is
  * negated, driven by trigger rules loaded from a rules file.
  *
  * @param ruleFile rules file, one rule per line: a trigger phrase, a
  *        double-tab separator, and a bracketed type (PREN, POST, PREP,
  *        POSP or CONJ).
  * @param responses the two possible predictions: responses(0) is returned
  *        when the phrase is found negated, responses(1) otherwise.
  */
class NegexAnnotator(val ruleFile: File,
    val responses: List[String]) {

  // Trigger patterns sorted longest-trigger-first so that longer, more
  // specific triggers take precedence over their substrings.
  val rules = sortRules(ruleFile)

  /**
   * Predicts sense of the phrase in the sentence
   * as one of the responses based on whether the
   * negTagger method returns true or false.
   * @param sentence the sentence.
   * @param phrase the phrase.
   * @param nonStrict true if non-strict mode.
   * @return a response (passed into constructor).
   */
  def predict(sentence: String, phrase: String,
      nonStrict: Boolean): String =
    if (negTagger(sentence, phrase, nonStrict))
      responses(0)
    else responses(1)

  /**
   * Parses trigger rules file and converts them
   * to a List of (trigger pattern, rule type) pairs
   * sorted by descending order of length of
   * original trigger string. This method is called
   * on construction (should not be called from
   * client code).
   * @param ruleFile the trigger rules File.
   * @return List of (trigger pattern, rule type)
   *         sorted by trigger string length.
   */
  def sortRules(ruleFile: File): List[(Pattern,String)] = {
    Source.fromFile(ruleFile)
      .getLines()
      // input format: trigger phrase\\t\\t[TYPE]
      .map(line => {
        val cols = line.split("\\t\\t")
        (cols(0), cols(1))
      })
      .toList
      // sort by length descending
      .sortWith((a,b) => a._1.length > b._1.length)
      // replace runs of whitespace in the trigger by a whitespace regex and
      // compile it, word-boundary anchored on both sides
      .map(pair => (
        Pattern.compile("\\\\b(" + pair._1
          .trim()
          .replaceAll("\\\\s+", "\\\\\\\\s+") + ")\\\\b"),
        pair._2))
  }

  /**
   * This is the heart of the algorithm. It normalizes
   * the incoming sentence, then finds the character
   * offset (start,end) for the phrase. If a CONJ
   * trigger is found, it only considers the part of
   * the sentence where the phrase was found. It
   * looks at the PREN, POST, PREP and POSP (the
   * last 2 if tagPossible=true) looking for trigger
   * terms within 5 words of the phrase.
   * @param sentence the sentence (unnormalized).
   * @param phrase the phrase (unnormalized).
   * @param tagPossible true if non-strict mode
   *        annotation required.
   */
  def negTagger(sentence: String, phrase: String,
      tagPossible: Boolean): Boolean = {
    // Normalize: lowercase and collapse whitespace runs to single spaces.
    val normSent = sentence.toLowerCase()
      .replaceAll("\\\\s+", " ")
    // Starting character position of each word (0 plus one past each space).
    val wordPositions = 0 :: normSent.toCharArray()
      .zipWithIndex
      .filter(ci => ci._1 == ' ')
      .map(ci => ci._2 + 1)
      .toList
    // tag the phrase
    val phrasePattern = Pattern.compile(
      "\\\\b(" +
      phrase.replaceAll("\\\\s+", "\\\\\\\\s+") +
      ")\\\\b", Pattern.CASE_INSENSITIVE)
    val phraseOffset = offset(normSent, phrasePattern)
    // Phrase not found in sentence: cannot be negated.
    if (phraseOffset.isNone) return false
    // look for CONJ trigger terms
    val conjOffsets = offsets(normSent, "[CONJ]", rules)
    if (conjOffsets.isEmpty) {
      // run through the different rule sets,
      // terminating when we find a match
      val triggerTypes = if (tagPossible)
        List("[PREN]", "[POST]", "[PREP]", "[POSP]")
      else List("[PREN]", "[POST]")
      isTriggerInScope(normSent, rules,
        phraseOffset, wordPositions, triggerTypes)
    } else {
      // chop off the side of the sentence where
      // the phrase does not appear, and recurse on the remainder.
      val conjOffset = conjOffsets.head
      if (conjOffset.end < phraseOffset.start) {
        val truncSent = normSent.substring(conjOffset.end + 1)
        negTagger(truncSent, phrase, tagPossible)
      } else if (phraseOffset.end < conjOffset.start) {
        val truncSent = normSent.substring(0, conjOffset.start)
        negTagger(truncSent, phrase, tagPossible)
      } else {
        // Conjunction overlaps the phrase itself: treat as not negated.
        false
      }
    }
  }

  /**
   * Returns true if the trigger term is within the
   * context of the phrase, ie, within 5 words of
   * each other. Recursively checks each rule type
   * in the triggerTypes list.
   * @param sentence the normalized sentence.
   * @param rules the sorted list of rules.
   * @param phraseOffset the phrase offset.
   * @param wordPositions the positions of the
   *        starting character position of each
   *        word in the normalized sentence.
   * @param triggerTypes the trigger types to
   *        check.
   * @return true if trigger is in the context of
   *         the phrase, false if not.
   */
  def isTriggerInScope(
      sentence: String,
      rules: List[(Pattern,String)],
      phraseOffset: Offset,
      wordPositions: List[Int],
      triggerTypes: List[String]): Boolean = {
    if (triggerTypes.isEmpty) false
    else {
      val currentTriggerType = triggerTypes.head
      val triggerOffsets = offsets(sentence,
        currentTriggerType, rules)
      val selectedTriggerOffset = firstNonOverlappingOffset(
        phraseOffset, triggerOffsets)
      if (selectedTriggerOffset.isNone)
        // try with the next trigger pattern
        isTriggerInScope(sentence, rules,
          phraseOffset, wordPositions,
          triggerTypes.tail)
      else {
        // check how far the tokens are. If PRE*
        // token, then there is no distance limit
        // but 5 words is the distance limit for
        // POS* rules.
        if (currentTriggerType.startsWith("[PRE"))
          selectedTriggerOffset.start <
            phraseOffset.start
        else
          wordDistance(phraseOffset,
            selectedTriggerOffset,
            wordPositions) <= 5 &&
            phraseOffset.start <
            selectedTriggerOffset.start
      }
    }
  }

  /**
   * Returns the distance in number of words
   * between the phrase and trigger term.
   * @param phraseOffset (start,end) for phrase.
   * @param triggerOffset (start,end) for trigger.
   * @param wordPositions a list of starting
   *        character positions for each word
   *        in (normalized) sentence.
   * @return number of words between phrase and trigger.
   */
  def wordDistance(phraseOffset: Offset,
      triggerOffset: Offset,
      wordPositions: List[Int]): Int = {
    // Count word starts strictly between the two spans, in either order.
    if (phraseOffset.start < triggerOffset.start)
      wordPositions
        .filter(pos => pos > phraseOffset.end &&
          pos < triggerOffset.start)
        .size
    else
      wordPositions
        .filter(pos => pos > triggerOffset.end &&
          pos < phraseOffset.start)
        .size
  }

  /**
   * Compute the character offset of the first match of
   * the pattern in the (normalized) sentence. If there is
   * no match, then Offset(-1,-1) is returned.
   * @param sentence the normalized sentence.
   * @param pattern the compiled pattern to find.
   */
  def offset(sentence: String,
      pattern: Pattern): Offset = {
    val matcher = pattern.matcher(sentence)
    if (matcher.find())
      Offset(matcher.start(), matcher.end())
    else Offset(-1, -1)
  }

  /**
   * Find all offsets for trigger terms for the
   * specified rule type. Returns a list of offsets
   * for trigger terms that matched.
   * @param sentence the normalized sentence.
   * @param ruleType the rule type to filter on.
   * @param rules the list of sorted rule patterns.
   * @return a List of Offsets for matched triggers
   *         of the specified rule type.
   */
  def offsets(sentence: String, ruleType: String,
      rules: List[(Pattern,String)]): List[Offset] = {
    rules.filter(rule => ruleType.equals(rule._2))
      .map(rule => offset(sentence, rule._1))
      .filter(offset => (! offset.isNone))
  }

  /**
   * Returns the first trigger term that does not
   * overlap with the phrase. May return (-1,-1).
   * @param phraseOffset the offset for the phrase.
   * @param triggerOffsets a list of Offsets for the
   *        triggers.
   * @return the first non-overlapping offset.
   */
  def firstNonOverlappingOffset(phraseOffset: Offset,
      triggerOffsets: List[Offset]): Offset = {
    val phraseRange = Range(phraseOffset.start, phraseOffset.end)
    val nonOverlaps = triggerOffsets
      .filter(offset => {
        // Keep triggers whose character range is disjoint from the phrase's.
        val offsetRange = Range(offset.start, offset.end)
        phraseRange.intersect(offsetRange).size == 0
      })
    if (nonOverlaps.isEmpty) Offset(-1,-1)
    else nonOverlaps.head
  }
}
| sujitpal/scalcium | src/main/scala/com/mycompany/scalcium/transformers/NegexAnnotator.scala | Scala | apache-2.0 | 8,414 |
package edu.scalanus.util
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
class LcfPositionSpec extends FlatSpec with TableDrivenPropertyChecks with Matchers {

  behavior of "LcfPosition"

  it should "generate nice toString" in {
    // Each row pairs a position with its expected textual rendering.
    val expectations = Table(
      ("position", "toString"),
      (LcfPosition(1), "line 1"),
      (LcfPosition(1, 1), "line 1:1"),
      (LcfPosition(1, fileName = "file.txt"), "file.txt:1"),
      (LcfPosition(1, 1, "file.txt"), "file.txt:1:1")
    )
    forEvery(expectations) { (position, rendered) =>
      position.toString shouldBe rendered
    }
  }

  it should "throw IllegalArgumentException given negative line number" in {
    an[IllegalArgumentException] should be thrownBy LcfPosition(-1)
  }

  it should "throw IllegalArgumentException given column number less than -1" in {
    an[IllegalArgumentException] should be thrownBy LcfPosition(10, -2)
  }
}
| mkaput/scalanus | src/test/scala/edu/scalanus/util/LcfPositionSpec.scala | Scala | mit | 947 |
import sbt._
// Central catalogue of library dependencies and resolvers for the sbt build.
object Dependencies {

  // Extra resolvers needed for the spray and akka-quartz artifacts.
  // NOTE(review): plain-http resolvers are insecure (and rejected by recent
  // sbt versions) — consider https equivalents; verify the mirrors exist.
  val resolutionRepos = Seq(
    "spray repo" at "http://repo.spray.io/",
    "typesafe repo" at "http://repo.typesafe.com/typesafe/releases/",
    "theatr.us" at "http://repo.theatr.us"
  )

  // Shared version numbers.
  val akkaV = "2.4.0"
  val sprayV = "1.3.1"

  // Spray HTTP stack.
  val sprayCan = "io.spray" %% "spray-can" % sprayV
  val sprayClient = "io.spray" %% "spray-client" % sprayV
  val sprayRouting = "io.spray" %% "spray-routing" % sprayV
  val sprayTestKit = "io.spray" %% "spray-testkit" % sprayV
  val sprayJson = "io.spray" %% "spray-json" % "1.2.6"

  // Akka actor system, clustering and test kits.
  val akkaActor = "com.typesafe.akka" %% "akka-actor" % akkaV
  val akkaTestKit = "com.typesafe.akka" %% "akka-testkit" % akkaV
  val akkaRemote = "com.typesafe.akka" %% "akka-remote" % akkaV
  val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % akkaV
  val multiNodeTestKit = "com.typesafe.akka" %% "akka-multi-node-testkit" % akkaV
  val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % akkaV
  val akkaContrib = "com.typesafe.akka" %% "akka-contrib" % akkaV

  // Testing, scheduling, metrics and storage.
  val scalaTest = "org.scalatest" %% "scalatest" % "2.2.1"
  val akkaQuartz = "us.theatr" %% "akka-quartz" % "0.3.0"
  val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.7"
  val specs2 = "org.specs2" %% "specs2-core" % "2.3.11"
  val mockito = "org.mockito" % "mockito-all" % "1.9.5"
  val cassandraDriver = "com.datastax.cassandra" % "cassandra-driver-core" % "2.1.4"

  // Serialization, logging and miscellaneous utilities.
  val kryo = "com.esotericsoftware.kryo" % "kryo" % "2.24.0"
  val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.1.0"
  val slf4j = "org.slf4j" % "slf4j-api" % "1.7.7"
  val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.1.2"
  val commonsLang = "commons-lang" % "commons-lang" % "2.6"
  val commonsCodec = "commons-codec" % "commons-codec" % "1.9"
  val parserCombinators = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.2"
  val snappy = "org.xerial.snappy" % "snappy-java" % "1.1.1.6"
  val jacksonAfterBurner = "com.fasterxml.jackson.module" % "jackson-module-afterburner" % "2.4.4"
  val jacksonScala = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.4.4"

  // Helpers that tag a list of modules with an ivy configuration scope.
  def compile(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
  def provided(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
  def test(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "test")
  def runtime(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "runtime")
  def it(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "it")
} | despegar/khronus | project/Dependencies.scala | Scala | apache-2.0 | 3,804 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.optim
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.{T, TestUtils}
import org.scalatest.{FlatSpec, Matchers}
import scala.collection.mutable.ArrayBuffer
// @com.intel.analytics.bigdl.tags.Parallel
@com.intel.analytics.bigdl.tags.Serial
class RMSpropSpec extends FlatSpec with Matchers {
  // Captured at suite construction, so the printed "Time Cost" also includes
  // suite setup time, not only the optimization loop.
  val start = System.currentTimeMillis()

  "RMSprop" should "perform well on rosenbrock function" in {
    // Start from the origin and run RMSprop on the Rosenbrock function,
    // whose global minimum is at (1, 1).
    val x = Tensor[Double](2).fill(0)
    val config = T("learningRate" -> 5e-4)
    val optm = new RMSprop[Double]
    // Loss sampled every 1000 iterations. A val suffices: the buffer itself
    // is mutable (the original's `var` was never reassigned).
    val fx = new ArrayBuffer[Double]
    for (i <- 1 to 10001) {
      val result = optm.optimize(TestUtils.rosenBrock, x, config)
      if ((i - 1) % 1000 == 0) {
        fx += result._2(0)
      }
    }

    println(s"x is \\n$x")
    println("fx is")
    // Print "iteration, loss" pairs without manual index arithmetic.
    for ((loss, idx) <- fx.zipWithIndex) {
      println(s"${idx * 1000 + 1}, $loss")
    }
    val spend = System.currentTimeMillis() - start
    println("Time Cost: " + spend + "ms")

    // `should be <` reports the offending value on failure, unlike
    // `(cond) should be(true)` which only reports "false was not true".
    fx.last should be < 1e-4
    x(Array(1)) should be(1.0 +- 0.01)
    x(Array(2)) should be(1.0 +- 0.01)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/optim/RMSpropSpec.scala | Scala | apache-2.0 | 1,765 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio
import scala.scalajs.js.typedarray._
// Factory methods for byte buffers, mirroring the JDK's java.nio.ByteBuffer
// statics, plus Scala.js-specific overloads for JS typed arrays.
object ByteBuffer {
  private final val HashSeed = -547316498 // "java.nio.ByteBuffer".##

  /** Heap buffer backed by a freshly allocated Array[Byte]. */
  def allocate(capacity: Int): ByteBuffer =
    wrap(new Array[Byte](capacity))

  /** "Direct" buffer; on Scala.js it is backed by a JS typed array. */
  def allocateDirect(capacity: Int): ByteBuffer =
    TypedArrayByteBuffer.allocate(capacity)

  /** Wraps a slice of a byte array; the resulting buffer is writable
    * (last argument `false` = not read-only). */
  def wrap(array: Array[Byte], offset: Int, length: Int): ByteBuffer =
    HeapByteBuffer.wrap(array, 0, array.length, offset, length, false)

  def wrap(array: Array[Byte]): ByteBuffer =
    wrap(array, 0, array.length)

  // Extended API — Scala.js-only overloads wrapping JS ArrayBuffer/Int8Array.
  def wrap(array: ArrayBuffer): ByteBuffer =
    TypedArrayByteBuffer.wrap(array)

  def wrap(array: ArrayBuffer, byteOffset: Int, length: Int): ByteBuffer =
    TypedArrayByteBuffer.wrap(array, byteOffset, length)

  def wrap(array: Int8Array): ByteBuffer =
    TypedArrayByteBuffer.wrap(array)
}
/** Base class of all byte buffers, following java.nio.ByteBuffer semantics.
  *
  * Heap buffers carry a backing `_array` and `_arrayOffset`; for other
  * (e.g. typed-array-backed) buffers those are null / -1.
  */
abstract class ByteBuffer private[nio] (
    _capacity: Int, private[nio] val _array: Array[Byte],
    private[nio] val _arrayOffset: Int)
    extends Buffer(_capacity) with Comparable[ByteBuffer] {

  // Type members consumed by the shared GenBuffer implementation machinery.
  private[nio] type ElementType = Byte
  private[nio] type BufferType = ByteBuffer
  private[nio] type TypedArrayType = Int8Array

  // Constructor for buffers with no backing Scala array.
  def this(_capacity: Int) = this(_capacity, null, -1)

  // Buffers start out big-endian; mutable via order(ByteOrder).
  private[nio] var _isBigEndian: Boolean = true

  def slice(): ByteBuffer

  def duplicate(): ByteBuffer

  def asReadOnlyBuffer(): ByteBuffer

  def get(): Byte

  def put(b: Byte): ByteBuffer

  def get(index: Int): Byte

  def put(index: Int, b: Byte): ByteBuffer

  // Bulk get/put operations delegate to the generic shared implementations.
  @noinline
  def get(dst: Array[Byte], offset: Int, length: Int): ByteBuffer =
    GenBuffer(this).generic_get(dst, offset, length)

  def get(dst: Array[Byte]): ByteBuffer =
    get(dst, 0, dst.length)

  @noinline
  def put(src: ByteBuffer): ByteBuffer =
    GenBuffer(this).generic_put(src)

  @noinline
  def put(src: Array[Byte], offset: Int, length: Int): ByteBuffer =
    GenBuffer(this).generic_put(src, offset, length)

  final def put(src: Array[Byte]): ByteBuffer =
    put(src, 0, src.length)

  @inline final def hasArray(): Boolean =
    GenBuffer(this).generic_hasArray()

  @inline final def array(): Array[Byte] =
    GenBuffer(this).generic_array()

  @inline final def arrayOffset(): Int =
    GenBuffer(this).generic_arrayOffset()

  // The overrides below narrow Buffer's return type to ByteBuffer so that
  // calls can be chained fluently, as in the JDK since Java 9.
  @inline override def position(newPosition: Int): ByteBuffer = {
    super.position(newPosition)
    this
  }

  @inline override def limit(newLimit: Int): ByteBuffer = {
    super.limit(newLimit)
    this
  }

  @inline override def mark(): ByteBuffer = {
    super.mark()
    this
  }

  @inline override def reset(): ByteBuffer = {
    super.reset()
    this
  }

  @inline override def clear(): ByteBuffer = {
    super.clear()
    this
  }

  @inline override def flip(): ByteBuffer = {
    super.flip()
    this
  }

  @inline override def rewind(): ByteBuffer = {
    super.rewind()
    this
  }

  def compact(): ByteBuffer

  def isDirect(): Boolean

  // toString(): String inherited from Buffer

  // Content-based hash over the remaining elements, seeded per buffer type.
  @noinline
  override def hashCode(): Int =
    GenBuffer(this).generic_hashCode(ByteBuffer.HashSeed)

  // Equality is defined by compareTo == 0 (content comparison), as in the JDK.
  override def equals(that: Any): Boolean = that match {
    case that: ByteBuffer => compareTo(that) == 0
    case _ => false
  }

  @noinline
  def compareTo(that: ByteBuffer): Int =
    GenBuffer(this).generic_compareTo(that)(java.lang.Byte.compare(_, _))

  final def order(): ByteOrder =
    if (_isBigEndian) ByteOrder.BIG_ENDIAN
    else ByteOrder.LITTLE_ENDIAN

  final def order(bo: ByteOrder): ByteBuffer = {
    if (bo == null)
      throw new NullPointerException
    _isBigEndian = bo == ByteOrder.BIG_ENDIAN
    this
  }

  // Typed single- and multi-byte accessors; concrete subclasses implement
  // these honoring the current byte order.
  def getChar(): Char
  def putChar(value: Char): ByteBuffer
  def getChar(index: Int): Char
  def putChar(index: Int, value: Char): ByteBuffer

  def asCharBuffer(): CharBuffer

  def getShort(): Short
  def putShort(value: Short): ByteBuffer
  def getShort(index: Int): Short
  def putShort(index: Int, value: Short): ByteBuffer

  def asShortBuffer(): ShortBuffer

  def getInt(): Int
  def putInt(value: Int): ByteBuffer
  def getInt(index: Int): Int
  def putInt(index: Int, value: Int): ByteBuffer

  def asIntBuffer(): IntBuffer

  def getLong(): Long
  def putLong(value: Long): ByteBuffer
  def getLong(index: Int): Long
  def putLong(index: Int, value: Long): ByteBuffer

  def asLongBuffer(): LongBuffer

  def getFloat(): Float
  def putFloat(value: Float): ByteBuffer
  def getFloat(index: Int): Float
  def putFloat(index: Int, value: Float): ByteBuffer

  def asFloatBuffer(): FloatBuffer

  def getDouble(): Double
  def putDouble(value: Double): ByteBuffer
  def getDouble(index: Int): Double
  def putDouble(index: Int, value: Double): ByteBuffer

  def asDoubleBuffer(): DoubleBuffer

  // Internal API

  override private[nio] def isBigEndian: Boolean =
    _isBigEndian

  // Raw single-element load/store implemented by concrete subclasses.
  private[nio] def load(index: Int): Byte

  private[nio] def store(index: Int, elem: Byte): Unit

  @inline
  private[nio] def load(startIndex: Int,
      dst: Array[Byte], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_load(startIndex, dst, offset, length)

  @inline
  private[nio] def store(startIndex: Int,
      src: Array[Byte], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_store(startIndex, src, offset, length)
}
| scala-js/scala-js | javalib/src/main/scala/java/nio/ByteBuffer.scala | Scala | apache-2.0 | 5,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.linalg
import scala.util.Random
import breeze.linalg.{DenseMatrix => BDM, squaredDistance => breezeSquaredDistance}
import org.scalatest.FunSuite
import org.apache.spark.SparkException
import org.apache.spark.mllib.util.TestingUtils._
class VectorsSuite extends FunSuite {
val arr = Array(0.1, 0.0, 0.3, 0.4)
val n = 4
val indices = Array(0, 2, 3)
val values = Array(0.1, 0.3, 0.4)
test("dense vector construction with varargs") {
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.size === arr.length)
assert(vec.values.eq(arr))
}
test("dense vector construction from a double array") {
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.size === arr.length)
assert(vec.values.eq(arr))
}
test("sparse vector construction") {
val vec = Vectors.sparse(n, indices, values).asInstanceOf[SparseVector]
assert(vec.size === n)
assert(vec.indices.eq(indices))
assert(vec.values.eq(values))
}
test("sparse vector construction with unordered elements") {
val vec = Vectors.sparse(n, indices.zip(values).reverse).asInstanceOf[SparseVector]
assert(vec.size === n)
assert(vec.indices === indices)
assert(vec.values === values)
}
test("dense to array") {
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.toArray.eq(arr))
}
test("sparse to array") {
val vec = Vectors.sparse(n, indices, values).asInstanceOf[SparseVector]
assert(vec.toArray === arr)
}
test("vector equals") {
val dv1 = Vectors.dense(arr.clone())
val dv2 = Vectors.dense(arr.clone())
val sv1 = Vectors.sparse(n, indices.clone(), values.clone())
val sv2 = Vectors.sparse(n, indices.clone(), values.clone())
val vectors = Seq(dv1, dv2, sv1, sv2)
for (v <- vectors; u <- vectors) {
assert(v === u)
assert(v.## === u.##)
}
val another = Vectors.dense(0.1, 0.2, 0.3, 0.4)
for (v <- vectors) {
assert(v != another)
assert(v.## != another.##)
}
}
test("vectors equals with explicit 0") {
val dv1 = Vectors.dense(Array(0, 0.9, 0, 0.8, 0))
val sv1 = Vectors.sparse(5, Array(1, 3), Array(0.9, 0.8))
val sv2 = Vectors.sparse(5, Array(0, 1, 2, 3, 4), Array(0, 0.9, 0, 0.8, 0))
val vectors = Seq(dv1, sv1, sv2)
for (v <- vectors; u <- vectors) {
assert(v === u)
assert(v.## === u.##)
}
val another = Vectors.sparse(5, Array(0, 1, 3), Array(0, 0.9, 0.2))
for (v <- vectors) {
assert(v != another)
assert(v.## != another.##)
}
}
test("indexing dense vectors") {
val vec = Vectors.dense(1.0, 2.0, 3.0, 4.0)
assert(vec(0) === 1.0)
assert(vec(3) === 4.0)
}
test("indexing sparse vectors") {
val vec = Vectors.sparse(7, Array(0, 2, 4, 6), Array(1.0, 2.0, 3.0, 4.0))
assert(vec(0) === 1.0)
assert(vec(1) === 0.0)
assert(vec(2) === 2.0)
assert(vec(3) === 0.0)
assert(vec(6) === 4.0)
val vec2 = Vectors.sparse(8, Array(0, 2, 4, 6), Array(1.0, 2.0, 3.0, 4.0))
assert(vec2(6) === 4.0)
assert(vec2(7) === 0.0)
}
test("parse vectors") {
val vectors = Seq(
Vectors.dense(Array.empty[Double]),
Vectors.dense(1.0),
Vectors.dense(1.0E6, 0.0, -2.0e-7),
Vectors.sparse(0, Array.empty[Int], Array.empty[Double]),
Vectors.sparse(1, Array(0), Array(1.0)),
Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0)))
vectors.foreach { v =>
val v1 = Vectors.parse(v.toString)
assert(v.getClass === v1.getClass)
assert(v === v1)
}
val malformatted = Seq("1", "[1,,]", "[1,2b]", "(1,[1,2])", "([1],[2.0,1.0])")
malformatted.foreach { s =>
intercept[SparkException] {
Vectors.parse(s)
println(s"Didn't detect malformatted string $s.")
}
}
}
test("zeros") {
assert(Vectors.zeros(3) === Vectors.dense(0.0, 0.0, 0.0))
}
test("Vector.copy") {
val sv = Vectors.sparse(4, Array(0, 2), Array(1.0, 2.0))
val svCopy = sv.copy
(sv, svCopy) match {
case (sv: SparseVector, svCopy: SparseVector) =>
assert(sv.size === svCopy.size)
assert(sv.indices === svCopy.indices)
assert(sv.values === svCopy.values)
assert(!sv.indices.eq(svCopy.indices))
assert(!sv.values.eq(svCopy.values))
case _ =>
throw new RuntimeException(s"copy returned ${svCopy.getClass} on ${sv.getClass}.")
}
val dv = Vectors.dense(1.0, 0.0, 2.0)
val dvCopy = dv.copy
(dv, dvCopy) match {
case (dv: DenseVector, dvCopy: DenseVector) =>
assert(dv.size === dvCopy.size)
assert(dv.values === dvCopy.values)
assert(!dv.values.eq(dvCopy.values))
case _ =>
throw new RuntimeException(s"copy returned ${dvCopy.getClass} on ${dv.getClass}.")
}
}
test("VectorUDT") {
val dv0 = Vectors.dense(Array.empty[Double])
val dv1 = Vectors.dense(1.0, 2.0)
val sv0 = Vectors.sparse(2, Array.empty, Array.empty)
val sv1 = Vectors.sparse(2, Array(1), Array(2.0))
val udt = new VectorUDT()
for (v <- Seq(dv0, dv1, sv0, sv1)) {
assert(v === udt.deserialize(udt.serialize(v)))
}
}
test("fromBreeze") {
val x = BDM.zeros[Double](10, 10)
val v = Vectors.fromBreeze(x(::, 0))
assert(v.size === x.rows)
}
test("sqdist") {
  val rng = new Random()
  // Validate sqdist against Breeze's reference squared distance for every
  // dense/sparse operand combination across a range of vector sizes.
  for (m <- 1 until 1000 by 100) {
    val nnz = rng.nextInt(m)

    // Build a random sparse vector of size m with nnz stored entries.
    def randomSparse() = {
      val indices = rng.shuffle(0 until m).take(nnz).sorted.toArray
      val values = Array.fill(nnz)(rng.nextDouble)
      Vectors.sparse(m, indices, values)
    }

    val sp1 = randomSparse()
    val sp2 = randomSparse()
    val dn1 = Vectors.dense(sp1.toArray)
    val dn2 = Vectors.dense(sp2.toArray)
    val expected = breezeSquaredDistance(sp1.toBreeze, sp2.toBreeze)

    // sparse-sparse, dense-sparse, and dense-dense code paths must agree.
    assert(Vectors.sqdist(sp1, sp2) ~== expected relTol 1E-8)
    assert(Vectors.sqdist(dn1, sp2) ~== expected relTol 1E-8)
    assert(Vectors.sqdist(dn1, dn2) ~== expected relTol 1E-8)
  }
}
test("foreachActive") {
  // Dense vectors visit every slot; sparse vectors visit only stored
  // entries, including explicitly-stored zeros.
  val dense = Vectors.dense(0.0, 1.2, 3.1, 0.0)
  val sparse = Vectors.sparse(4, Seq((1, 1.2), (2, 3.1), (3, 0.0)))

  val denseSeen = scala.collection.mutable.Map[Int, Double]()
  dense.foreachActive((i, v) => denseSeen.put(i, v))
  assert(denseSeen.size === 4)
  assert(denseSeen.get(0) === Some(0.0))
  assert(denseSeen.get(1) === Some(1.2))
  assert(denseSeen.get(2) === Some(3.1))
  assert(denseSeen.get(3) === Some(0.0))

  val sparseSeen = scala.collection.mutable.Map[Int, Double]()
  sparse.foreachActive((i, v) => sparseSeen.put(i, v))
  // Index 0 was never stored, so only the three stored entries appear —
  // note the explicit zero at index 3 IS active.
  assert(sparseSeen.size === 3)
  assert(sparseSeen.get(1) === Some(1.2))
  assert(sparseSeen.get(2) === Some(3.1))
  assert(sparseSeen.get(3) === Some(0.0))
}
test("vector p-norm") {
  val dense = Vectors.dense(0.0, -1.2, 3.1, 0.0, -4.5, 1.9)
  val sparse = Vectors.sparse(6, Seq((1, -1.2), (2, 3.1), (3, 0.0), (4, -4.5), (5, 1.9)))

  // Both representations hold the same values, so each must match the
  // reference norms computed directly from the materialized array.
  for (v <- Seq(dense, sparse)) {
    val arr = v.toArray
    // L1: sum of absolute values.
    assert(Vectors.norm(v, 1.0) ~== arr.map(math.abs).sum relTol 1E-8)
    // L2: Euclidean norm.
    assert(Vectors.norm(v, 2.0) ~== math.sqrt(arr.map(x => x * x).sum) relTol 1E-8)
    // L-infinity: max absolute value.
    assert(Vectors.norm(v, Double.PositiveInfinity) ~== arr.map(math.abs).max relTol 1E-8)
    // General p-norm with a non-integral p exercises the fallback path.
    assert(Vectors.norm(v, 3.7) ~==
      math.pow(arr.map(x => math.pow(math.abs(x), 3.7)).sum, 1.0 / 3.7) relTol 1E-8)
  }
}
}
| trueyao/spark-lever | mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala | Scala | apache-2.0 | 9,131 |
// Negative compilation test for Scala 3 `erased` parameters: an erased
// parameter is elided at runtime, so its value may never be observed.
// Returning the erased value `x` is rejected (the `// error` marker below),
// while forwarding it into another erased parameter position (`foo(x)`)
// is allowed because the argument itself is also erased.
object Test {
def main(args: Array[String]): Unit = {
def foo (erased i: Int) = 0
val f: (erased Int) => Int =
(erased x: Int) => {
x // error
}
val f2: (erased Int) => Int =
(erased x: Int) => {
foo(x) // ok: erased value flows into an erased parameter, never materialized
}
}
}
| som-snytt/dotty | tests/neg-custom-args/erased/erased-4.scala | Scala | apache-2.0 | 269 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets that match specific criteria, giving a surface-level overview of the dataset's contents without deeper analysis.