code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package is.hail.expr.ir
import is.hail.{ExecStrategy, HailSuite}
import is.hail.utils._
import is.hail.TestUtils._
import is.hail.types._
import is.hail.types.virtual.TString
import org.testng.annotations.Test
/**
 * Tests for Hail's string IR functions (`slice`, `sliceLeft`, `sliceRight`,
 * `substring`, `index`). The expectations encode Python-style semantics:
 * negative indices count from the end of the string and out-of-range slice
 * bounds clamp to an empty result instead of failing -- except `index`,
 * which is fatal when out of bounds.
 */
class StringSliceSuite extends HailSuite {
// Evaluate the IR with the JVM ("java only") execution strategies.
implicit val execStrats = ExecStrategy.javaOnly
// Slicing halfway through a multi-byte character must yield a replacement
// character rather than invalid text.
@Test def unicodeSlicingSlicesCodePoints() {
val poopEmoji = "\\uD83D\\uDCA9"
val s = s"abc${ poopEmoji }def"
// FIXME: The replacement character for slicing halfway into a
// 2-codepoint-wide character differs between UTF8 and UTF16.
// We've tested against the UTF8 character here since that's the encoding we
// currently use, but the replacement character for utf16 is /ufffd.
val replacementCharacter = "?"
assertEvalsTo(invoke("slice", TString, Str(s), I32(0), I32(4)), s"abc$replacementCharacter")
assertEvalsTo(invoke("slice", TString, Str(s), I32(4), I32(8)), s"${ replacementCharacter }def")
assertEvalsTo(invoke("slice", TString, Str(s), I32(0), I32(5)), s"abc$poopEmoji")
}
// slice(s, 0, len) returns the whole string unchanged.
@Test def zeroToLengthIsIdentity() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(0), I32(3)), "abc")
}
@Test def simpleSlicesMatchIntuition() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(3), I32(3)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(1), I32(3)), "bc")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(2), I32(3)), "c")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(0), I32(2)), "ab")
}
// A slice whose start equals its end is always the empty string.
@Test def sizeZeroSliceIsEmptyString() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(2), I32(2)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(1), I32(1)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(0), I32(0)), "")
}
// `substring` is pinned to java.lang.String.substring semantics.
@Test def substringMatchesJavaStringSubstring() {
assertEvalsTo(
invoke("substring", TString, Str("abc"), I32(0), I32(2)),
"abc".substring(0, 2))
assertEvalsTo(
invoke("substring", TString, Str("foobarbaz"), I32(3), I32(5)),
"foobarbaz".substring(3, 5))
}
// Missing (NA) string input propagates to a missing result.
@Test def isStrict() {
assertEvalsTo(invoke("slice", TString, NA(TString), I32(0), I32(2)), null)
assertEvalsTo(invoke("slice", TString, NA(TString), I32(-5), I32(-10)), null)
}
// NOTE(review): the next two test names look swapped relative to the
// functions they exercise (`leftSlice...` calls sliceRight and vice versa);
// confirm the intended naming convention upstream.
// sliceRight(s, i) keeps the suffix starting at index i (like s[i:]).
@Test def leftSliceMatchesIntuition() {
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(2)), "c")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(1)), "bc")
}
// sliceLeft(s, i) keeps the prefix ending before index i (like s[:i]).
@Test def rightSliceMatchesIntuition() {
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(2)), "ab")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(1)), "a")
}
@Test def bothSideSliceMatchesIntuition() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(0), I32(2)), "ab")
// assertEvalsTo(invoke("slice", TString, Str("abc"), I32(1), I32(3)), "bc")
}
// Negative indices count backwards from the end of the string.
@Test def leftSliceIsPythony() {
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(-1)), "c")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(-2)), "bc")
}
@Test def rightSliceIsPythony() {
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(-1)), "ab")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(-2)), "a")
}
// Mixed and fully-negative index pairs behave exactly like Python slices.
@Test def sliceIsPythony() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-3), I32(-1)), "ab")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-3), I32(-2)), "a")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-2), I32(-1)), "b")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-2), I32(-2)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-3), I32(-3)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(1), I32(-1)), "b")
}
// Out-of-range or inverted bounds clamp to an empty (or shortened) result.
@Test def bothSidesSliceFunctionOutOfBoundsNotFatal() {
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(4), I32(4)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(3), I32(2)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-1), I32(2)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-1), I32(-1)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(3), I32(3)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-10), I32(-5)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-5), I32(-10)), "")
assertEvalsTo(invoke("slice", TString, Str("abc"), I32(-10), I32(-1)), "ab")
}
@Test def leftSliceFunctionOutOfBoundsNotFatal() {
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(15)), "")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(4)), "")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(3)), "")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(-3)), "abc")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(-4)), "abc")
assertEvalsTo(invoke("sliceRight", TString, Str("abc"), I32(-100)), "abc")
}
@Test def rightSliceFunctionOutOfBoundsNotFatal() {
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(15)), "abc")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(4)), "abc")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(3)), "abc")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(-3)), "")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(-4)), "")
assertEvalsTo(invoke("sliceLeft", TString, Str("abc"), I32(-100)), "")
}
// index(s, i) supports negative indices but is fatal when out of bounds
// (unlike the slice functions above, which clamp).
@Test def testStringIndex() {
assertEvalsTo(invoke("index", TString, In(0, TString), I32(0)), FastIndexedSeq("Baz" -> TString), "B")
assertEvalsTo(invoke("index", TString, In(0, TString), I32(1)), FastIndexedSeq("Baz" -> TString), "a")
assertEvalsTo(invoke("index", TString, In(0, TString), I32(2)), FastIndexedSeq("Baz" -> TString), "z")
assertEvalsTo(invoke("index", TString, In(0, TString), I32(-1)), FastIndexedSeq("Baz" -> TString), "z")
assertEvalsTo(invoke("index", TString, In(0, TString), I32(-2)), FastIndexedSeq("Baz" -> TString), "a")
assertEvalsTo(invoke("index", TString, In(0, TString), I32(-3)), FastIndexedSeq("Baz" -> TString), "B")
interceptFatal("string index out of bounds") {
assertEvalsTo(invoke("index", TString, In(0, TString), I32(3)), FastIndexedSeq("Baz" -> TString), "B")
}
interceptFatal("string index out of bounds") {
assertEvalsTo(invoke("index", TString, In(0, TString), I32(-4)), FastIndexedSeq("Baz" -> TString), "B")
}
}
}
| hail-is/hail | hail/src/test/scala/is/hail/expr/ir/StringSliceSuite.scala | Scala | mit | 6,486 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
import util.stringFromWriter
/**
 * Indents code echoed by the REPL by the width of the prompt so that
 * compiler error messages line up with what the user actually typed.
 *
 * @param indent number of spaces to prepend to each line
 */
class Formatting(indent: Int) {
// The literal whitespace prefix applied to each indented line.
private val indentation = " " * indent
// Decides whether the given snippet may be indented at all.
private def indenting(code: String): Boolean = {
/** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */
val tokens = List("\\"\\"\\"", "</", "/>")
val noIndent = (code contains "\\n") && (tokens exists code.contains)
!noIndent
}
/** Indent some code by the width of the scala> prompt.
* This way, compiler error messages read better.
*/
def indentCode(code: String) = stringFromWriter(str =>
for (line <- code.lines) {
// `indenting` is re-evaluated against the whole snippet for every line,
// so the snippet is either indented uniformly or not at all.
if (indenting(code)) str print indentation
str println line
str.flush()
}
)
}
/** Factory that sizes a [[Formatting]] from the visible prompt text. */
object Formatting {
  /** Builds a Formatting whose indent equals the width of the prompt's final line. */
  def forPrompt(prompt: String) = {
    val lastPromptLine = prompt.lines.toList.last
    new Formatting(lastPromptLine.length)
  }
}
| felixmulder/scala | src/repl/scala/tools/nsc/interpreter/Formatting.scala | Scala | bsd-3-clause | 933 |
/** NORMAStage.scala -> This file computes the update for all applications of NORMA
Copyright (C) 2015 Stephen Tridgell
This file is part of a pipelined OLK application.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this code. If not, see <http://www.gnu.org/licenses/>.
*/
package OLK.NORMAStage
import Chisel._
/**
 * Common I/O bundle shared by all NORMA stage variants. All numeric ports
 * are fixed-point signals with the given total/fractional bit widths.
 *
 * @param bitWidth  total bit width of every Fixed port
 * @param fracWidth number of fractional bits in every Fixed port
 */
class IOBundle(val bitWidth : Int, val fracWidth : Int) extends Bundle {
// When asserted, the stage freezes its state and makes no dictionary update.
val forceNA = Bool(INPUT)
val sum = Fixed(INPUT, bitWidth, fracWidth)
val zp = Fixed(INPUT, bitWidth, fracWidth)
val wD = Fixed(INPUT, bitWidth, fracWidth)
// Forgetting factor; applied to sum and wD on cycles that were not forced.
val forget = Fixed(INPUT, bitWidth, fracWidth)
val etapos = Fixed(INPUT, bitWidth, fracWidth) // = eta
val etaneg = Fixed(INPUT, bitWidth, fracWidth) // = -eta
val etanu = Fixed(INPUT, bitWidth, fracWidth) // = eta*nu
val etanu1 = Fixed(INPUT, bitWidth, fracWidth) // = -eta*(1-nu)
// Registered (one-cycle delayed) copy of forceNA for downstream stages.
val forceNAout = Bool(OUTPUT)
// High when the stage decided to add the current example to the dictionary.
val addToDict = Bool(OUTPUT)
// Registered function evaluation f(x) for this cycle.
val ft = Fixed(OUTPUT, bitWidth, fracWidth)
// Registered step size (+eta or -eta) chosen for the update.
val alpha = Fixed(OUTPUT, bitWidth, fracWidth)
}
// Only used for NORMAc
/** Classification variant: adds the boolean training label `yC`. */
class IOBundle_C(val bitWidth_c : Int, val fracWidth_c : Int) extends IOBundle(bitWidth_c, fracWidth_c) {
val yC = Bool(INPUT)
}
// Only used for NORMAr
/** Regression variant: adds the fixed-point regression target `yReg`. */
class IOBundle_R(val bitWidth_r : Int, val fracWidth_r : Int) extends IOBundle(bitWidth_r, fracWidth_r) {
val yReg = Fixed(INPUT, bitWidth_r, fracWidth_r)
}
/**
 * NORMAStage: pipeline stage computing the NORMA online-learning update.
 * Selects between classification (type 1), novelty detection (type 2) and
 * regression (type 3) sub-modules and registers the outputs
 * (ft, alpha, addToDict, forceNAout) with one cycle of latency.
 */
class NORMAStage(val bitWidth : Int, val fracWidth : Int, val NORMAtype : Int) extends Module {
Predef.assert(NORMAtype == 1 || NORMAtype == 2 || NORMAtype == 3,
"Norma type must be Classification = 1, Novelty = 2, Regression = 3")
val ZERO = Fixed(0, bitWidth, fracWidth)
// Placeholders rebound below when the chosen IO bundle provides the ports
// (yC: classification label, yReg: regression target).
var yC = Bool(true)
var yReg = ZERO
// Registered state: rho/b thresholds, chosen step size, f(x), and the
// add-to-dictionary decision from the previous cycle.
val rhoReg = Reg(init=ZERO)
val bReg = Reg(init=ZERO)
val alphaReg = Reg(init=ZERO)
val ftReg = Reg(init=ZERO)
val addToDictReg = Reg(init=Bool(false))
// Pick the IO bundle variant for the configured NORMA type; note the side
// effect of capturing yC/yReg from the variant-specific ports.
val io = {
if (NORMAtype == 1) {
val res = new IOBundle_C(bitWidth, fracWidth); yC = res.yC; res
} else if (NORMAtype == 3) {
val res = new IOBundle_R(bitWidth, fracWidth); yReg = res.yReg; res
} else {
new IOBundle(bitWidth, fracWidth) }}
// Instantiate and wire the type-specific update module.
val NORMA = { if (NORMAtype == 1) {
val res = Module(new NORMAc(bitWidth, fracWidth))
res.io.bOld := bReg
// b is frozen while forceNA is asserted.
bReg := Mux(io.forceNA, bReg, res.io.bNew)
res.io.y := yC
res.io.etapos := io.etapos
res.io.etaneg := io.etaneg
// Step size sign follows the classifier's sign output.
alphaReg := Mux(res.io.sign, io.etapos, io.etaneg)
res
} else if (NORMAtype == 2) {
val res = Module(new NORMAn(bitWidth, fracWidth))
// Novelty detection always steps by +eta.
alphaReg := io.etapos
res
} else {
val res = Module(new NORMAr(bitWidth, fracWidth))
res.io.y := yReg
alphaReg := Mux(res.io.sign, io.etapos, io.etaneg)
res
} }
// Registered forceNA aligns with the state computed on the previous cycle.
val forceNAReg = Reg(init=Bool(true), next=io.forceNA)
// Apply the forgetting factor only on cycles that were not forced.
val sumForceNA = Mux(forceNAReg, io.sum, io.forget*%io.sum)
val wDForceNA = Mux(forceNAReg, io.wD, io.forget*%io.wD)
// Common Section
val ft = Mux(addToDictReg, (alphaReg*%io.zp) + sumForceNA, sumForceNA + wDForceNA)
if (NORMAtype == 2)
ftReg := ft - rhoReg
else
ftReg := ft
NORMA.io.ft := ft
NORMA.io.rhoOld := rhoReg
NORMA.io.etanu := io.etanu
NORMA.io.etanu1 := io.etanu1
// rho is frozen while forceNA is asserted.
val newRho = Mux(io.forceNA, rhoReg, NORMA.io.rhoNew)
rhoReg := newRho
printf("rhoReg = %x\\n", rhoReg)
io.forceNAout := forceNAReg
io.alpha := alphaReg
io.ft := ftReg
// A forced cycle never adds to the dictionary.
addToDictReg := Mux(io.forceNA, Bool(false), NORMA.io.addToDict)
io.addToDict := addToDictReg
}
| da-steve101/chisel-pipelined-olk | src/main/scala/NORMAStage/NORMAStage.scala | Scala | gpl-2.0 | 4,031 |
package com.arcusys.valamis.lesson.scorm.service.sequencing
import com.arcusys.valamis.lesson.scorm.model.manifest._
import com.arcusys.valamis.lesson.scorm.model.tracking.{ ActivityState, ObjectiveState }
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
/**
 * Verifies how SCORM exit-condition rules combine: the rule set applies
 * when ANY single rule's condition holds (logical OR), and never applies
 * when the rule list is empty.
 */
class ExitConditionRuleTest extends ActivityStateTreeTestBase {
// Builds a container activity with the given attempt limit and exit rules.
def container(attemptLimit: Option[Int] = None, exitConditionRules: Seq[ExitConditionRule] = Nil) = containerActivity("C1", Some(organizationId), attemptLimit = attemptLimit, exitConditionRules = exitConditionRules)
// Builds an ActivityState over such a container; everything not supplied
// defaults to an inactive, unattempted state.
def activityState(attemptCompleted: Option[Boolean] = None, attemptCount: Int = 0, attemptLimit: Option[Int] = None, objectiveStates: Map[Option[String], ObjectiveState] = Map(), exitConditionRules: Seq[ExitConditionRule] = Nil) =
new ActivityState(activity = container(attemptLimit, exitConditionRules), active = false, suspended = false, attemptCompleted = attemptCompleted,
attemptCompletionAmount = None, attemptAbsoluteDuration = 0, attemptExperiencedDuration = 0,
activityAbsoluteDuration = 0, activityExperiencedDuration = 0, attemptCount = attemptCount, objectiveStates = objectiveStates)
//val checker = new RuleConditionChecker
// Attempt limit (4) exceeded by count (5) AND objective measure known.
"Exit condition rules" should "evaluate to true if all rules are true" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 5,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = Some(0), objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Seq(
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ActivityAttemptLimitExceeded))),
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ObjectiveMeasureKnown, objectiveId = Some("OBJ1"))))
)
)
activity.exitConditionRuleApplies should equal(true)
}
// Limit not exceeded (3 < 4) but the measure-known rule still fires.
it should "evaluate to true if at least one rule is true" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 3,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = Some(0), objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Seq(
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ActivityAttemptLimitExceeded))),
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ObjectiveMeasureKnown, objectiveId = Some("OBJ1"))))
)
)
activity.exitConditionRuleApplies should equal(true)
}
it should "evaluate to true if single rule is true" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 3,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = Some(0), objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Seq(
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ObjectiveMeasureKnown, objectiveId = Some("OBJ1"))))
)
)
activity.exitConditionRuleApplies should equal(true)
}
// normalizedMeasure = None makes the measure unknown, so neither rule holds.
it should "evaluate to false if all rules are false" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 3,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = None, objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Seq(
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ActivityAttemptLimitExceeded))),
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ObjectiveMeasureKnown, objectiveId = Some("OBJ1"))))
)
)
activity.exitConditionRuleApplies should equal(false)
}
it should "evaluate to false if single rule is false" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 3,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = None, objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Seq(
new ExitConditionRule(RuleConditionSet(new RuleCondition(ConditionType.ObjectiveMeasureKnown, objectiveId = Some("OBJ1"))))
)
)
activity.exitConditionRuleApplies should equal(false)
}
// An empty rule list can never apply.
it should "evaluate to false if there're no rules" in {
val activity = activityState(attemptCompleted = Some(false), attemptLimit = Some(4), attemptCount = 3,
objectiveStates = Map(Some("OBJ1") -> new ObjectiveState(satisfied = Some(true), normalizedMeasure = None, objectiveMapInfo = ObjectiveMap.Empty)),
exitConditionRules = Nil
)
activity.exitConditionRuleApplies should equal(false)
}
}
| ViLPy/Valamis | valamis-scorm-lesson/src/test/scala/com/arcusys/valamis/lesson/scorm/service/sequencing/ExitConditionRuleTest.scala | Scala | lgpl-3.0 | 4,814 |
package com.nutomic.ensichat.fragments
import android.app.ListFragment
import android.content.{BroadcastReceiver, Context, Intent, IntentFilter}
import android.os.Bundle
import android.support.v4.content.LocalBroadcastManager
import android.support.v7.widget.Toolbar
import android.view.View.OnClickListener
import android.view.inputmethod.EditorInfo
import android.view.{KeyEvent, LayoutInflater, View, ViewGroup}
import android.widget.TextView.OnEditorActionListener
import android.widget._
import com.nutomic.ensichat.R
import com.nutomic.ensichat.activities.EnsichatActivity
import com.nutomic.ensichat.core.messages.body.Text
import com.nutomic.ensichat.core.messages.Message
import com.nutomic.ensichat.core.routing.Address
import com.nutomic.ensichat.core.ConnectionHandler
import com.nutomic.ensichat.service.CallbackHandler
import com.nutomic.ensichat.views.{DatesAdapter, MessagesAdapter}
/**
 * Represents a single chat with another specific device, identified by its
 * [[Address]]: shows the message history and lets the user send text.
 */
class ChatFragment extends ListFragment with OnClickListener {
/**
 * Fragments need to have a default constructor, so this is optional.
 */
def this(address: Address) {
this
this.address = address
}
private lazy val activity = getActivity.asInstanceOf[EnsichatActivity]
// Remote device this chat is with; restored from the saved instance state
// when the fragment is recreated (see onCreate/onSaveInstanceState).
private var address: Address = _
private var chatService: ConnectionHandler = _
private var sendButton: Button = _
private var messageText: EditText = _
private var listView: ListView = _
private var adapter: DatesAdapter = _
// Once the background service is connected: grab the connection handler,
// set the contact's name as the title and populate the message list.
override def onActivityCreated(savedInstanceState: Bundle): Unit = {
super.onActivityCreated(savedInstanceState)
activity.runOnServiceConnected(() => {
chatService = activity.service.get
activity.database.get.getContact(address).foreach(c => getActivity.setTitle(c.name))
adapter = new DatesAdapter(getActivity,
new MessagesAdapter(getActivity, activity.database.get.getMessages(address), address))
// The list view may not be inflated yet; onCreateView also sets the
// adapter, so only one of the two paths needs to succeed.
if (listView != null) {
listView.setAdapter(adapter)
}
})
}
// Inflates the chat layout and wires the toolbar, the send button (both
// click and IME "done" action) and the message list.
override def onCreateView(inflater: LayoutInflater, container: ViewGroup,
savedInstanceState: Bundle): View = {
val view = inflater.inflate(R.layout.fragment_chat, container, false)
val toolbar = view.findViewById(R.id.toolbar).asInstanceOf[Toolbar]
activity.setSupportActionBar(toolbar)
activity.getSupportActionBar.setDisplayHomeAsUpEnabled(true)
sendButton = view.findViewById(R.id.send).asInstanceOf[Button]
sendButton.setOnClickListener(this)
messageText = view.findViewById(R.id.message).asInstanceOf[EditText]
messageText.setOnEditorActionListener(new OnEditorActionListener {
override def onEditorAction(view: TextView, actionId: Int, event: KeyEvent): Boolean = {
// Keyboard "done" behaves like tapping the send button.
if (actionId == EditorInfo.IME_ACTION_DONE) {
onClick(sendButton)
true
} else
false
}
})
listView = view.findViewById(android.R.id.list).asInstanceOf[ListView]
listView.setAdapter(adapter)
view
}
// Restores the chat address after recreation and starts listening for
// incoming-message broadcasts (unregistered again in onDestroy).
override def onCreate(savedInstanceState: Bundle): Unit = {
super.onCreate(savedInstanceState)
if (savedInstanceState != null)
address = new Address(savedInstanceState.getByteArray("address"))
LocalBroadcastManager.getInstance(getActivity)
.registerReceiver(onMessageReceivedReceiver, new IntentFilter(CallbackHandler.ActionMessageReceived))
}
override def onSaveInstanceState(outState: Bundle): Unit = {
super.onSaveInstanceState(outState)
outState.putByteArray("address", address.bytes)
}
override def onDestroy(): Unit = {
super.onDestroy()
LocalBroadcastManager.getInstance(getActivity).unregisterReceiver(onMessageReceivedReceiver)
}
/**
 * Send message if send button was clicked.
 */
override def onClick(view: View): Unit = view.getId match {
case R.id.send =>
val text = messageText.getText.toString.trim
// Ignore blank input; the box is cleared only after handing the text off.
if (!text.isEmpty) {
val message = new Text(text.toString)
chatService.sendTo(address, message)
messageText.getText.clear()
}
}
/**
 * Displays new messages in UI.
 */
private val onMessageReceivedReceiver = new BroadcastReceiver {
override def onReceive(context: Context, intent: Intent): Unit = {
val msg = intent.getSerializableExtra(CallbackHandler.ExtraMessage).asInstanceOf[Message]
// Only refresh for messages whose origin or target is this chat's peer.
if (!Set(msg.header.origin, msg.header.target).contains(address))
return
msg.body match {
case _: Text =>
val messages = activity.database.get.getMessages(address)
adapter.replaceItems(messages)
case _ =>
}
}
}
}
| Nutomic/ensichat | android/src/main/scala/com/nutomic/ensichat/fragments/ChatFragment.scala | Scala | mpl-2.0 | 4,658 |
package com.twitter.util
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
/**
 * Tests CloseAwaitably: close() must run its body at most once, hand back
 * the same future on repeated calls, and make Await.ready block until the
 * underlying close future completes.
 */
class CloseAwaitablyTest extends FunSuite {
// Builds a Closable whose close() counts invocations in `n` and completes
// only when the promise `p` is fulfilled by the test.
def make() = new Closable with CloseAwaitably {
val p = new Promise[Unit]
var n = 0
def close(deadline: Time) = closeAwaitably {
n += 1
p
}
}
// Repeated close() calls return the same future without re-running the
// body; the future resolves when the promise is completed.
test("close") {
val c = make()
assert(c.n == 0)
val f = c.close(Time.now)
// closeAwaitably wraps the promise, so the returned future is distinct.
assert(f != c.p)
assert(c.n == 1)
assert(c.close(Time.now) == f)
assert(c.n == 1)
assert(f.poll == None)
c.p.setDone()
assert(f.poll == Some(Return.Unit))
}
// Await.ready on the closable blocks until the close future completes.
test("Await.ready") {
val c = make()
// NOTE(review): calling start() inside the Thread constructor publishes
// `this` before construction finishes; it works here but is fragile.
val t = new Thread {
start()
override def run() {
Await.ready(c)
}
}
c.close(Time.now)
assert(t.isAlive)
c.p.setDone()
t.join(10000)
assert(!t.isAlive)
}
}
| mosesn/util | util-core/src/test/scala/com/twitter/util/AwaitableTest.scala | Scala | apache-2.0 | 910 |
package com.blinkbox.books.reading.persistence
import java.net.URI
import com.blinkbox.books.reading._
import org.joda.time.DateTime
/**
 * Persistence-layer row describing one book in a user's reading library.
 *
 * @param isbn               ISBN identifying the book
 * @param userId             owner of this library entry
 * @param ownership          how the user holds the book (project-defined type)
 * @param readingStatus      current reading status (project-defined type)
 * @param progressCfi        optional CFI marking the reading position
 * @param progressPercentage reading progress; presumably 0-100 -- confirm with writers
 * @param createdAt          row creation timestamp
 * @param updatedAt          last modification timestamp
 */
case class LibraryItem(
isbn: String,
userId: Int,
ownership: Ownership,
readingStatus: ReadingStatus,
progressCfi: Option[Cfi],
progressPercentage: Int,
createdAt: DateTime,
updatedAt: DateTime
)
/**
 * Persistence-layer row for a media link attached to a library item.
 *
 * @param isbn      ISBN of the book the link belongs to
 * @param mediaType kind of media the link points to (project-defined type)
 * @param uri       location of the media resource
 * @param createdAt row creation timestamp
 * @param updatedAt last modification timestamp
 */
case class LibraryItemLink(
isbn: String, mediaType: LibraryMediaLinkType,
uri: URI,
createdAt: DateTime,
updatedAt: DateTime
)
| blinkboxbooks/reading-service | common/src/main/scala/com/blinkbox/books/reading/persistence/DbModels.scala | Scala | mit | 486 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.testutils
import java.util.concurrent.atomic.AtomicInteger
import com.waz.api.{UiObservable, UpdateListener}
import com.waz.utils.returning
/**
 * [[UpdateListener]] that merely counts how often `updated()` fired, using
 * the counter inherited from [[SpyBase]].
 */
case class UpdateSpy() extends UpdateListener with SpyBase {
def updated(): Unit = increment()
}
/** Factory creating an [[UpdateSpy]] already registered on the given observable. */
object UpdateSpy {
  def apply(observable: UiObservable): UpdateSpy = {
    val spy = UpdateSpy()
    observable.addUpdateListener(spy)
    spy
  }
}
/** Thread-safe invocation counter mixed into test spies. */
trait SpyBase {
  // Backing counter; atomic so callbacks arriving on other threads are safe.
  private val count = new AtomicInteger

  /** Number of [[increment]] calls since creation or the last [[reset]]. */
  def numberOfTimesCalled: Int = count.get()

  /** Records one invocation. */
  def increment(): Unit = { count.incrementAndGet(); () }

  /** Clears the counter back to zero. */
  def reset(): Unit = count.set(0)
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/testutils/UpdateSpy.scala | Scala | gpl-3.0 | 1,270 |
package appconfig
import helpers.{LoggingComponentImpl, LoggingComponent}
import pdf.{MenusForPeriodParserComponentImpl, ITextPdfParserComponent, FichierMenusParserComponent, PdfParserComponent}
import repositories.MenuRepositoryComponent
import repositories.MenuRepositoryComponentImpl
import services.{ServicesComponentImpl, ServicesComponent}
/**
 * Cake-pattern assembly declaring every component the application needs
 * (logging, PDF parsing, menu parsing, persistence, services) without
 * committing to implementations. Mix-in order is part of the wiring.
 */
trait Context
extends LoggingComponent
with PdfParserComponent
with FichierMenusParserComponent
with MenuRepositoryComponent
with ServicesComponent
/**
 * Production wiring of [[Context]]: mixes in the concrete implementation
 * of every declared component.
 */
// NOTE(review): the final line below carries non-Scala residue appended by a
// data-extraction step ("| dlecan/... |"); strip it for this file to compile.
trait ProductionContext extends Context
with LoggingComponentImpl
with ITextPdfParserComponent
with MenusForPeriodParserComponentImpl
with MenuRepositoryComponentImpl
with ServicesComponentImpl | dlecan/cantines-scolaires | backend/app/AppConfig.scala | Scala | apache-2.0 | 710 |
import sbt._
/** Centralised dependency definitions for the sbt build. */
object Dependencies {
// Dependencies shared by every module. The Akka actor/stream versions are
// factored into vals so related artifacts stay in lockstep.
val commonDependencies = {
val akkaVersion = "2.4.1"
val akkaStreamVersion = "2.0.1"
Seq(
"org.scala-lang" % "scala-library" % "2.11.7",
"org.scala-lang" % "scala-reflect" % "2.11.7",
"wiii" %% "akka-injects" % "0.1",
"gpio4s" %% "gpiocfg" % "0.1",
"io.reactivex" %% "rxscala" % "0.25.0",
"net.java.dev.jna" % "jna" % "4.2.1",
"com.nativelibs4java" % "jnaerator-runtime" % "0.12",
"com.typesafe.akka" %% "akka-actor" % akkaVersion,
"com.typesafe.akka" %% "akka-slf4j" % akkaVersion % Runtime,
"com.typesafe.akka" %% "akka-stream-experimental" % akkaStreamVersion,
"org.scalatest" %% "scalatest" % "2.2.5" % Test,
"com.typesafe.akka" %% "akka-testkit" % akkaVersion % Test,
"com.typesafe.akka" %% "akka-stream-testkit-experimental" % akkaStreamVersion % Test,
"org.scalamock" %% "scalamock-scalatest-support" % "3.2.2" % Test
)
}
}
| jw3/rxgpio | project/Dependencies.scala | Scala | apache-2.0 | 1,081 |
/**
* Copyright (c) 2012, www.quartzsource.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.quartzsource.meutrino
import org.junit.Test
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileOutputStream
/**
 * Repository hygiene test: walks the project tree and reports every text
 * file that still contains CRLF line endings. When `rewrite` is set to
 * true, offending files are rewritten in place with LF endings.
 */
@Test
class FromDosTest extends AbstractTest {
  // Flip to true to actually fix files instead of only reporting them.
  // NOTE: shares its name with the rewrite(File) method below (legal
  // val/def overloading in Scala, but easy to misread).
  val rewrite = false

  @Test
  def testRepository {
    val files = getData(new File("."))
    files.foreach(name => rewrite(name))
  }

  /**
   * Recursively collects candidate files under `folder`, skipping compiled
   * artifacts, IDE metadata, build output and VCS internals.
   */
  def getData(folder: File): List[File] = {
    // File.listFiles returns null when a directory cannot be read; guard it
    // so one unreadable directory does not abort the whole walk with an NPE.
    val children = Option(folder.listFiles).map(_.toList).getOrElse(Nil)
    children.flatMap(name => {
      name.isDirectory() match {
        case true => getData(name)
        case false => name.getPath() match {
          case n if (n.endsWith(".class")) => Nil
          case n if (n.endsWith(".jar")) => Nil
          case n if (n.endsWith(".project")) => Nil
          case n if (n.endsWith(".classpath")) => Nil
          case n if (n.endsWith(".scala_dependencies")) => Nil
          case n if (n.endsWith("hotspot.log")) => Nil
          case n if (isIn(n, "target")) => Nil
          case n if (isIn(n, "bin")) => Nil
          case n if (isIn(n, ".settings")) => Nil
          case n if (isIn(n, ".hg")) => Nil
          case n => List(name)
        }
      }
    })
  }

  /** True when `name` contains `folder` as a complete path segment. */
  def isIn(name: String, folder: String): Boolean = {
    val f = s"${File.separator}${folder}${File.separator}"
    name.contains(f)
  }

  /**
   * Returns true when `file` needs attention (contains CRLF endings or could
   * not be read). With the `rewrite` flag enabled, the file is rewritten
   * with normalised line endings.
   */
  def rewrite(file: File): Boolean = {
    try {
      // Close the Source explicitly: Source.fromFile(...).mkString leaked an
      // open file handle for every file visited in the original version.
      val src = scala.io.Source.fromFile(file)
      val source = try src.mkString finally src.close()
      val processed = source.trim().split("\\r\\n").toList.mkString("\\n") + "\\n"
      // >= 0: the previous `> 0` missed a CRLF at the very start of the file.
      if (source.indexOf("\\r\\n") >= 0 && source != processed) {
        println("Found CRLF EOL in: " + file.getCanonicalPath())
        if (rewrite) {
          val os = new BufferedOutputStream(new FileOutputStream(file))
          try {
            os.write(processed.getBytes("UTF-8"))
          } finally {
            // Close even if the write fails, so the handle is never leaked.
            os.close()
          }
          println("Fixed: " + file.getCanonicalPath())
        }
        true
      } else {
        false
      }
    } catch {
      // Best-effort walk: unreadable files are reported and counted suspect.
      case e: Exception => {
        println("Cannot read: " + file.getCanonicalPath())
        true
      }
    }
  }
}
| cyberspinach/meutrino | src/test/scala/org/quartzsource/meutrino/FromDosTest.scala | Scala | apache-2.0 | 2,582 |
package ammonite.repl
import ammonite.interp.Interpreter
import ammonite.runtime._
import ammonite.util._
/**
 * Concrete ReplAPI implementation: extends the runtime API with the two
 * REPL-only settings (prompt text and front end). All other constructor
 * arguments are forwarded unchanged to RuntimeApiImpl.
 */
class ReplApiImpl(interp: Interpreter,
width0: => Int,
height0: => Int,
colors0: Ref[Colors],
prompt0: Ref[String],
frontEnd0: Ref[FrontEnd],
history0: => History,
sess0: Session,
replArgs0: Seq[Bind[_]]) extends RuntimeApiImpl(interp, width0, height0, colors0, history0, sess0, replArgs0) with ReplAPI{
// Ref cell holding the current prompt string, exposed through ReplAPI.
val prompt = prompt0
// Ref cell holding the current line-reader front end.
val frontEnd = frontEnd0
}
| alexarchambault/ammonium | amm/repl/src/main/scala/ammonite/repl/ReplApiImpl.scala | Scala | mit | 610 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolkit.neuralnetwork.layer
import libcog._
import toolkit.neuralnetwork.function.{Convolution, TrainableState}
import toolkit.neuralnetwork.policy._
import toolkit.neuralnetwork.{DifferentiableField, WeightBinding}
/** Factory for a trainable convolutional layer. */
object ConvolutionLayer {
/**
 * Allocates `filterNum` trainable filters of shape `filterShape` (each
 * `inputLen` planes deep, where inputLen = input vector length / batch
 * size) and wires them into a Convolution node.
 *
 * @param input         layer input; must be a vector field whose vector
 *                      length is an integer multiple of its batch size
 * @param filterShape   2D spatial shape of each filter
 * @param filterNum     number of filters in the bank (>= 1)
 * @param border        border handling policy for the convolution
 * @param learningRule  update rule applied to the filter weights
 * @param stride        convolution stride (default 1)
 * @param impl          implementation selection policy
 * @param initPolicy    weight initialization policy
 * @param weightBinding optional external binding for the weights
 * @return the layer wrapping the convolution output and its weights
 */
def apply(input: DifferentiableField,
filterShape: Shape,
filterNum: Int,
border: BorderPolicy,
learningRule: LearningRule,
stride: Int = 1,
impl: ConvolutionalLayerPolicy = Best,
initPolicy: WeightInitPolicy = ConvInit,
weightBinding: WeightBinding = EmptyBinding): Layer = {
// (the unused `inputShape` local from the original version was removed)
val inputTensorShape = input.forward.tensorShape
require(inputTensorShape.dimensions == 1, s"input must be a vector field, got $inputTensorShape")
require(inputTensorShape(0) % input.batchSize == 0, s"input vector length (${inputTensorShape(0)}) must be an integer multiple of the batch size (${input.batchSize})")
require(filterShape.dimensions == 2, s"filters must be 2D, got $filterShape")
require(filterNum >= 1, s"filter bank must contain at least one filter, got $filterNum")
// Per-example vector length: the tensor packs `batchSize` examples.
val inputLen = inputTensorShape(0) / input.batchSize
// Allocating `filterNum` filters, each of a shape specified by `filterShape` and `inputLen` planes deep
val weights = TrainableState(filterShape, Shape(inputLen * filterNum), initPolicy, learningRule, weightBinding)
Layer(Convolution(input, weights, border, stride, impl), weights)
}
}
| hpe-cct/cct-nn | src/main/scala/toolkit/neuralnetwork/layer/ConvolutionLayer.scala | Scala | apache-2.0 | 2,162 |
package akka.persistence.journal.bdb
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicLong
import akka.actor.Actor
import com.sleepycat.je._
import scala.annotation.tailrec
/**
 * Mixin for the BDB journal actor that manages the mapping from string
 * persistence ids to compact numeric ids, and builds the 16-byte
 * (processorId, sequenceNo) record keys used in the journal database.
 * The mapping itself is persisted in a dedicated "processorIdMapping" DB
 * and reloaded in preStart. Mutable state (`currentId`, `mapping`) is only
 * touched from within the actor, so no extra synchronisation is used.
 */
private[bdb] trait BdbKeys extends Actor {
this: BdbJournal =>
// Next numeric id to hand out; starts at 10, presumably reserving low ids
// (0 is used below for max-seqno keys) -- confirm against BdbJournal.
var currentId: AtomicLong = new AtomicLong(10L)
// In-memory cache of persistenceId -> numeric id, loaded by init().
var mapping: Map[String, Long] = Map.empty
val mappingDbConfig = new DatabaseConfig()
.setAllowCreate(true)
.setTransactional(true)
val mappingDb = env.openDatabase(null, "processorIdMapping", mappingDbConfig)
// Record key: 8-byte numeric processor id followed by 8-byte sequence number.
def getKey(processorId: String, sequenceNo: Long): DatabaseEntry = {
val buffer = ByteBuffer.allocate(16)
buffer.putLong(getPersistenceId(processorId))
buffer.putLong(sequenceNo)
new DatabaseEntry(buffer.array)
}
// Same key layout when the numeric id is already known.
def getKey(processorId: Long, seqNo: Long): DatabaseEntry = {
new DatabaseEntry(
ByteBuffer.allocate(16)
.putLong(processorId)
.putLong(seqNo)
.array
)
}
// Key in the reserved id-0 namespace where the highest sequence number for
// a given processor is tracked.
def getMaxSeqnoKey(processorId: Long): DatabaseEntry = {
new DatabaseEntry(
ByteBuffer.allocate(16)
.putLong(0L)
.putLong(processorId)
.array
)
}
/**
 * Returns the numeric id for `persistenceId`, allocating and persisting a
 * fresh one (inside a transaction) on first sight.
 */
def getPersistenceId(persistenceId: String): Long = {
mapping.get(persistenceId) match {
case Some(id) => id
case None =>
val nextId = currentId.addAndGet(1L)
val dbKey = new DatabaseEntry(persistenceId.getBytes("UTF-8"))
val dbVal = new DatabaseEntry(ByteBuffer.allocate(8).putLong(nextId).array)
val tx = env.beginTransaction(null, null)
try {
// KEYEXIST means the in-memory cache is out of sync with the DB.
if (mappingDb.put(tx, dbKey, dbVal) == OperationStatus.KEYEXIST) {
throw new IllegalStateException("Attempted to insert already existing persistenceId mapping.")
}
mapping = mapping + (persistenceId -> nextId)
nextId
} finally {
cleanupTx(tx)
}
}
}
// Loads the whole persistenceId -> id mapping from the database by walking
// a transactional cursor from the first record.
def init() = {
@tailrec
def cursorIterate(first: Boolean, cursor: Cursor, mapping: Map[String, Long]): Map[String, Long] = {
val dbKey = new DatabaseEntry()
val dbVal = new DatabaseEntry()
first match {
case true =>
if (cursor.getFirst(dbKey, dbVal, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
cursorIterate(first = false, cursor, Map(new String(dbKey.getData, "UTF-8") -> ByteBuffer.wrap(dbVal.getData).getLong))
} else {
Map.empty
}
case false =>
if (cursor.getNext(dbKey, dbVal, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
cursorIterate(first = false, cursor, mapping + (new String(dbKey.getData, "UTF-8") -> ByteBuffer.wrap(dbVal.getData).getLong))
} else {
mapping
}
}
}
withTransactionalCursor(mappingDb) {
(cursor, tx) =>
mapping = cursorIterate(first = true, cursor, Map.empty)
}
}
// NOTE(review): init() rebuilds `mapping` but never advances `currentId`
// past previously persisted ids; after a restart, newly allocated ids could
// collide with existing ones. Confirm whether BdbJournal compensates.
override def preStart(): Unit = {
super.preStart()
init()
}
override def postStop(): Unit = {
mappingDb.close()
super.postStop()
}
}
| ataraxer/akka-persistence-bdb | src/main/scala/akka/persistence/journal/bdb/BdbKeys.scala | Scala | apache-2.0 | 3,051 |
package org.jetbrains.plugins.scala
package annotator
import com.intellij.codeInsight.daemon.impl.AnnotationHolderImpl
import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.codeInspection._
import com.intellij.lang.annotation._
import com.intellij.openapi.editor.markup.TextAttributes
import com.intellij.openapi.project.DumbAware
import com.intellij.openapi.roots.ProjectFileIndex
import com.intellij.openapi.util.{Condition, Key, TextRange}
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.annotator.createFromUsage._
import org.jetbrains.plugins.scala.annotator.importsTracker.ImportTracker._
import org.jetbrains.plugins.scala.annotator.importsTracker.ScalaRefCountHolder
import org.jetbrains.plugins.scala.annotator.intention._
import org.jetbrains.plugins.scala.annotator.intention.sbt.AddSbtDependencyFix
import org.jetbrains.plugins.scala.annotator.modifiers.ModifierChecker
import org.jetbrains.plugins.scala.annotator.quickfix._
import org.jetbrains.plugins.scala.annotator.template._
import org.jetbrains.plugins.scala.codeInspection.caseClassParamInspection.{RemoveValFromEnumeratorIntentionAction, RemoveValFromGeneratorIntentionAction}
import org.jetbrains.plugins.scala.components.HighlightingAdvisor
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.highlighter.{AnnotatorHighlighter, DefaultHighlighter}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.macros.expansion.RecompileAnnotationAction
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScConstructorPattern, ScInfixPattern, ScPattern}
import org.jetbrains.plugins.scala.lang.psi.api.base.types._
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression.ExpressionTypeResult
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter, ScParameters, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.usages.{ImportUsed, ReadValueUsed, ValueUsed, WriteValueUsed}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.{ScImportExpr, ScImportSelector}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.api.{ScalaElementVisitor, ScalaFile}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeFromText
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScInterpolatedStringPartReference
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaPsiElementFactory, ScalaPsiManager}
import org.jetbrains.plugins.scala.lang.psi.light.scala.isLightScNamedElement
import org.jetbrains.plugins.scala.lang.psi.types.api._
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.ScDesignatorType
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.psi.types.{Compatibility, ScType, ScalaType, ValueClassType}
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
import org.jetbrains.plugins.scala.lang.resolve._
import org.jetbrains.plugins.scala.lang.resolve.processor.MethodResolveProcessor
import org.jetbrains.plugins.scala.lang.scaladoc.parser.parsing.MyScaladocParsing
import org.jetbrains.plugins.scala.lang.scaladoc.psi.api.{ScDocResolvableCodeReference, ScDocTag}
import org.jetbrains.plugins.scala.lang.scaladoc.psi.impl.ScDocResolvableCodeReferenceImpl
import org.jetbrains.plugins.scala.project.{ProjectContext, ProjectContextOwner, ProjectPsiElementExt, ScalaLanguageLevel}
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
import org.jetbrains.plugins.scala.util.{MultilineStringUtil, ScalaUtils}
import scala.collection.mutable.ArrayBuffer
import scala.collection.{Seq, mutable}
import scala.meta.intellij.MetaExpansionsManager
/**
* User: Alexander Podkhalyuzin
* Date: 23.06.2008
*/
abstract class ScalaAnnotator extends Annotator
with FunctionAnnotator with ScopeAnnotator
with ParametersAnnotator with ApplicationAnnotator
with AssignmentAnnotator with VariableDefinitionAnnotator
with TypedStatementAnnotator with PatternDefinitionAnnotator
with PatternAnnotator with ConstructorAnnotator
with OverridingAnnotator with ValueClassAnnotator
with ProjectContextOwner with DumbAware {
  /** Entry point of the annotator: runs all Scala error/warning checks for one PSI element.
    *
    * The work happens in three phases:
    *  1. a `ScalaElementVisitor` dispatching per-element-kind checks (expressions,
    *     patterns, definitions, modifiers, ...),
    *  2. syntax-based highlighting via `AnnotatorHighlighter`,
    *  3. template-definition and type-parameter-bound checks applied to `element` itself.
    *
    * `typeAware` gates the expensive, resolve-based checks; it is false for Dotty
    * modules or when advanced highlighting is disabled for this element.
    * `compiled` suppresses checks that make no sense on decompiled library sources.
    */
  override def annotate(element: PsiElement, holder: AnnotationHolder) {
    val typeAware = isAdvancedHighlightingEnabled(element) && !element.isInDottyModule
    // `isInSources` drives usage statistics only; the stats are triggered once
    // per file (when the visited element IS the file).
    val (compiled, isInSources) = element.getContainingFile match {
      case file: ScalaFile =>
        val isInSources: Boolean = ScalaUtils.isUnderSources(file)
        if (isInSources && (element eq file)) {
          if (typeAware) Stats.trigger("scala.file.with.type.aware.annotated")
          else Stats.trigger("scala.file.without.type.aware.annotated")
        }
        (file.isCompiled, isInSources)
      case _ => (false, false)
    }

    val visitor = new ScalaElementVisitor {
      // Checks shared by every expression kind: type conformance, implicit
      // parameters, by-name argument highlighting, implicit-view highlighting.
      private def expressionPart(expr: ScExpression) {
        if (!compiled) {
          checkExpressionType(expr, holder, typeAware)
          checkExpressionImplicitParameters(expr, holder)
          ByNameParameter.annotate(expr, holder, typeAware)
        }

        if (isAdvancedHighlightingEnabled(element)) {
          expr.getTypeAfterImplicitConversion() match {
            case ExpressionTypeResult(Right(t), _, Some(implicitFunction)) =>
              highlightImplicitView(expr, implicitFunction.element, t, expr, holder)
            case _ =>
          }
        }
      }

      override def visitAnnotTypeElement(annot: ScAnnotTypeElement): Unit = {
        super.visitAnnotTypeElement(annot)
      }

      // Reports a wrong number of type arguments on both brackets of the
      // type-argument list (e.g. `Map[Int]`).
      override def visitParameterizedTypeElement(parameterized: ScParameterizedTypeElement) {
        val typeParamOwner = parameterized.typeElement.getTypeNoConstructor.toOption
          .flatMap(_.extractDesignated(expandAliases = false))
          .collect {case t: ScTypeParametersOwner => t}
        typeParamOwner.foreach { t =>
          val typeParametersLength = t.typeParameters.length
          val argsLength = parameterized.typeArgList.typeArgs.length
          if (typeParametersLength != argsLength) {
            val error = "Wrong number of type parameters. Expected: " + typeParametersLength + ", actual: " + argsLength
            val leftBracket = parameterized.typeArgList.getNode.findChildByType(ScalaTokenTypes.tLSQBRACKET)
            if (leftBracket != null) {
              val annotation = holder.createErrorAnnotation(leftBracket, error)
              annotation.setHighlightType(ProblemHighlightType.ERROR)
            }
            val rightBracket = parameterized.typeArgList.getNode.findChildByType(ScalaTokenTypes.tRSQBRACKET)
            if (rightBracket != null) {
              val annotation = holder.createErrorAnnotation(rightBracket, error)
              annotation.setHighlightType(ProblemHighlightType.ERROR)
            }
          }
        }
        super.visitParameterizedTypeElement(parameterized)
      }

      override def visitExpression(expr: ScExpression) {
        expressionPart(expr)
        super.visitExpression(expr)
      }

      override def visitMacroDefinition(fun: ScMacroDefinition): Unit = {
        if (isInSources) Stats.trigger("scala.macro.definition")
        super.visitMacroDefinition(fun)
      }

      override def visitReferenceExpression(ref: ScReferenceExpression) {
        referencePart(ref)
        visitExpression(ref)
      }

      // `val` in for-comprehension enumerators is deprecated Scala syntax.
      override def visitEnumerator(enum: ScEnumerator) {
        enum.valKeyword match {
          case Some(valKeyword) =>
            val annotation = holder.createWarningAnnotation(valKeyword, ScalaBundle.message("enumerator.val.keyword.deprecated"))
            annotation.setHighlightType(ProblemHighlightType.LIKE_DEPRECATED)
            annotation.registerFix(new RemoveValFromEnumeratorIntentionAction(enum))
          case _ =>
        }
        super.visitEnumerator(enum)
      }

      // `val` in generators was removed from the language entirely.
      override def visitGenerator(gen: ScGenerator) {
        gen.valKeyword match {
          case Some(valKeyword) =>
            val annotation = holder.createWarningAnnotation(valKeyword, ScalaBundle.message("generator.val.keyword.removed"))
            annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
            annotation.registerFix(new RemoveValFromGeneratorIntentionAction(gen))
          case _ =>
        }
        super.visitGenerator(gen)
      }

      override def visitGenericCallExpression(call: ScGenericCall) {
        //todo: if (typeAware) checkGenericCallExpression(call, holder)
        super.visitGenericCallExpression(call)
      }

      override def visitTypeElement(te: ScTypeElement) {
        checkTypeElementForm(te, holder)
        super.visitTypeElement(te)
      }

      override def visitLiteral(l: ScLiteral) {
        l match {
          case interpolated: ScInterpolatedStringLiteral if l.getFirstChild != null =>
            highlightWrongInterpolatedString(interpolated, holder)
          case _ if l.getFirstChild.getNode.getElementType == ScalaTokenTypes.tINTEGER => // the literal is a tINTEGER
            checkIntegerLiteral(l, holder)
          case _ =>
        }

        if (MultilineStringUtil.isTooLongStringLiteral(l)) {
          holder.createErrorAnnotation(l, ScalaBundle.message("too.long.string.literal"))
        }

        super.visitLiteral(l)
      }

      override def visitAnnotation(annotation: ScAnnotation) {
        checkAnnotationType(annotation, holder)
        checkMetaAnnotation(annotation, holder)
        PrivateBeanProperty.annotate(annotation, holder)
        super.visitAnnotation(annotation)
      }

      override def visitForExpression(expr: ScForStatement) {
        registerUsedImports(expr, ScalaPsiUtil.getExprImports(expr))
        super.visitForExpression(expr)
      }

      override def visitVariableDefinition(varr: ScVariableDefinition) {
        annotateVariableDefinition(varr, holder, typeAware)
        super.visitVariableDefinition(varr)
      }

      override def visitVariableDeclaration(varr: ScVariableDeclaration) {
        checkAbstractMemberPrivateModifier(varr, varr.declaredElements.map(_.nameId), holder)
        super.visitVariableDeclaration(varr)
      }

      override def visitTypedStmt(stmt: ScTypedStmt) {
        annotateTypedStatement(stmt, holder, typeAware)
        super.visitTypedStmt(stmt)
      }

      override def visitPatternDefinition(pat: ScPatternDefinition) {
        if (!compiled) {
          annotatePatternDefinition(pat, holder, typeAware)
        }
        super.visitPatternDefinition(pat)
      }

      override def visitPattern(pat: ScPattern) {
        annotatePattern(pat, holder, typeAware)
        super.visitPattern(pat)
      }

      override def visitMethodCallExpression(call: ScMethodCall) {
        registerUsedImports(call, call.getImportsUsed)
        if (typeAware) annotateMethodInvocation(call, holder)
        super.visitMethodCallExpression(call)
      }

      override def visitInfixExpression(infix: ScInfixExpr): Unit = {
        if (typeAware) annotateMethodInvocation(infix, holder)
        super.visitInfixExpression(infix)
      }

      override def visitSelfInvocation(self: ScSelfInvocation) {
        checkSelfInvocation(self, holder)
        super.visitSelfInvocation(self)
      }

      override def visitConstrBlock(constr: ScConstrBlock) {
        annotateAuxiliaryConstructor(constr, holder)
        super.visitConstrBlock(constr)
      }

      override def visitParameter(parameter: ScParameter) {
        annotateParameter(parameter, holder)
        super.visitParameter(parameter)
      }

      override def visitCatchBlock(c: ScCatchBlock) {
        checkCatchBlockGeneralizedRule(c, holder, typeAware)
        super.visitCatchBlock(c)
      }

      override def visitFunctionDefinition(fun: ScFunctionDefinition) {
        if (!compiled && !fun.isConstructor)
          annotateFunction(fun, holder, typeAware)
        super.visitFunctionDefinition(fun)
      }

      override def visitFunctionDeclaration(fun: ScFunctionDeclaration) {
        checkAbstractMemberPrivateModifier(fun, Seq(fun.nameId), holder)
        super.visitFunctionDeclaration(fun)
      }

      override def visitFunction(fun: ScFunction) {
        // Override checks only make sense for members of a template body.
        if (typeAware && !compiled && fun.getParent.isInstanceOf[ScTemplateBody]) {
          checkOverrideMethods(fun, holder, isInSources)
        }
        if (!fun.isConstructor) checkFunctionForVariance(fun, holder)
        super.visitFunction(fun)
      }

      override def visitAssignmentStatement(stmt: ScAssignStmt) {
        annotateAssignment(stmt, holder, typeAware)
        super.visitAssignmentStatement(stmt)
      }

      override def visitTypeProjection(proj: ScTypeProjection) {
        referencePart(proj)
        visitTypeElement(proj)
      }

      override def visitUnderscoreExpression(under: ScUnderscoreSection) {
        checkUnboundUnderscore(under, holder)
      }

      // Common handling for references: resolve-based annotation plus the
      // qualified/unqualified resolution checks.
      private def referencePart(ref: ScReferenceElement) {
        if (typeAware) annotateReference(ref, holder)
        ref.qualifier match {
          case None => checkNotQualifiedReferenceElement(ref, holder)
          case Some(_) => checkQualifiedReferenceElement(ref, holder)
        }
      }

      override def visitReference(ref: ScReferenceElement) {
        referencePart(ref)
        super.visitReference(ref)
      }

      override def visitImportExpr(expr: ScImportExpr) {
        checkImportExpr(expr, holder)
        super.visitImportExpr(expr)
      }

      override def visitReturnStatement(ret: ScReturnStmt) {
        checkExplicitTypeForReturnStatement(ret, holder)
        super.visitReturnStatement(ret)
      }

      override def visitConstructor(constr: ScConstructor) {
        if (typeAware) annotateConstructor(constr, holder)
        super.visitConstructor(constr)
      }

      override def visitModifierList(modifierList: ScModifierList) {
        ModifierChecker.checkModifiers(modifierList, holder)
        super.visitModifierList(modifierList)
      }

      override def visitParameters(parameters: ScParameters) {
        annotateParameters(parameters, holder)
        super.visitParameters(parameters)
      }

      override def visitTypeDefinition(typedef: ScTypeDefinition) {
        super.visitTypeDefinition(typedef)
      }

      override def visitExistentialTypeElement(exist: ScExistentialTypeElement): Unit = {
        if (isInSources) Stats.trigger("scala.existential.type")
        super.visitExistentialTypeElement(exist)
      }

      override def visitTypeAlias(alias: ScTypeAlias) {
        if (typeAware && !compiled && alias.getParent.isInstanceOf[ScTemplateBody]) {
          checkOverrideTypes(alias, holder)
        }
        if(!compoundType(alias)) checkBoundsVariance(alias, holder, alias.nameId, alias, checkTypeDeclaredSameBracket = false)
        super.visitTypeAlias(alias)
      }

      override def visitVariable(varr: ScVariable) {
        if (typeAware && !compiled && (varr.getParent.isInstanceOf[ScTemplateBody] ||
          varr.getParent.isInstanceOf[ScEarlyDefinitions])) {
          checkOverrideVars(varr, holder, isInSources)
        }
        varr.typeElement match {
          case Some(typ) => checkBoundsVariance(varr, holder, typ, varr, checkTypeDeclaredSameBracket = false)
          case _ =>
        }
        // A var is both read and written, so its type must be invariant:
        // check it against both variances unless @uncheckedVariance is present.
        if (!childHasAnnotation(varr.typeElement, "uncheckedVariance")) {
          checkValueAndVariableVariance(varr, Covariant, varr.declaredElements, holder)
          checkValueAndVariableVariance(varr, Contravariant, varr.declaredElements, holder)
        }
        super.visitVariable(varr)
      }

      override def visitValueDeclaration(v: ScValueDeclaration) {
        checkAbstractMemberPrivateModifier(v, v.declaredElements.map(_.nameId), holder)
        super.visitValueDeclaration(v)
      }

      override def visitValue(v: ScValue) {
        if (typeAware && !compiled && (v.getParent.isInstanceOf[ScTemplateBody] ||
          v.getParent.isInstanceOf[ScEarlyDefinitions])) {
          checkOverrideVals(v, holder, isInSources)
        }
        v.typeElement match {
          case Some(typ) => checkBoundsVariance(v, holder, typ, v, checkTypeDeclaredSameBracket = false)
          case _ =>
        }
        // A val is read-only, so only the covariant position is checked.
        if (!childHasAnnotation(v.typeElement, "uncheckedVariance")) {
          checkValueAndVariableVariance(v, Covariant, v.declaredElements, holder)
        }
        super.visitValue(v)
      }

      override def visitClassParameter(parameter: ScClassParameter) {
        if (typeAware && !compiled) {
          checkOverrideClassParameters(parameter, holder)
        }
        super.visitClassParameter(parameter)
      }

      override def visitClass(cl: ScClass): Unit = {
        if (typeAware && ValueClassType.extendsAnyVal(cl)) annotateValueClass(cl, holder)
        super.visitClass(cl)
      }
    }
    annotateScope(element, holder)
    element.accept(visitor)
    AnnotatorHighlighter.highlightElement(element, holder)

    // Phase 3: checks that apply to the element itself rather than its children.
    element match {
      case templateDefinition: ScTemplateDefinition =>
        checkBoundsVariance(templateDefinition, holder, templateDefinition.nameId, templateDefinition.nameId, Covariant)

        val tdParts = Seq(AbstractInstantiation, FinalClassInheritance, IllegalInheritance, ObjectCreationImpossible,
          MultipleInheritance, NeedsToBeAbstract, NeedsToBeMixin, NeedsToBeTrait, SealedClassInheritance, UndefinedMember)
        tdParts.foreach(_.annotate(templateDefinition, holder, typeAware))
        templateDefinition match {
          case cls: ScClass =>
            val clsParts = Seq(CaseClassWithoutParamList)
            clsParts.foreach(_.annotate(cls, holder, typeAware))
          case trt: ScTrait =>
            val traitParts = Seq(TraitHasImplicitBound)
            traitParts.foreach(_.annotate(trt, holder, typeAware))
          case _ =>
        }
      case _ =>
    }

    element match {
      case sTypeParam: ScTypeBoundsOwner =>
        checkTypeParamBounds(sTypeParam, holder)
      case _ =>
    }
    //todo: super[ControlFlowInspections].annotate(element, holder)
  }
private def checkMetaAnnotation(annotation: ScAnnotation, holder: AnnotationHolder): Unit = {
import ScalaProjectSettings.ScalaMetaMode
import scala.meta.intellij.psiExt._
if (annotation.isMetaMacro) {
if (!MetaExpansionsManager.isUpToDate(annotation)) {
val warning = holder.createWarningAnnotation(annotation, ScalaBundle.message("scala.meta.recompile"))
warning.registerFix(new RecompileAnnotationAction(annotation))
}
val result = annotation.parent.flatMap(_.parent) match {
case Some(ah: ScAnnotationsHolder) => ah.getMetaExpansion
case _ => Right("")
}
val settings = ScalaProjectSettings.getInstance(annotation.getProject)
result match {
case Left(errorMsg) if settings.getScalaMetaMode == ScalaMetaMode.Enabled =>
holder.createErrorAnnotation(annotation, ScalaBundle.message("scala.meta.expandfailed", errorMsg))
case _ =>
}
}
}
def isAdvancedHighlightingEnabled(element: PsiElement): Boolean = {
if (!HighlightingAdvisor.getInstance(element.getProject).enabled) return false
element.getContainingFile match {
case file: ScalaFile =>
if (file.isCompiled) return false
val vFile = file.getVirtualFile
if (vFile != null && ProjectFileIndex.SERVICE.getInstance(element.getProject).isInLibrarySource(vFile)) return false
case _ =>
}
val containingFile = element.getContainingFile
def calculate(): mutable.HashSet[TextRange] = {
val text = containingFile.getText
val indexes = new ArrayBuffer[Int]
var lastIndex = 0
while (text.indexOf("/*_*/", lastIndex) >= 0) {
lastIndex = text.indexOf("/*_*/", lastIndex) + 5
indexes += lastIndex
}
if (indexes.isEmpty) return mutable.HashSet.empty
if (indexes.length % 2 != 0) indexes += text.length
val res = new mutable.HashSet[TextRange]
for (i <- indexes.indices by 2) {
res += new TextRange(indexes(i), indexes(i + 1))
}
res
}
var data = containingFile.getUserData(ScalaAnnotator.ignoreHighlightingKey)
val count = containingFile.getManager.getModificationTracker.getModificationCount
if (data == null || data._1 != count) {
data = (count, calculate())
containingFile.putUserData(ScalaAnnotator.ignoreHighlightingKey, data)
}
val noCommentWhitespace = element.children.filter {
case _: PsiComment | _: PsiWhiteSpace => false
case _ => true
}
val offset =
noCommentWhitespace.headOption
.map(_.getTextOffset)
.getOrElse(element.getTextOffset)
data._2.forall(!_.contains(offset))
}
  /** Validates a `catch` block written as an arbitrary expression (SLS "generalized"
    * rule): the expression must behave like a `PartialFunction[Throwable, R]`, i.e.
    * provide `isDefinedAt(Throwable): Boolean` and `apply(Throwable)` whose result
    * type conforms to the expected type of the enclosing `try`. */
  def checkCatchBlockGeneralizedRule(block: ScCatchBlock, holder: AnnotationHolder, typeAware: Boolean) {
    block.expression match {
      case Some(expr) =>
        val tp = expr.`type`().getOrAny
        val throwable = ScalaPsiManager.instance(expr.getProject).getCachedClass(expr.resolveScope, "java.lang.Throwable").orNull
        // Without java.lang.Throwable on the classpath nothing can be checked.
        if (throwable == null) return
        val throwableType = ScDesignatorType(throwable)
        // Resolves `memberName(Throwable)` on the catch expression's type and
        // annotates errors: missing member, non-Boolean isDefinedAt, or an
        // apply result that does not conform to the try's expected type.
        def checkMember(memberName: String, checkReturnTypeIsBoolean: Boolean) {
          val processor = new MethodResolveProcessor(expr, memberName, List(Seq(new Compatibility.Expression(throwableType))),
            Seq.empty, Seq.empty)
          processor.processType(tp, expr)
          val candidates = processor.candidates
          if (candidates.length != 1) {
            val error = ScalaBundle.message("method.is.not.member", memberName, tp.presentableText)
            val annotation = holder.createErrorAnnotation(expr, error)
            annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
          } else if (checkReturnTypeIsBoolean) {
            val maybeType = candidates(0) match {
              case ScalaResolveResult(fun: ScFunction, subst) => fun.returnType.map(subst.subst).toOption
              case _ => None
            }

            if (!maybeType.exists(_.equiv(Boolean))) {
              val error = ScalaBundle.message("expected.type.boolean", memberName)
              val annotation = holder.createErrorAnnotation(expr, error)
              annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
            }
          } else {
            block.getContext match {
              case t: ScTryStmt =>
                t.expectedTypeEx(fromUnderscore = false) match {
                  case Some((tp: ScType, _)) if tp equiv Unit => //do nothing
                  case Some((tp: ScType, typeElement)) =>
                    val returnType = candidates(0) match {
                      case ScalaResolveResult(fun: ScFunction, subst) => fun.returnType.map(subst.subst)
                      case _ => return
                    }
                    val conformance = smartCheckConformance(Right(tp), returnType)
                    if (!conformance) {
                      if (typeAware) {
                        val (retTypeText, expectedTypeText) = ScTypePresentation.different(returnType.getOrNothing, tp)
                        val error = ScalaBundle.message("expr.type.does.not.conform.expected.type", retTypeText, expectedTypeText)
                        val annotation: Annotation = holder.createErrorAnnotation(expr, error)
                        annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
                        typeElement match {
                          //Don't highlight te if it's outside of original file.
                          case Some(te) if te.containingFile == t.containingFile =>
                            val fix = new ChangeTypeFix(te, returnType.getOrNothing)
                            annotation.registerFix(fix)
                            // Invisible annotation so the fix is also offered on
                            // the expected-type element itself.
                            val teAnnotation = annotationWithoutHighlighting(holder, te)
                            teAnnotation.registerFix(fix)
                          case _ =>
                        }
                      }
                    }
                  case _ => //do nothing
                }
              case _ =>
            }
          }
        }
        checkMember("isDefinedAt", checkReturnTypeIsBoolean = true)
        checkMember("apply", checkReturnTypeIsBoolean = false)
      case _ =>
    }
  }
private def annotationWithoutHighlighting(holder: AnnotationHolder, te: PsiElement): Annotation = {
val teAnnotation = holder.createErrorAnnotation(te, null)
teAnnotation.setHighlightType(ProblemHighlightType.INFORMATION)
val emptyAttr = new TextAttributes()
teAnnotation.setEnforcedTextAttributes(emptyAttr)
teAnnotation
}
private def checkTypeParamBounds(sTypeParam: ScTypeBoundsOwner, holder: AnnotationHolder) {
for {
lower <- sTypeParam.lowerBound.toOption
upper <- sTypeParam.upperBound.toOption
if !lower.conforms(upper)
annotation = holder.createErrorAnnotation(sTypeParam,
ScalaBundle.message("lower.bound.conform.to.upper", upper, lower))
} annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
}
  /** Records that `element` uses the declaration it resolved to, feeding the
    * unused-declaration analysis (ScalaRefCountHolder). Reads are registered as
    * ReadValueUsed, assignments (when `checkWrite`) as WriteValueUsed. Only
    * same-file, non-recursive usages are recorded. */
  private def registerUsedElement(element: PsiElement, resolveResult: ScalaResolveResult,
                                  checkWrite: Boolean) {
    // Unwrap light (synthetic) named elements to the real PSI declaration.
    val named = resolveResult.getActualElement match {
      case isLightScNamedElement(e) => e
      case e => e
    }
    val file = element.getContainingFile
    if (named.isValid && named.getContainingFile == file &&
      !PsiTreeUtil.isAncestor(named, element, true)) { //to filter recursive usages
      val value: ValueUsed = element match {
        case ref: ScReferenceExpression if checkWrite &&
          ScalaPsiUtil.isPossiblyAssignment(ref.asInstanceOf[PsiElement]) => WriteValueUsed(named)
        case _ => ReadValueUsed(named)
      }
      val holder = ScalaRefCountHolder.getInstance(file)
      holder.registerValueUsed(value)
      // For use of unapply method, see SCL-3463
      resolveResult.parentElement.foreach(parent => holder.registerValueUsed(ReadValueUsed(parent)))
    }
  }
def checkBoundsVariance(toCheck: PsiElement, holder: AnnotationHolder, toHighlight: PsiElement, checkParentOf: PsiElement,
upperV: Variance = Covariant, checkTypeDeclaredSameBracket: Boolean = true, insideParameterized: Boolean = false) {
toCheck match {
case boundOwner: ScTypeBoundsOwner =>
checkAndHighlightBounds(boundOwner.upperTypeElement, upperV)
checkAndHighlightBounds(boundOwner.lowerTypeElement, -upperV)
case _ =>
}
toCheck match {
case paramOwner: ScTypeParametersOwner =>
val inParameterized = if (paramOwner.isInstanceOf[ScTemplateDefinition]) false else true
for (param <- paramOwner.typeParameters) {
checkBoundsVariance(param, holder, param.nameId, checkParentOf, -upperV, insideParameterized = inParameterized)
}
case _ =>
}
def checkAndHighlightBounds(boundOption: Option[ScTypeElement], expectedVariance: Variance) {
boundOption match {
case Some(bound) if !childHasAnnotation(Some(bound), "uncheckedVariance") =>
checkVariance(bound.calcType, expectedVariance, toHighlight, checkParentOf, holder, checkTypeDeclaredSameBracket, insideParameterized)
case _ =>
}
}
}
  /** Annotates an unqualified reference: reports unresolved names (with import /
    * create-from-usage quick fixes), detects illegal forward references to
    * strict vals/vars, registers used imports and declarations, and checks
    * access rights. Ambiguous Scaladoc links (`goodDoc`) are tolerated. */
  private def checkNotQualifiedReferenceElement(refElement: ScReferenceElement, holder: AnnotationHolder) {
    refElement match {
      case _: ScInterpolatedStringPartReference =>
        return //do not inspect interpolated literal, it will be highlighted in other place
      case _ =>
    }

    // Import fixes for the unresolved name, computed lazily by processError.
    def getFixes: Seq[IntentionAction] = {
      val classes = ScalaImportTypeFix.getTypesToImport(refElement, refElement.getProject)
      if (classes.length == 0) return Seq.empty
      Seq[IntentionAction](new ScalaImportTypeFix(classes, refElement))
    }

    val resolve: Array[ResolveResult] = refElement.multiResolve(false)

    // Emits the "cannot resolve" error; suppressed when there are no fixes and
    // errors are not to be counted, or for references outside project sources.
    def processError(countError: Boolean, fixes: => Seq[IntentionAction]) {
      //todo remove when resolve of unqualified expression will be fully implemented
      if (refElement.getManager.isInProject(refElement) && resolve.length == 0 &&
        (fixes.nonEmpty || countError)) {
        val error = ScalaBundle.message("cannot.resolve", refElement.refName)
        val annotation = holder.createErrorAnnotation(refElement.nameId, error)
        annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
        registerAddFixes(refElement, annotation, fixes: _*)
        annotation.registerFix(ReportHighlightingErrorQuickFix)
        registerCreateFromUsageFixesFor(refElement, annotation)
      }
    }

    if (refElement.isSoft) {
      return
    }

    // Scaladoc links may legitimately resolve to several targets.
    val goodDoc = refElement.isInstanceOf[ScDocResolvableCodeReference] && resolve.length > 1
    if (resolve.length != 1 && !goodDoc) {
      if (resolve.length == 0) { //Let's try to hide dynamic named parameter usage
        refElement match {
          case e: ScReferenceExpression =>
            e.getContext match {
              case a: ScAssignStmt if a.getLExpression == e && a.isDynamicNamedAssignment => return
              case _ =>
            }
          case _ =>
        }
      }

      // Operator positions (prefix/infix) are handled elsewhere or suppressed.
      refElement match {
        case e: ScReferenceExpression if e.getParent.isInstanceOf[ScPrefixExpr] &&
          e.getParent.asInstanceOf[ScPrefixExpr].operation == e => //todo: this is hide !(Not Boolean)
        case e: ScReferenceExpression if e.getParent.isInstanceOf[ScInfixExpr] &&
          e.getParent.asInstanceOf[ScInfixExpr].operation == e => //todo: this is hide A op B
        case _: ScReferenceExpression => processError(countError = false, fixes = getFixes)
        case e: ScStableCodeReferenceElement if e.getParent.isInstanceOf[ScInfixPattern] &&
          e.getParent.asInstanceOf[ScInfixPattern].reference == e => //todo: this is hide A op B in patterns
        case _ => refElement.getParent match {
          case _: ScImportSelector if resolve.length > 0 =>
          case _ => processError(countError = true, fixes = getFixes)
        }
      }
    } else {
      AnnotatorHighlighter.highlightReferenceElement(refElement, holder)

      // Forward-reference detection: referencing a strict val/var declared
      // later in the same scope is an error (unless the file allows it).
      def showError(): Unit = {
        val error = ScalaBundle.message("forward.reference.detected")
        val annotation = holder.createErrorAnnotation(refElement.nameId, error)
        annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
      }

      refElement.getContainingFile match {
        case file: ScalaFile if !file.allowsForwardReferences =>
          resolve(0) match {
            case r: ScalaResolveResult if r.isForwardReference =>
              ScalaPsiUtil.nameContext(r.getActualElement) match {
                case v: ScValue if !v.hasModifierProperty("lazy") => showError()
                case _: ScVariable => showError()
                case nameContext =>
                  //if it has not lazy val or var between reference and statement then it's forward reference
                  val context = PsiTreeUtil.findCommonContext(refElement, nameContext)
                  if (context != null) {
                    // The sibling of the referenced statement at the common
                    // context level, just before it.
                    val neighbour = (PsiTreeUtil.findFirstContext(nameContext, false, new Condition[PsiElement] {
                      override def value(elem: PsiElement): Boolean = elem.getContext.eq(context)
                    }) match {
                      case s: ScalaPsiElement => s.getDeepSameElementInContext
                      case elem => elem
                    }).getPrevSibling

                    // Walks backwards looking for a strict val/var between the
                    // reference and the declaration.
                    def check(neighbour: PsiElement): Boolean = {
                      if (neighbour == null ||
                        neighbour.getTextRange.getStartOffset <= refElement.getTextRange.getStartOffset) return false
                      neighbour match {
                        case v: ScValue if !v.hasModifierProperty("lazy") => true
                        case _: ScVariable => true
                        case _ => check(neighbour.getPrevSibling)
                      }
                    }
                    if (check(neighbour)) showError()
                  }
              }
            case _ =>
          }
        case _ =>
      }
    }

    // Record used imports/declarations for the unused-symbol analysis.
    for {
      result <- resolve if result.isInstanceOf[ScalaResolveResult]
      scalaResult = result.asInstanceOf[ScalaResolveResult]
    } {
      registerUsedImports(refElement, scalaResult)
      registerUsedElement(refElement, scalaResult, checkWrite = true)
    }

    checkAccessForReference(resolve, refElement, holder)

    // Highlight implicit conversions applied to prefix/infix operands.
    if (resolve.length == 1) {
      val resolveResult = resolve(0).asInstanceOf[ScalaResolveResult]
      refElement match {
        case e: ScReferenceExpression if e.getParent.isInstanceOf[ScPrefixExpr] &&
          e.getParent.asInstanceOf[ScPrefixExpr].operation == e =>
          resolveResult.implicitFunction match {
            case Some(fun) =>
              val pref = e.getParent.asInstanceOf[ScPrefixExpr]
              val expr = pref.operand
              highlightImplicitMethod(expr, resolveResult, refElement, fun, holder)
            case _ =>
          }
        case e: ScReferenceExpression if e.getParent.isInstanceOf[ScInfixExpr] &&
          e.getParent.asInstanceOf[ScInfixExpr].operation == e =>
          resolveResult.implicitFunction match {
            case Some(fun) =>
              val inf = e.getParent.asInstanceOf[ScInfixExpr]
              val expr = inf.getBaseExpr
              highlightImplicitMethod(expr, resolveResult, refElement, fun, holder)
            case _ =>
          }
        case _ =>
      }
    }

    // Type-aware unresolved handling: prefer apply/unapply-specific messages
    // and fixes when only the call form failed to resolve.
    if (isAdvancedHighlightingEnabled(refElement) && resolve.length != 1 && !goodDoc) {
      val parent = refElement.getParent
      def addCreateApplyOrUnapplyFix(messageKey: String, fix: ScTypeDefinition => IntentionAction): Boolean = {
        val refWithoutArgs = ScalaPsiElementFactory.createReferenceFromText(refElement.getText, parent.getContext, parent)
        if (refWithoutArgs != null && refWithoutArgs.multiResolve(false).exists(!_.getElement.isInstanceOf[PsiPackage])) {
          // We can't resolve the method call A(arg1, arg2), but we can resolve A. Highlight this differently.
          val error = ScalaBundle.message(messageKey, refElement.refName)
          val annotation = holder.createErrorAnnotation(refElement.nameId, error)
          annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
          annotation.registerFix(ReportHighlightingErrorQuickFix)
          refWithoutArgs match {
            case ResolvesTo(obj: ScObject) => annotation.registerFix(fix(obj))
            case InstanceOfClass(td: ScTypeDefinition) => annotation.registerFix(fix(td))
            case _ =>
          }
          true
        } else false
      }

      parent match {
        case _: ScImportSelector if resolve.length > 0 => return
        case mc: ScMethodCall =>
          val messageKey = "cannot.resolve.apply.method"
          if (addCreateApplyOrUnapplyFix(messageKey, td => new CreateApplyQuickFix(td, mc))) return
        case Both(p: ScPattern, (_: ScConstructorPattern | _: ScInfixPattern)) =>
          val messageKey = "cannot.resolve.unapply.method"
          if (addCreateApplyOrUnapplyFix(messageKey, td => new CreateUnapplyQuickFix(td, p))) return
        case scalaDocTag: ScDocTag if scalaDocTag.getName == MyScaladocParsing.THROWS_TAG => return //see SCL-9490
        case _ =>
      }

      val error = ScalaBundle.message("cannot.resolve", refElement.refName)
      val annotation = holder.createErrorAnnotation(refElement.nameId, error)
      annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
      annotation.registerFix(ReportHighlightingErrorQuickFix)
      registerCreateFromUsageFixesFor(refElement, annotation)
      if (PsiTreeUtil.getParentOfType(refElement, classOf[ScImportExpr]) != null)
        annotation.registerFix(new AddSbtDependencyFix(SmartPointerManager.getInstance(refElement.getProject).createSmartPsiElementPointer(refElement)))
    }
  }
/** Highlights the qualifier of a reference that resolved through an implicit conversion. */
private def highlightImplicitMethod(expr: ScExpression, resolveResult: ScalaResolveResult, refElement: ScReferenceElement,
                                    fun: PsiNamedElement, holder: AnnotationHolder) {
  // Fall back to Any when the conversion's target type could not be computed.
  val typeTo = resolveResult.implicitType.getOrElse(Any)
  highlightImplicitView(expr, fun, typeTo, refElement.nameId, holder)
}
/** Colors an element with the "implicit conversion" text attributes (info-level, no message). */
private def highlightImplicitView(expr: ScExpression, fun: PsiNamedElement, typeTo: ScType,
                                  elementToHighlight: PsiElement, holder: AnnotationHolder) {
  val annotation: Annotation = holder.createInfoAnnotation(elementToHighlight.getTextRange, null)
  annotation.setTextAttributes(DefaultHighlighter.IMPLICIT_CONVERSIONS)
  annotation.setAfterEndOfLine(false)
}
/** Reports a `this(...)` invocation that does not bind to any constructor. */
private def checkSelfInvocation(self: ScSelfInvocation, holder: AnnotationHolder) {
  // Only complain under advanced highlighting; a missing binding is otherwise tolerated.
  if (self.bind.isEmpty && isAdvancedHighlightingEnabled(self)) {
    val annotation: Annotation = holder.createErrorAnnotation(self.thisElement,
      "Cannot find constructor for this call")
    annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
  }
}
/**
 * Annotates a qualified reference: registers used imports/elements for each Scala
 * resolve result, checks accessibility, highlights an applied implicit conversion on
 * the qualifier, and (advanced highlighting only) reports unresolved references.
 */
private def checkQualifiedReferenceElement(refElement: ScReferenceElement, holder: AnnotationHolder) {
  AnnotatorHighlighter.highlightReferenceElement(refElement, holder)
  // Fixed anti-idiom: `var resolve = null; resolve = ...` — the value is assigned
  // exactly once, so a single val is sufficient.
  val resolve: Array[ResolveResult] = refElement.multiResolve(false)
  for (result <- resolve if result.isInstanceOf[ScalaResolveResult];
       scalaResult = result.asInstanceOf[ScalaResolveResult]) {
    registerUsedImports(refElement, scalaResult)
    registerUsedElement(refElement, scalaResult, checkWrite = true)
  }
  checkAccessForReference(resolve, refElement, holder)
  // A unique resolve result with an implicit conversion: color the qualifier.
  if (refElement.isInstanceOf[ScExpression] && resolve.length == 1) {
    val resolveResult = resolve(0).asInstanceOf[ScalaResolveResult]
    resolveResult.implicitFunction match {
      case Some(fun) =>
        val qualifier = refElement.qualifier.get
        val expr = qualifier.asInstanceOf[ScExpression]
        highlightImplicitMethod(expr, resolveResult, refElement, fun, holder)
      case _ =>
    }
  }
  // Resolved Scaladoc references and "soft" references are never reported as errors.
  if (refElement.isInstanceOf[ScDocResolvableCodeReference] && resolve.length > 0 || refElement.isSoft) return
  if (isAdvancedHighlightingEnabled(refElement) && resolve.length != 1) {
    // Exactly one Scala result among several candidates still counts as resolved.
    if (resolve.count(_.isInstanceOf[ScalaResolveResult]) == 1) {
      return
    }
    refElement.getParent match {
      case _: ScImportSelector | _: ScImportExpr if resolve.length > 0 => return
      case _ =>
    }
    val error = ScalaBundle.message("cannot.resolve", refElement.refName)
    val annotation = holder.createErrorAnnotation(refElement.nameId, error)
    annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
    annotation.registerFix(ReportHighlightingErrorQuickFix)
    registerCreateFromUsageFixesFor(refElement, annotation)
    // An unresolved name inside an import may indicate a missing sbt dependency.
    if (PsiTreeUtil.getParentOfType(refElement, classOf[ScImportExpr]) != null)
      annotation.registerFix(new AddSbtDependencyFix(SmartPointerManager.getInstance(refElement.getProject).createSmartPsiElementPointer(refElement)))
  }
}
/** Reports a uniquely-resolved reference whose target is not accessible from this place. */
private def checkAccessForReference(resolve: Array[ResolveResult], refElement: ScReferenceElement, holder: AnnotationHolder) {
  // Accessibility is only meaningful for a single, non-soft, non-Scaladoc resolve result.
  val skip = resolve.length != 1 || refElement.isSoft || refElement.isInstanceOf[ScDocResolvableCodeReferenceImpl]
  if (skip) return
  resolve(0) match {
    case r: ScalaResolveResult if !r.isAccessible =>
      val annotation = holder.createErrorAnnotation(
        refElement.nameId,
        "Symbol %s is inaccessible from this place".format(r.element.name))
      annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
    //todo: add fixes
    case _ =>
  }
}
/**
 * Annotates an interpolated string literal. If the prefix reference (e.g. `s`, `f`, a
 * custom interpolator) does not resolve, the prefix itself is marked. Otherwise the
 * literal is desugared to its StringContext call and that call is annotated, with
 * error ranges mapped back onto the original injections.
 */
private def highlightWrongInterpolatedString(l: ScInterpolatedStringLiteral, holder: AnnotationHolder) {
  val ref = l.findReferenceAt(0) match {
    case r: ScInterpolatedStringPartReference => r
    case _ => return
  }
  val prefix = l.getFirstChild
  val injections = l.getInjections
  def annotateBadPrefix(key: String) {
    val annotation = holder.createErrorAnnotation(prefix.getTextRange,
      ScalaBundle.message(key, prefix.getText))
    annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
  }
  def annotateDesugared(): Unit = {
    // Maps an offset inside the synthetic argument list back to the injection it came from.
    val elementsMap = mutable.HashMap[Int, PsiElement]()
    val params = new mutable.StringBuilder("(")
    injections.foreach { i =>
      elementsMap += params.length -> i
      params.append(i.getText).append(",")
    }
    // Replace the trailing comma with ')' — or just close the list when there were no injections.
    if (injections.length > 0) params.setCharAt(params.length - 1, ')') else params.append(')')
    val (expr, ref, shift) = l.getStringContextExpression match {
      case Some(mc @ ScMethodCall(invoked: ScReferenceExpression, _)) =>
        // shift = offset where the synthetic argument list starts in the desugared expression.
        val shift = invoked.getTextRange.getEndOffset
        (mc, invoked, shift)
      case _ => return
    }
    // Delegates real error creation to the outer holder, re-anchoring ranges onto the
    // original injection (falling back to the prefix when no injection matches).
    val fakeAnnotator = new AnnotationHolderImpl(new AnnotationSession(expr.getContainingFile)) {
      override def createErrorAnnotation(elt: PsiElement, message: String): Annotation =
        createErrorAnnotation(elt.getTextRange, message)
      override def createErrorAnnotation(range: TextRange, message: String): Annotation = {
        holder.createErrorAnnotation(elementsMap.getOrElse(range.getStartOffset - shift, prefix), message)
      }
    }
    annotateReference(ref, fakeAnnotator)
  }
  ref.bind() match {
    case Some(srr) =>
      registerUsedImports(ref, srr)
      annotateDesugared()
    case None =>
      annotateBadPrefix("cannot.resolve.in.StringContext")
  }
}
/** Attaches every proposed quick-fix to the annotation. */
private def registerAddFixes(refElement: ScReferenceElement, annotation: Annotation, actions: IntentionAction*) {
  actions.foreach(annotation.registerFix)
}
/**
 * Checks that an expression's type (after implicit conversion) conforms to its expected
 * type and reports a conformance error with quick-fixes when it does not. Runs once for
 * the plain expression and additionally for its underscore-function form when relevant.
 */
private def checkExpressionType(expr: ScExpression, holder: AnnotationHolder, typeAware: Boolean): Unit = {
  def checkExpressionTypeInner(fromUnderscore: Boolean) {
    val ExpressionTypeResult(exprType, importUsed, implicitFunction) =
      expr.getTypeAfterImplicitConversion(expectedOption = expr.smartExpectedType(fromUnderscore), fromUnderscore = fromUnderscore)
    registerUsedImports(expr, importUsed)
    expr match {
      // These expression kinds are checked via their branches/last statements, not as a whole.
      case _: ScMatchStmt =>
      case bl: ScBlock if bl.lastStatement.isDefined =>
      case i: ScIfStmt if i.elseBranch.isDefined =>
      case _: ScFunctionExpr =>
      case _: ScTryStmt =>
      case _ =>
        // Positions where conformance is checked elsewhere (arguments, named params, etc.).
        expr.getParent match {
          case a: ScAssignStmt if a.getRExpression.contains(expr) && a.isDynamicNamedAssignment => return
          case _: ScArgumentExprList => return
          case inf: ScInfixExpr if inf.getArgExpr == expr => return
          case tuple: ScTuple if tuple.getContext.isInstanceOf[ScInfixExpr] &&
            tuple.getContext.asInstanceOf[ScInfixExpr].getArgExpr == tuple => return
          case e: ScParenthesisedExpr if e.getContext.isInstanceOf[ScInfixExpr] &&
            e.getContext.asInstanceOf[ScInfixExpr].getArgExpr == e => return
          case t: ScTypedStmt if t.isSequenceArg => return
          case parent@(_: ScTuple | _: ScParenthesisedExpr) =>
            parent.getParent match {
              case inf: ScInfixExpr if inf.getArgExpr == parent => return
              case _ =>
            }
          case param: ScParameter =>
            if (!param.isDefaultParam) return //performance optimization
            param.getRealParameterType match {
              case Right(paramType) if paramType.extractClass.isDefined =>
              //do not check generic types. See SCL-3508
              case _ => return
            }
          case ass: ScAssignStmt if ass.isNamedParameter => return //that's checked in application annotator
          case _ =>
        }
        expr.expectedTypeEx(fromUnderscore) match {
          case Some((tp: ScType, _)) if tp equiv Unit => //do nothing
          case Some((tp: ScType, typeElement)) =>
            val expectedType = Right(tp)
            implicitFunction match {
              case Some(_) =>
              //todo:
              /*val typeFrom = expr.getType(TypingContext.empty).getOrElse(Any)
              val typeTo = exprType.getOrElse(Any)
              val exprText = expr.getText
              val range = expr.getTextRange
              showImplicitUsageAnnotation(exprText, typeFrom, typeTo, fun, range, holder,
                EffectType.LINE_UNDERSCORE, Color.LIGHT_GRAY)*/
              case None => //do nothing
            }
            val conformance = smartCheckConformance(expectedType, exprType)
            if (!conformance) {
              if (typeAware) {
                // For blocks, mark the closing brace instead of the whole body.
                val markedPsi = (expr, expr.getParent) match {
                  case (b: ScBlockExpr, _) => b.getRBrace.map(_.getPsi).getOrElse(expr)
                  case (_, b: ScBlockExpr) => b.getRBrace.map(_.getPsi).getOrElse(expr)
                  case _ => expr
                }
                val (exprTypeText, expectedTypeText) = ScTypePresentation.different(exprType.getOrNothing, tp)
                val error = ScalaBundle.message("expr.type.does.not.conform.expected.type", exprTypeText, expectedTypeText)
                val annotation: Annotation = holder.createErrorAnnotation(markedPsi, error)
                annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
                if (WrapInOptionQuickFix.isAvailable(expr, expectedType, exprType)) {
                  val wrapInOptionFix = new WrapInOptionQuickFix(expr, expectedType, exprType)
                  annotation.registerFix(wrapInOptionFix)
                }
                if (AddBreakoutQuickFix.isAvailable(expr)) {
                  annotation.registerFix(new AddBreakoutQuickFix(expr))
                }
                // Also offer to change the declared type element, when it lives in this file.
                typeElement match {
                  case Some(te) if te.getContainingFile == expr.getContainingFile =>
                    val fix = new ChangeTypeFix(te, exprType.getOrNothing)
                    annotation.registerFix(fix)
                    val teAnnotation = annotationWithoutHighlighting(holder, te)
                    teAnnotation.registerFix(fix)
                  case _ =>
                }
              }
            }
          case _ => //do nothing
        }
    }
  }
  if (ScUnderScoreSectionUtil.isUnderscoreFunction(expr)) {
    checkExpressionTypeInner(fromUnderscore = true)
  }
  checkExpressionTypeInner(fromUnderscore = false)
}
/** Registers imports and elements used to satisfy the expression's implicit parameters. */
private def checkExpressionImplicitParameters(expr: ScExpression, holder: AnnotationHolder) {
  expr.findImplicitParameters.foreach { seq =>
    seq.withFilter(_ != null).foreach { resolveResult =>
      registerUsedImports(expr, resolveResult)
      registerUsedElement(expr, resolveResult, checkWrite = false)
    }
  }
}
/**
 * Reports a bare `_` used as the initializer of a variable definition: either as an
 * uninitialized local variable (illegal) or as a default-value placeholder lacking an
 * explicit type annotation.
 */
private def checkUnboundUnderscore(under: ScUnderscoreSection, holder: AnnotationHolder) {
  if (under.getText == "_") {
    under.parentOfType(classOf[ScVariableDefinition], strict = false).foreach {
      case varDef @ ScVariableDefinition.expr(_) if varDef.expr.contains(under) =>
        if (varDef.containingClass == null) {
          // `var x = _` only works for fields; locals must be initialized explicitly.
          val error = ScalaBundle.message("local.variables.must.be.initialized")
          val annotation: Annotation = holder.createErrorAnnotation(under, error)
          annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
        } else if (varDef.typeElement.isEmpty) {
          // A field default `var x = _` requires a declared type.
          val error = ScalaBundle.message("unbound.placeholder.parameter")
          val annotation: Annotation = holder.createErrorAnnotation(under, error)
          annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
        }
      case _ =>
      // TODO SCL-2610 properly detect unbound placeholders, e.g. ( { _; (_: Int) } ) and report them.
      // val error = ScalaBundle.message("unbound.placeholder.parameter")
      // val annotation: Annotation = holder.createErrorAnnotation(under, error)
      // annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
    }
  }
}
/**
 * Validates a `return` statement: errors when it occurs outside a method definition;
 * otherwise registers imports used by the implicit conversion of the returned expression
 * (only for methods defined with `=` whose return type is not Unit).
 */
private def checkExplicitTypeForReturnStatement(statement: ScReturnStmt, holder: AnnotationHolder): Unit = {
  val function = statement.returnFunction.getOrElse {
    val error = ScalaBundle.message("return.outside.method.definition")
    val annotation: Annotation = holder.createErrorAnnotation(statement.returnKeyword, error)
    annotation.setHighlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
    return
  }
  function.returnType match {
    case Right(tp) if function.hasAssign && !tp.equiv(Unit) =>
      val importUsed = statement.expr.toSet[ScExpression]
        .flatMap(_.getTypeAfterImplicitConversion().importsUsed)
      registerUsedImports(statement, importUsed)
    case _ =>
  }
}
/** Reports an import expression without a qualifier (e.g. `import foo` with no dot). */
private def checkImportExpr(impExpr: ScImportExpr, holder: AnnotationHolder) {
  if (impExpr.qualifier == null) {
    val message = ScalaBundle.message("import.expr.should.be.qualified")
    val annotation: Annotation = holder.createErrorAnnotation(impExpr.getTextRange, message)
    annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
  }
}
/** Checks a type element: missing type arguments, and imports used by its implicit parameters. */
private def checkTypeElementForm(typeElement: ScTypeElement, holder: AnnotationHolder) {
  //todo: check bounds conformance for parameterized type
  typeElement match {
    case simpleTypeElement: ScSimpleTypeElement =>
      checkAbsentTypeArgs(simpleTypeElement, holder)
      simpleTypeElement.findImplicitParameters.foreach { parameters =>
        parameters.withFilter(_ != null).foreach(registerUsedImports(typeElement, _))
      }
    case _ =>
  }
}
/**
 * Reports a simple type element that refers to a parameterized class/type but supplies
 * no type arguments, except in positions where omitting them is legal (context bounds,
 * higher-kinded parameters, infix types, etc.) or when macro annotations will add them.
 */
private def checkAbsentTypeArgs(simpleTypeElement: ScSimpleTypeElement, holder: AnnotationHolder): Unit = {
  // Dirty hack(see SCL-12582): we shouldn't complain about missing type args since they will be added by a macro after expansion
  def isFreestyleAnnotated(ah: ScAnnotationsHolder): Boolean = {
    (ah.findAnnotationNoAliases("freestyle.free") != null) ||
      ah.findAnnotationNoAliases("freestyle.module") != null
  }
  def needTypeArgs: Boolean = {
    // True when none of the owner's type parameters is itself parameterized (no higher kinds).
    def noHigherKinds(owner: ScTypeParametersOwner) = !owner.typeParameters.exists(_.typeParameters.nonEmpty)
    val canHaveTypeArgs = simpleTypeElement.reference.map(_.resolve()).exists {
      case ah: ScAnnotationsHolder if isFreestyleAnnotated(ah) => false
      case c: PsiClass => c.hasTypeParameters
      case owner: ScTypeParametersOwner => owner.typeParameters.nonEmpty
      case _ => false
    }
    if (!canHaveTypeArgs) return false
    // Positions in which a bare parameterized type reference is acceptable:
    simpleTypeElement.getParent match {
      case ScParameterizedTypeElement(`simpleTypeElement`, _) => false
      case tp: ScTypeParam if tp.contextBoundTypeElement.contains(simpleTypeElement) => false
      case (_: ScTypeArgs) childOf (gc: ScGenericCall) =>
        gc.referencedExpr match {
          case ResolvesTo(f: ScFunction) => noHigherKinds(f)
          case _ => false
        }
      case (_: ScTypeArgs) childOf (parameterized: ScParameterizedTypeElement) =>
        parameterized.typeElement match {
          case ScSimpleTypeElement(Some(ResolvesTo(owner: ScTypeParametersOwner))) => noHigherKinds(owner)
          case ScSimpleTypeElement(Some(ResolvesTo(ScPrimaryConstructor.ofClass(c)))) => noHigherKinds(c)
          case _ => false
        }
      case infix: ScReferenceableInfixTypeElement if infix.leftTypeElement == simpleTypeElement || infix.rightTypeElement.contains(simpleTypeElement) =>
        infix.reference.resolve() match {
          case owner: ScTypeParametersOwner => noHigherKinds(owner)
          case _ => false
        }
      case _ => true
    }
  }
  if (needTypeArgs) {
    val annotation = holder.createErrorAnnotation(simpleTypeElement.getTextRange,
      ScalaBundle.message("type.takes.type.parameters", simpleTypeElement.getText))
    annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
  }
}
/** Placeholder: annotation-type validation is not implemented yet. */
private def checkAnnotationType(annotation: ScAnnotation, holder: AnnotationHolder) {
  //todo: check annotation is inheritor for class scala.Annotation
}
/** True when any annotation expression under the given type element resolves to `annotation` by name. */
def childHasAnnotation(teOption: Option[ScTypeElement], annotation: String): Boolean =
  teOption.exists(_.breadthFirst().exists {
    case annot: ScAnnotationExpr =>
      annot.constr.reference
        .flatMap(ref => Option(ref.resolve()))
        .collect { case named: PsiNamedElement => named.getName }
        .contains(annotation)
    case _ => false
  })
/**
 * Variance check for a function: the return type must be in covariant position and each
 * parameter type in contravariant position. Skipped for `private[this]` members, compound
 * types, and anything annotated `@uncheckedVariance`.
 */
private def checkFunctionForVariance(fun: ScFunction, holder: AnnotationHolder) {
  if (!modifierIsThis(fun) && !compoundType(fun)) { //if modifier contains [this] or if it is a compound type we do not highlight it
    checkBoundsVariance(fun, holder, fun.nameId, fun.getParent)
    if (!childHasAnnotation(fun.returnTypeElement, "uncheckedVariance")) {
      fun.returnType match {
        case Right(returnType) =>
          // Expand aliases first so the underlying type's variance is what gets checked.
          checkVariance(ScalaType.expandAliases(returnType).getOrElse(returnType), Covariant, fun.nameId,
            fun.getParent, holder)
        case _ =>
      }
    }
    for (parameter <- fun.parameters) {
      parameter.typeElement match {
        case Some(te) if !childHasAnnotation(Some(te), "uncheckedVariance") =>
          checkVariance(ScalaType.expandAliases(te.calcType).getOrElse(te.calcType), Contravariant,
            parameter.nameId, fun.getParent, holder)
        case _ =>
      }
    }
  }
}
/**
 * Variance check for val/var declarations: each declared element's type must respect
 * the given variance. `private[this]` members are exempt.
 */
def checkValueAndVariableVariance(toCheck: ScDeclaredElementsHolder, variance: Variance,
                                  declaredElements: Seq[Typeable with ScNamedElement], holder: AnnotationHolder) {
  if (!modifierIsThis(toCheck)) {
    for (element <- declaredElements) {
      element.`type`() match {
        case Right(tp) =>
          // Expand aliases when possible so the alias itself is highlighted on failure.
          val toReport = ScalaType.expandAliases(tp).getOrElse(tp)
          checkVariance(toReport, variance, element.nameId, toCheck, holder)
        case _ =>
      }
    }
  }
}
/** True when the element carries a `private[this]` / `protected[this]` access modifier. */
def modifierIsThis(toCheck: PsiElement): Boolean =
  toCheck match {
    case owner: ScModifierListOwner =>
      val accessModifier = Option(owner.getModifierList).flatMap(_.accessModifier)
      accessModifier.exists(_.isThis)
    case _ => false
  }
/** True when the element's grandparent is a compound type element. */
def compoundType(toCheck: PsiElement): Boolean =
  toCheck.getParent.getParent.isInstanceOf[ScCompoundTypeElement]
//fix for SCL-807
/**
 * Recursively walks `typeParam` and reports every type-parameter occurrence whose declared
 * variance conflicts with the variance of the position it appears in (e.g. a contravariant
 * parameter in covariant position).
 */
private def checkVariance(typeParam: ScType, variance: Variance, toHighlight: PsiElement, checkParentOf: PsiElement,
                          holder: AnnotationHolder, checkIfTypeIsInSameBrackets: Boolean = false, insideParameterized: Boolean = false) = {
  def highlightVarianceError(elementV: Variance, positionV: Variance, name: String) = {
    // Invariant parameters are legal in any position; only a real mismatch is reported.
    if (positionV != elementV && elementV != Invariant) {
      val pos =
        if (toHighlight.isInstanceOf[ScVariable]) toHighlight.getText + "_="
        else toHighlight.getText
      val place = if (toHighlight.isInstanceOf[ScFunction]) "method" else "value"
      val elementVariance = elementV.name
      val posVariance = positionV.name
      val annotation = holder.createErrorAnnotation(toHighlight,
        ScalaBundle.message(s"$elementVariance.type.$posVariance.position.of.$place", name, typeParam.toString, pos))
      annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
    }
  }
  // Visitor passed to recursiveVarianceUpdate: inspects each type-parameter occurrence.
  def functionToSendIn(tp: ScType, v: Variance) = {
    tp match {
      case paramType: TypeParameterType =>
        paramType.psiTypeParameter match {
          case scTypeParam: ScTypeParam =>
            val compareTo = scTypeParam.owner
            val parentIt = checkParentOf.parents
            //if it's a function inside function we do not highlight it unless trait or class is defined inside this function
            parentIt.find(e => e == compareTo || e.isInstanceOf[ScFunction]) match {
              case Some(_: ScFunction) =>
              case _ =>
                def findVariance: Variance = {
                  if (!checkIfTypeIsInSameBrackets) return v
                  if (PsiTreeUtil.isAncestor(scTypeParam.getParent, toHighlight, false))
                  //we do not highlight element if it was declared inside parameterized type.
                    if (!scTypeParam.getParent.getParent.isInstanceOf[ScTemplateDefinition]) return scTypeParam.variance
                    else return -v
                  if (toHighlight.getParent == scTypeParam.getParent.getParent) return -v
                  v
                }
                highlightVarianceError(scTypeParam.variance, findVariance, paramType.name)
            }
          case _ =>
        }
      case _ =>
    }
    // (false, tp): do not stop the traversal, keep the type unchanged.
    (false, tp)
  }
  typeParam.recursiveVarianceUpdate(functionToSendIn, variance)
}
//fix for SCL-7176
/** Abstract members must not be `private`/`private[this]`; native declarations are exempt. */
private def checkAbstractMemberPrivateModifier(element: PsiElement, toHighlight: Seq[PsiElement], holder: AnnotationHolder) {
  element match {
    case fun: ScFunctionDeclaration if fun.isNative =>
    case modOwner: ScModifierListOwner =>
      modOwner.getModifierList.accessModifier match {
        case Some(am) if am.isUnqualifiedPrivateOrThis =>
          toHighlight.foreach { target =>
            val message = ScalaBundle.message("abstract.member.not.have.private.modifier")
            val annotation = holder.createErrorAnnotation(target, message)
            annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
          }
        case _ =>
      }
    case _ =>
  }
}
/**
 * Validates an integer literal: reports octal literals (deprecated in Scala 2.10,
 * removed in 2.11) with a convert-to-hex fix, and values that overflow Int/Long,
 * offering an "add L" fix when a Long/BigInt is acceptable in context.
 */
private def checkIntegerLiteral(literal: ScLiteral, holder: AnnotationHolder) {
  val child = literal.getFirstChild.getNode
  val text = literal.getText
  val endsWithL = child.getText.endsWith("l") || child.getText.endsWith("L")
  val textWithoutL = if (endsWithL) text.substring(0, text.length - 1) else text
  val parent = literal.getParent
  val scalaVersion = literal.scalaLanguageLevel
  val isNegative = parent match {
    // only "-1234" is negative, "- 1234" should be considered as positive 1234
    case prefixExpr: ScPrefixExpr if prefixExpr.getChildren.length == 2 && prefixExpr.getFirstChild.getText == "-" => true
    case _ => false
  }
  // Split off the radix prefix: 0x/0X -> hex, leading 0 -> octal, otherwise decimal.
  val (number, base) = textWithoutL match {
    case t if t.startsWith("0x") || t.startsWith("0X") => (t.substring(2), 16)
    case t if t.startsWith("0") && t.length >= 2 => (t.substring(1), 8)
    case t => (t, 10)
  }
  // parse integer literal. the return is (Option(value), statusCode)
  // the Option(value) will be the real integer represented by the literal, if it cannot fit in Long, It's None
  // there is 3 value for statusCode:
  // 0 -> the literal can fit in Int
  // 1 -> the literal can fit in Long
  // 2 -> the literal cannot fit in Long
  def parseIntegerNumber(text: String, isNegative: Boolean): (Option[Long], Byte) = {
    var value = 0L // fixed: was `0l` — lowercase long suffix is easily misread as the digit 1
    val divider = if (base == 10) 1 else 2
    var statusCode: Byte = 0
    val limit = java.lang.Long.MAX_VALUE
    val intLimit = java.lang.Integer.MAX_VALUE
    // (removed an unused loop counter that was incremented but never read)
    for (d <- number.map(_.asDigit)) {
      if (value > intLimit ||
        intLimit / (base / divider) < value ||
        intLimit - (d / divider) < value * (base / divider) &&
          // This checks for -2147483648, value is 214748364, base is 10, d is 8. This check returns false.
          // base 8 and 16 won't have this check because the divider is 2 .
          !(isNegative && intLimit == value * base - 1 + d)) {
        statusCode = 1
      }
      if (value < 0 ||
        limit / (base / divider) < value ||
        limit - (d / divider) < value * (base / divider) &&
          // This checks for Long.MinValue, same as the the previous Int.MinValue check.
          !(isNegative && limit == value * base - 1 + d)) {
        return (None, 2)
      }
      value = value * base + d
    }
    value = if (isNegative) -value else value
    if (statusCode == 0) (Some(value.toInt), 0) else (Some(value), statusCode)
  }
  if (base == 8) {
    val convertFix = new ConvertOctalToHexFix(literal)
    scalaVersion match {
      case Some(ScalaLanguageLevel.Scala_2_10) =>
        val deprecatedMessage = "Octal number is deprecated in Scala-2.10 and will be removed in Scala-2.11"
        val annotation = holder.createWarningAnnotation(literal, deprecatedMessage)
        annotation.setHighlightType(ProblemHighlightType.LIKE_DEPRECATED)
        annotation.registerFix(convertFix)
      case Some(version) if version >= ScalaLanguageLevel.Scala_2_11 =>
        val error = "Octal number is removed in Scala-2.11 and after"
        val annotation = holder.createErrorAnnotation(literal, error)
        annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR)
        annotation.registerFix(convertFix)
        return
      case _ =>
    }
  }
  val (_, status) = parseIntegerNumber(number, isNegative)
  if (status == 2) { // the Integer number is out of range even for Long
    val error = "Integer number is out of range even for type Long"
    val annotation = holder.createErrorAnnotation(literal, error)
    annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
  } else {
    if (status == 1 && !endsWithL) {
      val error = "Integer number is out of range for type Int"
      // For a negative literal the whole prefix expression is marked, not just the digits.
      val annotation = if (isNegative) holder.createErrorAnnotation(parent, error) else holder.createErrorAnnotation(literal, error)
      annotation.setHighlightType(ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
      val Long = literal.projectContext.stdTypes.Long
      val conformsToTypeList = Seq(Long) ++ createTypeFromText("_root_.scala.math.BigInt", literal.getContext, literal)
      // Only offer "add L" when Long/BigInt actually satisfies the expected type.
      val shouldRegisterFix = (if (isNegative) parent.asInstanceOf[ScPrefixExpr] else literal).expectedType().forall { x =>
        conformsToTypeList.exists(_.weakConforms(x))
      }
      if (shouldRegisterFix) {
        val addLtoLongFix: AddLToLongLiteralFix = new AddLToLongLiteralFix(literal)
        annotation.registerFix(addLtoLongFix)
      }
    }
  }
}
/**
 * This method will return checked conformance if it's possible to check it.
 * In other way it will return true to avoid red code.
 * Check conformance in case l = r.
 */
def smartCheckConformance(l: TypeResult, r: TypeResult): Boolean =
  (l, r) match {
    case (Right(leftType), Right(rightType)) => rightType.conforms(leftType)
    case _ => true // one of the types failed to compute — assume OK to avoid red code
  }
}
object ScalaAnnotator {
  // Keyed user-data: (presumably a modification stamp, ranges excluded from highlighting) — TODO confirm
  val ignoreHighlightingKey: Key[(Long, mutable.HashSet[TextRange])] = Key.create("ignore.highlighting.key")
  // Accumulates imports observed as used while annotating a file.
  val usedImportsKey: Key[mutable.HashSet[ImportUsed]] = Key.create("used.imports.key")
  /** Creates an annotator bound to the given project context. */
  def forProject(implicit ctx: ProjectContext): ScalaAnnotator = new ScalaAnnotator {
    override implicit def projectContext: ProjectContext = ctx
  }
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotator.scala | Scala | apache-2.0 | 64,562 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scheduler.onlinetesting
import com.google.inject.name.Named
import config.ScheduledJobConfig
import javax.inject.{ Inject, Singleton }
import model._
import play.api.Configuration
import play.api.mvc.RequestHeader
import play.modules.reactivemongo.ReactiveMongoComponent
import scheduler.BasicJobConfig
import scheduler.clustering.SingleInstanceScheduledJob
import services.onlinetesting.OnlineTestService
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.{ ExecutionContext, Future }
/** Scheduled job: processes the next PHASE1 test awaiting a "passed" notification (see [[SuccessTestJob]]). */
@Singleton
class SuccessPhase1TestJob @Inject() (@Named("Phase1OnlineTestService") val service: OnlineTestService,
                                      val mongoComponent: ReactiveMongoComponent,
                                      val config: SuccessPhase1TestJobConfig
                                     ) extends SuccessTestJob {
  override val successType: SuccessTestType = Phase1SuccessTestType
  override val phase = "PHASE1"
}
/** Scheduled job: processes the next PHASE3 test awaiting a "passed" notification (see [[SuccessTestJob]]). */
@Singleton
class SuccessPhase3TestJob @Inject() (@Named("Phase3OnlineTestService") val service: OnlineTestService,
                                      val mongoComponent: ReactiveMongoComponent,
                                      val config: SuccessPhase3TestJobConfig
                                     ) extends SuccessTestJob {
  override val successType: SuccessTestType = Phase3SuccessTestType
  override val phase = "PHASE3"
}
/**
 * Scheduled job: PHASE3 Sdip/faststream variant of the "passed" notification job.
 * NOTE(review): an earlier revision wired the wrong config class here — the original
 * source carried a comment to that effect; the injected SuccessPhase3SdipFsTestJobConfig
 * is the correct one.
 */
@Singleton
class SuccessPhase3SdipFsTestJob @Inject() (@Named("Phase3OnlineTestService") val service: OnlineTestService,
                                            val mongoComponent: ReactiveMongoComponent,
                                            val config: SuccessPhase3SdipFsTestJobConfig
                                           ) extends SuccessTestJob {
  override val successType: SuccessTestType = Phase3SuccessSdipFsTestType
  override val phase = "PHASE3"
}
/**
 * Base for single-instance scheduled jobs that notify candidates who passed an
 * online-test phase. Concrete jobs supply the service, the success type and the phase.
 */
trait SuccessTestJob extends SingleInstanceScheduledJob[BasicJobConfig[ScheduledJobConfig]] {
  val service: OnlineTestService
  val successType: SuccessTestType
  val phase: String
  /** Processes the next test of `phase` pending a "passed" notification. */
  def tryExecute()(implicit ec: ExecutionContext): Future[Unit] = {
    implicit val rh: RequestHeader = EmptyRequestHeader
    implicit val hc: HeaderCarrier = HeaderCarrier()
    service.processNextTestForNotification(successType, phase, "passed")
  }
}
/** Scheduling configuration for [[SuccessPhase1TestJob]], read from the given prefix. */
@Singleton
class SuccessPhase1TestJobConfig @Inject () (config: Configuration) extends BasicJobConfig[ScheduledJobConfig](
  config = config,
  configPrefix = "scheduling.online-testing.success-phase1-test-job",
  name = "SuccessPhase1TestJob"
)
/** Scheduling configuration for [[SuccessPhase3TestJob]], read from the given prefix. */
@Singleton
class SuccessPhase3TestJobConfig @Inject () (config: Configuration) extends BasicJobConfig[ScheduledJobConfig](
  config = config,
  configPrefix = "scheduling.online-testing.success-phase3-test-job",
  name = "SuccessPhase3TestJob"
)
/** Scheduling configuration for [[SuccessPhase3SdipFsTestJob]], read from the given prefix. */
@Singleton
class SuccessPhase3SdipFsTestJobConfig @Inject () (config: Configuration) extends BasicJobConfig[ScheduledJobConfig](
  config = config,
  configPrefix = "scheduling.online-testing.success-phase3-sdipfs-test-job",
  name = "SuccessPhase3SdipFsTestJob"
)
| hmrc/fset-faststream | app/scheduler/onlinetesting/SuccessTestJob.scala | Scala | apache-2.0 | 3,979 |
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 13/10/2017.
*/
/**
* Role Organization Access Entity
* @param roleId Role ID
* @param organizationId Organization ID
* @param tenantId Tenant ID
* @param isActive Is Active
* @param created Created
* @param createdBy Created By
* @param updated Updated
* @param updatedBy Updated By
* @param isReadOnly Is Read Only
* @param uuid UUID
*/
case class RoleOrganizationAccess(roleId: Int,
                                  organizationId: Int,
                                  tenantId: Int,
                                  isActive: Boolean = true,
                                  created: DateTime = DateTime.now,
                                  createdBy: Int,
                                  updated: DateTime = DateTime.now,
                                  updatedBy: Int,
                                  isReadOnly: Boolean = false,
                                  uuid: String
                                 ) extends DomainModel
  with ActiveEnabled
  with Identifiable
  with Traceable {
  override type ActiveEnabled = this.type
  override type Identifiable = this.type
  override type Traceable = this.type
  // NOTE(review): Id returns organizationId although this entity models a
  // (role, organization) pair — confirm this matches the Identifiable contract
  // used by sibling access entities.
  override def Id: Int = organizationId
  // Backing dictionary table name.
  override val entityName: String = "AD_Role_OrgAccess"
  // No single natural-key column for this link entity.
  override val identifier: String = null
}
object RoleOrganizationAccess {
  // JSON (de)serialization for the case class, including its default arguments.
  implicit lazy val jsonFormat = Jsonx.formatCaseClass[RoleOrganizationAccess]
  /** Factory forwarding every field to the primary constructor. */
  def create(roleId: Int,
             organizationId: Int,
             tenantId: Int,
             isActive: Boolean,
             created: DateTime,
             createdBy: Int,
             updated: DateTime,
             updatedBy: Int,
             isReadOnly: Boolean,
             uuid: String): RoleOrganizationAccess =
    RoleOrganizationAccess(roleId, organizationId, tenantId, isActive, created,
      createdBy, updated, updatedBy, isReadOnly, uuid)
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/RoleOrganizationAccess.scala | Scala | gpl-3.0 | 3,002 |
// scalastyle:off line.size.limit
/*
* Ported by Alistair Johnson from
* https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/math/BigDecimalArithmeticTest.java
*/
// scalastyle:on line.size.limit
package org.scalajs.testsuite.javalib.math
import java.math._
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._
import org.scalajs.testsuite.utils.Platform.executingInJVM
class BigDecimalArithmeticTest {
/** add(): negative scale + positive scale — the sum keeps the larger scale (10). */
@Test def testAddDiffScaleNegPos(): Unit = {
  val a = "1231212478987482988429808779810457634781384756794987"
  val aScale = -15
  val b = "747233429293018787918347987234564568"
  val bScale = 10
  val c = "1231212478987482988429808779810457634781459480137916301878791834798.7234564568"
  val cScale = 10
  val aNumber = new BigDecimal(new BigInteger(a), aScale)
  val bNumber = new BigDecimal(new BigInteger(b), bScale)
  val result = aNumber.add(bNumber)
  assertEquals(result.scale(), cScale)
  assertEquals(result.toString, c)
}
/** add(): positive scale + negative scale — the sum keeps the larger scale (15). */
@Test def testAddDiffScalePosNeg(): Unit = {
  val a = "1231212478987482988429808779810457634781384756794987"
  val aScale = 15
  val b = "747233429293018787918347987234564568"
  val bScale = -10
  val c = "7472334294161400358170962860775454459810457634.781384756794987"
  val cScale = 15
  val aNumber = new BigDecimal(new BigInteger(a), aScale)
  val bNumber = new BigDecimal(new BigInteger(b), bScale)
  val result = aNumber.add(bNumber)
  assertEquals(result.scale(), cScale)
  assertEquals(result.toString, c)
}
/** add(): both scales positive; the small magnitudes produce scientific notation output. */
@Test def testAddDiffScalePosPos(): Unit = {
  val a = "100"
  val aScale = 15
  val b = "200"
  val bScale = 14
  val c = "2.100E-12"
  val cScale = 15
  val aNumber = new BigDecimal(new BigInteger(a), aScale)
  val bNumber = new BigDecimal(new BigInteger(b), bScale)
  val result = aNumber.add(bNumber)
  assertEquals(result.toString, c)
  assertEquals(result.scale(), cScale)
}
/** add(): zero + zero with different scales — the result is zero at the larger scale. */
@Test def testAddDiffScaleZeroZero(): Unit = {
  val a = "0"
  val aScale = -15
  val b = "0"
  val bScale = 10
  val c = "0E-10"
  val cScale = 10
  val aNumber = new BigDecimal(new BigInteger(a), aScale)
  val bNumber = new BigDecimal(new BigInteger(b), bScale)
  val result = aNumber.add(bNumber)
  assertEquals(result.toString, c)
  assertEquals(result.scale(), cScale)
}
@Test def testAddEqualScaleNegNeg(): Unit = {
  // add() with equal negative scales.
  val augend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), -10)
  val addend = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), -10)
  val sum = augend.add(addend)
  assertEquals(sum.toString, "1.231212478987483735663238072829245553129371991359555E+61")
  assertEquals(sum.scale(), -10)
}
@Test def testAddEqualScalePosPos(): Unit = {
  // add() with equal positive scales.
  val augend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 10)
  val addend = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  val sum = augend.add(addend)
  assertEquals(sum.toString, "123121247898748373566323807282924555312937.1991359555")
  assertEquals(sum.scale(), 10)
}
@Test def testAddMathContextDiffScalePosNeg(): Unit = {
  // add(BigDecimal, MathContext) with mixed scales, rounding to 15 digits CEILING.
  val a = "1231212478987482988429808779810457634781384756794987"
  val aScale = 15
  val b = "747233429293018787918347987234564568"
  val bScale = -10
  val c = "7.47233429416141E+45"
  val cScale = -31
  val aNumber = new BigDecimal(new BigInteger(a), aScale)
  val bNumber = new BigDecimal(new BigInteger(b), bScale)
  val mc = new MathContext(15, RoundingMode.CEILING)
  val result = aNumber.add(bNumber, mc)
  // BUG FIX: this previously read `assertEquals(c.toString, c)`, which compared
  // the expected string with itself and never checked the computed sum.
  assertEquals(result.toString, c)
  assertEquals(result.scale(), cScale)
}
@Test def testAddMathContextEqualScaleNegNeg(): Unit = {
  // add() with a 5-digit FLOOR MathContext and equal negative scales.
  val augend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), -10)
  val addend = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), -10)
  val sum = augend.add(addend, new MathContext(5, RoundingMode.FLOOR))
  assertEquals(sum.toString, "1.2312E+61")
  assertEquals(sum.scale(), -57)
}
@Test def testAddMathContextEqualScalePosPos(): Unit = {
  // add() with a 5-digit UP MathContext and equal positive scales.
  val augend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 10)
  val addend = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  val sum = augend.add(addend, new MathContext(5, RoundingMode.UP))
  assertEquals(sum.toString, "1.2313E+41")
  assertEquals(sum.scale(), -37)
}
@Test def testAddZero(): Unit = {
  // Adding ZERO on either side must leave the value unchanged.
  val base = new BigDecimal("123456789")
  assertTrue(base.add(BigDecimal.ZERO) == base)
  assertTrue(BigDecimal.ZERO.add(base) == base)
  // Also holds for a zero with a non-zero scale.
  val zeroScaled = BigDecimal.valueOf(0L, 1)
  assertTrue(zeroScaled.add(BigDecimal.ZERO) == zeroScaled)
}
@Test def testApproxScale(): Unit = {
  // TEN * 0.1 compares equal to 1.00 even though the scales differ.
  val product = BigDecimal.TEN.multiply(new BigDecimal("0.1"))
  assertEquals(product.compareTo(new BigDecimal("1.00")), 0)
}
@Test def testDivideAndRemainder1(): Unit = {
  // divideAndRemainder() with positive scales on both operands.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val Array(quotient, remainder) = dividend.divideAndRemainder(divisor)
  assertEquals(quotient.toString, "277923185514690367474770683")
  assertEquals(quotient.scale(), 0)
  assertEquals(remainder.toString, "1.3032693871288309587558885943391070087960319452465789990E-15")
  assertEquals(remainder.scale(), 70)
}
@Test def testDivideAndRemainder2(): Unit = {
  // divideAndRemainder() with a negative dividend scale (huge quotient).
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), -45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val expectedQuot =
    "2779231855146903674747706830969461168692256919247547952" +
    "2608549363170374005512836303475980101168105698072946555" +
    "6862849"
  val Array(quotient, remainder) = dividend.divideAndRemainder(divisor)
  assertEquals(quotient.toString, expectedQuot)
  assertEquals(quotient.scale(), 0)
  assertEquals(remainder.toString, "3.4935796954060524114470681810486417234751682675102093970E-15")
  assertEquals(remainder.scale(), 70)
}
@Test def testDivideAndRemainderMathContextDOWN(): Unit = {
  // divideAndRemainder(mc) where 15-digit DOWN precision truncates the quotient to zero.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 20)
  val mc = new MathContext(15, RoundingMode.DOWN)
  val Array(quotient, remainder) = dividend.divideAndRemainder(divisor, mc)
  assertEquals(quotient.toString, "0E-25")
  assertEquals(quotient.scale(), 25)
  assertEquals(remainder.toString, "3736186567876.876578956958765675671119238118911893939591735")
  assertEquals(remainder.scale(), 45)
}
@Test def testDivideAndRemainderMathContextUP(): Unit = {
  // divideAndRemainder(mc) with precision large enough to match the exact result.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val mc = new MathContext(75, RoundingMode.UP)
  val Array(quotient, remainder) = dividend.divideAndRemainder(divisor, mc)
  assertEquals(quotient.toString, "277923185514690367474770683")
  assertEquals(quotient.scale(), 0)
  assertEquals(remainder.toString, "1.3032693871288309587558885943391070087960319452465789990E-15")
  assertEquals(remainder.scale(), 70)
}
@Test def testDivideBigDecimal1(): Unit = {
  // Exact divide() with both scales positive.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor)
  assertEquals(quotient.toString, "-5E+4")
  assertEquals(quotient.scale(), -4)
}
@Test def testDivideBigDecimal2(): Unit = {
  // Exact divide() with a negative divisor scale.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), -15)
  val quotient = dividend.divide(divisor)
  assertEquals(quotient.toString, "-5E-26")
  assertEquals(quotient.scale(), 26)
}
@Test def testDivideBigDecimalScaleMathContextCEILING(): Unit = {
  // divide(mc) with 21-digit CEILING rounding.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 15)
  val divisor = new BigDecimal(new BigInteger("748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.CEILING))
  assertEquals(quotient.toString, "4.98978611802562512996E+70")
  assertEquals(quotient.scale(), -50)
}
@Test def testDivideBigDecimalScaleMathContextDOWN(): Unit = {
  // divide(mc) with 21-digit DOWN rounding (truncates the last digit vs CEILING).
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 15)
  val divisor = new BigDecimal(new BigInteger("748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.DOWN))
  assertEquals(quotient.toString, "4.98978611802562512995E+70")
  assertEquals(quotient.scale(), -50)
}
@Test def testDivideBigDecimalScaleMathContextFLOOR(): Unit = {
  // divide(mc) with 21-digit FLOOR rounding (same as DOWN for a positive quotient).
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 15)
  val divisor = new BigDecimal(new BigInteger("748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.FLOOR))
  assertEquals(quotient.toString, "4.98978611802562512995E+70")
  assertEquals(quotient.scale(), -50)
}
@Test def testDivideBigDecimalScaleMathContextHALF_DOWN(): Unit = {
  // divide(mc) with 21-digit HALF_DOWN rounding.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.HALF_DOWN))
  assertEquals(quotient.toString, "2.77923185514690367475E+26")
  assertEquals(quotient.scale(), -6)
}
@Test def testDivideBigDecimalScaleMathContextHALF_EVEN(): Unit = {
  // divide(mc) with 21-digit HALF_EVEN rounding.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.HALF_EVEN))
  assertEquals(quotient.toString, "2.77923185514690367475E+26")
  assertEquals(quotient.scale(), -6)
}
@Test def testDivideBigDecimalScaleMathContextHALF_UP(): Unit = {
  // divide(mc) with 21-digit HALF_UP rounding.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.HALF_UP))
  assertEquals(quotient.toString, "2.77923185514690367475E+26")
  assertEquals(quotient.scale(), -6)
}
@Test def testDivideBigDecimalScaleMathContextUP(): Unit = {
  // divide(mc) with 21-digit UP rounding and a positive result scale.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 15)
  val divisor = new BigDecimal(new BigInteger("748766876876723342238476237823787879183470"), 10)
  val quotient = dividend.divide(divisor, new MathContext(21, RoundingMode.UP))
  assertEquals(quotient.toString, "49897861180.2562512996")
  assertEquals(quotient.scale(), 10)
}
@Test def testDivideBigDecimalScaleRoundingModeCEILING(): Unit = {
  // divide(scale, rm): CEILING pushes a tiny positive quotient up to 1 ulp.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 100)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, 45, RoundingMode.CEILING)
  assertEquals(quotient.toString, "1E-45")
  assertEquals(quotient.scale(), 45)
}
@Test def testDivideBigDecimalScaleRoundingModeDOWN(): Unit = {
  // divide(scale, rm) with DOWN and an explicit target scale of 31.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, 31, RoundingMode.DOWN)
  assertEquals(quotient.toString, "-50000.0000000000000000000000000000000")
  assertEquals(quotient.scale(), 31)
}
@Test def testDivideBigDecimalScaleRoundingModeFLOOR(): Unit = {
  // divide(scale, rm): FLOOR truncates a tiny positive quotient to zero.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 100)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, 45, RoundingMode.FLOOR)
  assertEquals(quotient.toString, "0E-45")
  assertEquals(quotient.scale(), 45)
}
@Test def testDivideBigDecimalScaleRoundingModeHALF_DOWN(): Unit = {
  // divide(scale, rm) with HALF_DOWN and target scale 7.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 5)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, 7, RoundingMode.HALF_DOWN)
  assertEquals(quotient.toString, "500002603731642864013619132621009722.1803810")
  assertEquals(quotient.scale(), 7)
}
@Test def testDivideBigDecimalScaleRoundingModeHALF_EVEN(): Unit = {
  // divide(scale, rm) with HALF_EVEN and target scale 7.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 5)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, 7, RoundingMode.HALF_EVEN)
  assertEquals(quotient.toString, "500002603731642864013619132621009722.1803810")
  assertEquals(quotient.scale(), 7)
}
@Test def testDivideBigDecimalScaleRoundingModeHALF_UP(): Unit = {
  // divide(scale, rm) with HALF_UP, a negative dividend scale and target scale 3.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), -51)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 45)
  val expected =
    "50000260373164286401361913262100972218038099522752460421" +
    "05959924024355721031761947728703598332749334086415670525" +
    "3761096961.670"
  val quotient = dividend.divide(divisor, 3, RoundingMode.HALF_UP)
  assertEquals(quotient.toString, expected)
  assertEquals(quotient.scale(), 3)
}
@Test def testDivideBigDecimalScaleRoundingModeUP(): Unit = {
  // divide(scale, rm) with UP and a negative divisor scale.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), -15)
  val quotient = dividend.divide(divisor, 31, RoundingMode.UP)
  assertEquals(quotient.toString, "-5.00000E-26")
  assertEquals(quotient.scale(), 31)
}
@Test def testDivideByZero(): Unit = {
  // Dividing any value by zero must throw ArithmeticException.
  val dividend =
    new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 15)
  expectThrows(classOf[ArithmeticException], dividend.divide(BigDecimal.valueOf(0L)))
}
@Test def testDivideExceptionInvalidRM(): Unit = {
  // 100 is not a valid legacy rounding-mode constant, so divide must reject it.
  val dividend =
    new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 15)
  val divisor = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  expectThrows(classOf[IllegalArgumentException], dividend.divide(divisor, 100))
}
@Test def testDivideExceptionRM(): Unit = {
  // ROUND_UNNECESSARY must throw because the quotient is not exact.
  val dividend =
    new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 15)
  val divisor = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  expectThrows(classOf[ArithmeticException],
      dividend.divide(divisor, BigDecimal.ROUND_UNNECESSARY))
}
@Test def testDivideExpEqualsZero(): Unit = {
  // divide(scale, rm) where the exponent of the quotient is zero before scaling.
  val dividend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), -15)
  val divisor = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "1.64769459009933764189139568605273529E+40")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideExpGreaterZero(): Unit = {
  // divide(scale, rm) where the quotient exponent is greater than zero.
  val dividend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), -15)
  val divisor = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 20)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "1.647694590099337641891395686052735285121058381E+50")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideExpLessZero(): Unit = {
  // divide(scale, rm) where the quotient exponent is less than zero.
  val dividend = new BigDecimal(new BigInteger("1231212478987482988429808779810457634781384756794987"), 15)
  val divisor = new BigDecimal(new BigInteger("747233429293018787918347987234564568"), 10)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "1.64770E+10")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideLargeScale(): Unit = {
  // The required quotient scale overflows an Int, so divide must throw.
  val huge = new BigDecimal("320.0E+2147483647")
  val tiny = new BigDecimal("6E-2147483647")
  expectThrows(classOf[ArithmeticException],
      huge.divide(tiny, Int.MaxValue, RoundingMode.CEILING))
}
@Test def testDivideRemainderIsZero(): Unit = {
  // divide(scale, rm) when the division is exact (remainder is zero).
  val dividend = new BigDecimal(new BigInteger("8311389578904553209874735431110"), -15)
  val divisor = new BigDecimal(new BigInteger("237468273682987234567849583746"), 20)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "3.5000000000000000000000000000000E+36")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideRoundCeilingNeg(): Unit = {
  // ROUND_CEILING on a negative quotient rounds toward zero.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799283E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundCeilingPos(): Unit = {
  // ROUND_CEILING on a positive quotient rounds away from zero.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_CEILING)
  assertEquals(quotient.toString, "1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundDownNeg(): Unit = {
  // ROUND_DOWN truncates a negative quotient toward zero.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_DOWN)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799283E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundDownPos(): Unit = {
  // ROUND_DOWN truncates a positive quotient toward zero.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_DOWN)
  assertEquals(quotient.toString, "1.24390557635720517122423359799283E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundFloorNeg(): Unit = {
  // ROUND_FLOOR on a negative quotient rounds away from zero.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_FLOOR)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundFloorPos(): Unit = {
  // ROUND_FLOOR on a positive quotient truncates toward zero.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_FLOOR)
  assertEquals(quotient.toString, "1.24390557635720517122423359799283E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfDownNeg(): Unit = {
  // ROUND_HALF_DOWN on a negative quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_DOWN)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfDownNeg1(): Unit = {
  // ROUND_HALF_DOWN on a larger negative quotient.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_DOWN)
  assertEquals(quotient.toString, "-1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfDownNeg2(): Unit = {
  // ROUND_HALF_DOWN exactly at the halfway point rounds toward zero.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_HALF_DOWN)
  assertEquals(quotient.toString, "0E+5")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideRoundHalfDownPos(): Unit = {
  // ROUND_HALF_DOWN on a positive quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_DOWN)
  assertEquals(quotient.toString, "1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfDownPos1(): Unit = {
  // ROUND_HALF_DOWN on a larger positive quotient.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_DOWN)
  assertEquals(quotient.toString, "1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfEvenNeg(): Unit = {
  // ROUND_HALF_EVEN on a negative quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_EVEN)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfEvenNeg1(): Unit = {
  // ROUND_HALF_EVEN on a larger negative quotient.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_EVEN)
  assertEquals(quotient.toString, "-1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfEvenNeg2(): Unit = {
  // ROUND_HALF_EVEN exactly at the halfway point rounds to the even neighbour (zero).
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_HALF_EVEN)
  assertEquals(quotient.toString, "0E+5")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideRoundHalfEvenPos(): Unit = {
  // ROUND_HALF_EVEN on a positive quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_EVEN)
  assertEquals(quotient.toString, "1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfEvenPos1(): Unit = {
  // ROUND_HALF_EVEN on a larger positive quotient.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_EVEN)
  assertEquals(quotient.toString, "1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfUpNeg(): Unit = {
  // ROUND_HALF_UP on a negative quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_UP)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfUpNeg1(): Unit = {
  // ROUND_HALF_UP on a larger negative quotient.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_UP)
  assertEquals(quotient.toString, "-1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfUpNeg2(): Unit = {
  // ROUND_HALF_UP exactly at the halfway point rounds away from zero.
  val dividend = new BigDecimal(new BigInteger("-37361671119238118911893939591735"), 10)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823787879183470"), 15)
  val quotient = dividend.divide(divisor, -5, BigDecimal.ROUND_HALF_UP)
  assertEquals(quotient.toString, "-1E+5")
  assertEquals(quotient.scale(), -5)
}
@Test def testDivideRoundHalfUpPos(): Unit = {
  // ROUND_HALF_UP on a positive quotient past the halfway point.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_UP)
  assertEquals(quotient.toString, "1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundHalfUpPos1(): Unit = {
  // ROUND_HALF_UP on a larger positive quotient.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988798104576347813847567949855464535634534563456"), -24)
  val divisor = new BigDecimal(new BigInteger("74723342238476237823754692930187879183479"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_HALF_UP)
  assertEquals(quotient.toString, "1.2439055763572051712242335979928354832010167729111113605E+76")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundUpNeg(): Unit = {
  // ROUND_UP on a negative quotient rounds away from zero.
  val dividend = new BigDecimal(new BigInteger("-92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_UP)
  assertEquals(quotient.toString, "-1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideRoundUpPos(): Unit = {
  // ROUND_UP on a positive quotient rounds away from zero.
  val dividend = new BigDecimal(new BigInteger("92948782094488478231212478987482988429808779810457634781384756794987"), -24)
  val divisor = new BigDecimal(new BigInteger("7472334223847623782375469293018787918347987234564568"), 13)
  val quotient = dividend.divide(divisor, -21, BigDecimal.ROUND_UP)
  assertEquals(quotient.toString, "1.24390557635720517122423359799284E+53")
  assertEquals(quotient.scale(), -21)
}
@Test def testDivideSmall(): Unit = {
  // Sanity checks for small, exact divisions.
  val six = BigDecimal.valueOf(6)
  val two = BigDecimal.valueOf(2)
  assertEquals(six.divide(two).toString, "3")
  assertEquals(six.divideToIntegralValue(two).toString, "3")
  assertEquals(six.divide(BigDecimal.ONE).toString, "6")
  assertEquals(six.divide(BigDecimal.ONE.negate()).toString, "-6")
}
@Test def testDivideToIntegralValue(): Unit = {
  // divideToIntegralValue() without a MathContext.
  val dividend = new BigDecimal(new BigInteger("3736186567876876578956958765675671119238118911893939591735"), 45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val integralPart = dividend.divideToIntegralValue(divisor)
  assertEquals(integralPart.toString, "277923185514690367474770683")
  assertEquals(integralPart.scale(), 0)
}
@Test def testDivideToIntegralValueMathContextDOWN(): Unit = {
  // divideToIntegralValue(mc) with 75-digit DOWN precision.
  val dividend = new BigDecimal(
      new BigInteger("3736186567876876578956958769675785435673453453653543654354365435675671119238118911893939591735"),
      45)
  val divisor = new BigDecimal(new BigInteger("134432345432345748766876876723342238476237823787879183470"), 70)
  val mc = new MathContext(75, RoundingMode.DOWN)
  val integralPart = dividend.divideToIntegralValue(divisor, mc)
  assertEquals(integralPart.toString, "2.7792318551469036747477068339450205874992634417590178670822889E+62")
  assertEquals(integralPart.scale(), -1)
}
@Test def testDivideToIntegralValueMathContextUP(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val b = "134432345432345748766876876723342238476237823787879183470"
val bScale = 70
val precision = 32
val rm = RoundingMode.UP
val mc = new MathContext(precision, rm)
val c = "277923185514690367474770683"
val resScale = 0
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.divideToIntegralValue(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), resScale)
}
@Test def testDivideZero(): Unit = {
var quotient = BigDecimal.ZERO.divide(BigDecimal.ONE)
assertTrue(BigDecimal.ZERO == quotient)
quotient = BigDecimal.ZERO.negate().divide(BigDecimal.ONE)
assertTrue(BigDecimal.ZERO == quotient)
expectThrows(classOf[ArithmeticException], BigDecimal.ZERO.divide(BigDecimal.ZERO))
expectThrows(classOf[ArithmeticException], BigDecimal.ONE.divide(BigDecimal.ZERO))
expectThrows(classOf[ArithmeticException], BigDecimal.ONE.divideToIntegralValue(BigDecimal.ZERO))
}
@Test def testDivideToIntegralValue_on_floating_points__issue_1979(): Unit = {
val one = new BigDecimal(1.0)
val oneAndHalf = new BigDecimal(1.5)
val a0 = new BigDecimal(3.0)
val a1 = new BigDecimal(3.1)
val a2 = new BigDecimal(3.21)
val a3 = new BigDecimal(3.321)
val b0 = new BigDecimal(3.0)
val b1 = new BigDecimal(2.0)
assertEquals(b0, a0.divideToIntegralValue(one))
assertEquals(b0.setScale(51), a1.divideToIntegralValue(one))
assertEquals(b0.setScale(50), a2.divideToIntegralValue(one))
assertEquals(b0.setScale(51), a3.divideToIntegralValue(one))
assertEquals(b1, a0.divideToIntegralValue(oneAndHalf))
assertEquals(b1.setScale(50), a1.divideToIntegralValue(oneAndHalf))
assertEquals(b1.setScale(49), a2.divideToIntegralValue(oneAndHalf))
assertEquals(b1.setScale(50), a3.divideToIntegralValue(oneAndHalf))
}
@Test def testMultiplyDiffScaleNegPos(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = -15
val b = "747233429293018787918347987234564568"
val bScale = 10
val c = "9.20003122862175749786430095741145455670101391569026662845893091880727173060570190220616E+91"
val cScale = -5
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.multiply(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyDiffScalePosNeg(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 10
val b = "747233429293018787918347987234564568"
val bScale = -10
val c = "920003122862175749786430095741145455670101391569026662845893091880727173060570190220616"
val cScale = 0
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.multiply(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyEqualScaleNegNeg(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = -15
val b = "747233429293018787918347987234564568"
val bScale = -10
val c = "9.20003122862175749786430095741145455670101391569026662845893091880727173060570190220616E+111"
val cScale = -25
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.multiply(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyMathContextDiffScaleNegPos(): Unit = {
val a = "488757458676796558668876576576579097029810457634781384756794987"
val aScale = -63
val b = "747233429293018787918347987234564568"
val bScale = 63
val c = "3.6521591193960361339707130098174381429788164316E+98"
val cScale = -52
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(47, RoundingMode.HALF_UP)
val result = aNumber.multiply(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyMathContextDiffScalePosNeg(): Unit = {
val a = "987667796597975765768768767866756808779810457634781384756794987"
val aScale = 100
val b = "747233429293018787918347987234564568"
val bScale = -70
val c = "7.3801839465418518653942222612429081498248509257207477E+68"
val cScale = -16
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(53, RoundingMode.HALF_UP)
val result = aNumber.multiply(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyMathContextScalePosPos(): Unit = {
val a = "97665696756578755423325476545428779810457634781384756794987"
val aScale = -25
val b = "87656965586786097685674786576598865"
val bScale = 10
val c = "8.561078619600910561431314228543672720908E+108"
val cScale = -69
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(40, RoundingMode.HALF_DOWN)
val result = aNumber.multiply(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testMultiplyScalePosPos(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 15
val b = "747233429293018787918347987234564568"
val bScale = 10
val c = "92000312286217574978643009574114545567010139156902666284589309.1880727173060570190220616"
val cScale = 25
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.multiply(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testPow(): Unit = {
val a = "123121247898748298842980"
val aScale = 10
val exp = 10
val c = "8004424019039195734129783677098845174704975003788210729597" +
"4875206425711159855030832837132149513512555214958035390490" +
"798520842025826.594316163502809818340013610490541783276343" +
"6514490899700151256484355936102754469438371850240000000000"
val cScale = 100
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.pow(exp)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testPow0(): Unit = {
val a = "123121247898748298842980"
val aScale = 10
val exp = 0
val c = "1"
val cScale = 0
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.pow(exp)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testPowMathContext(): Unit = {
val a = "123121247898748298842980"
val aScale = 10
val exp = 10
val c = "8.0044E+130"
val cScale = -126
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val mc = new MathContext(5, RoundingMode.HALF_UP)
val result = aNumber.pow(exp, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testRemainder1(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val b = "134432345432345748766876876723342238476237823787879183470"
val bScale = 10
val res = "3736186567876.876578956958765675671119238118911893939591735"
val resScale = 45
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.remainder(bNumber)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRemainder2(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = -45
val b = "134432345432345748766876876723342238476237823787879183470"
val bScale = 10
val res = "1149310942946292909508821656680979993738625937.2065885780"
val resScale = 10
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.remainder(bNumber)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRemainderMathContextHALF_DOWN(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = -45
val b = "134432345432345748766876876723342238476237823787879183470"
val bScale = 10
val precision = 75
val rm = RoundingMode.HALF_DOWN
val mc = new MathContext(precision, rm)
val res = "1149310942946292909508821656680979993738625937.2065885780"
val resScale = 10
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.remainder(bNumber, mc)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRemainderMathContextHALF_UP(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val b = "134432345432345748766876876723342238476237823787879183470"
val bScale = 10
val precision = 15
val rm = RoundingMode.HALF_UP
val mc = new MathContext(precision, rm)
val res = "3736186567876.876578956958765675671119238118911893939591735"
val resScale = 45
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.remainder(bNumber, mc)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRoundMathContextCEILING(): Unit = {
var `val` = BigDecimal.valueOf(1.5)
val result = `val`.round(new MathContext(1, RoundingMode.CEILING))
assertEquals(result.toString, "2")
assertEquals(result.scale(), 0)
assertEquals(result.precision(), 1)
`val` = BigDecimal.valueOf(5.43445663479765)
`val` = `val`.setScale(`val`.scale() + 1, RoundingMode.CEILING)
.round(new MathContext(1, RoundingMode.CEILING))
`val` = BigDecimal.valueOf(5.4344566347976)
`val` = `val`.setScale(`val`.scale() + 2, RoundingMode.CEILING)
.round(new MathContext(1, RoundingMode.CEILING))
var test = BigDecimal.valueOf(12.4344566347976)
test = test.setScale(test.scale() + 1, RoundingMode.CEILING)
.round(new MathContext(1, RoundingMode.CEILING))
}
@Test def testRoundMathContextHALF_DOWN(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = -45
val precision = 75
val rm = RoundingMode.HALF_DOWN
val mc = new MathContext(precision, rm)
val res = "3.736186567876876578956958765675671119238118911893939591735E+102"
val resScale = -45
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.round(mc)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRoundMathContextHALF_UP(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val precision = 15
val rm = RoundingMode.HALF_UP
val mc = new MathContext(precision, rm)
val res = "3736186567876.88"
val resScale = 2
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.round(mc)
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testRoundMathContextPrecision0(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val precision = 0
val rm = RoundingMode.HALF_UP
val mc = new MathContext(precision, rm)
val res = "3736186567876.876578956958765675671119238118911893939591735"
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.round(mc)
assertEquals(result.toString, res)
assertEquals(result.scale(), aScale)
}
@Test def testSubtractDiffScaleNegPos(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = -15
val b = "747233429293018787918347987234564568"
val bScale = 10
val c = "1231212478987482988429808779810457634781310033452057698121208165201.2765435432"
val cScale = 10
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.subtract(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractDiffScalePosNeg(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 15
val b = "747233429293018787918347987234564568"
val bScale = -10
val c = "-7472334291698975400195996883915836900189542365.218615243205013"
val cScale = 15
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.subtract(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractEqualScaleNegNeg(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = -10
val b = "747233429293018787918347987234564568"
val bScale = -10
val c = "1.231212478987482241196379486791669716433397522230419E+61"
val cScale = -10
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.subtract(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractEqualScalePosPos(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 10
val b = "747233429293018787918347987234564568"
val bScale = 10
val c = "123121247898748224119637948679166971643339.7522230419"
val cScale = 10
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val result = aNumber.subtract(bNumber)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractMathContextDiffScaleNegPos(): Unit = {
val a = "986798656676789766678767876078779810457634781384756794987"
val aScale = -15
val b = "747233429293018787918347987234564568"
val bScale = 40
val c = "9.867986566767897666787678760787798104576347813847567949870000000000000E+71"
val cScale = -2
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(70, RoundingMode.HALF_DOWN)
val result = aNumber.subtract(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractMathContextDiffScalePosNeg(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 15
val b = "747233429293018787918347987234564568"
val bScale = -10
val c = "-7.4723342916989754E+45"
val cScale = -29
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(17, RoundingMode.DOWN)
val result = aNumber.subtract(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testSubtractMathContextEqualScalePosPos(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 10
val b = "747233429293018787918347987234564568"
val bScale = 10
val c = "1.23121247898749E+41"
val cScale = -27
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val bNumber = new BigDecimal(new BigInteger(b), bScale)
val mc = new MathContext(15, RoundingMode.CEILING)
val result = aNumber.subtract(bNumber, mc)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
@Test def testUlpNeg(): Unit = {
val a = "-3736186567876876578956958765675671119238118911893939591735"
val aScale = 45
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.ulp()
val res = "1E-45"
val resScale = 45
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testUlpPos(): Unit = {
val a = "3736186567876876578956958765675671119238118911893939591735"
val aScale = -45
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.ulp()
val res = "1E+45"
val resScale = -45
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testUlpZero(): Unit = {
val a = "0"
val aScale = 2
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.ulp()
val res = "0.01"
val resScale = 2
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testZeroPow0(): Unit = {
val c = "1"
val cScale = 0
val result = BigDecimal.ZERO.pow(0)
assertEquals(result.toString, c)
assertEquals(result.scale(), cScale)
}
}
| mdedetrich/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/math/BigDecimalArithmeticTest.scala | Scala | bsd-3-clause | 55,410 |
package com.metebalci
import org.scalatest._
import org.scalatest.Assertions._
/** Exercises composed lenses drilling from an Employee down to Address fields. */
class ModelLensSpec extends FunSuite {

  test("make street name upper case") {
    val employee = Employee(Company(Address("Buyukdere", "Ari")))
    // Compose company -> address -> street into a single lens.
    val streetLens =
      ModelLenses.companyLens andThen ModelLenses.addressLens andThen ModelLenses.streetLens
    assert(streetLens.get(employee) == "Buyukdere")
    assert(streetLens.put(streetLens.get(employee).toUpperCase, employee) ==
      Employee(Company(Address("BUYUKDERE", "Ari"))))
  }

  test("make block upper case") {
    val employee = Employee(Company(Address("Buyukdere", "Ari")))
    // Compose company -> address -> block into a single lens.
    val blockLens =
      ModelLenses.companyLens andThen ModelLenses.addressLens andThen ModelLenses.blockLens
    assert(blockLens.get(employee) == "Ari")
    assert(blockLens.put(blockLens.get(employee).toUpperCase, employee) ==
      Employee(Company(Address("Buyukdere", "ARI"))))
  }
}
| metebalci/experiment-lenses-scala | src/test/scala/09-ModelLensSpec.scala | Scala | gpl-2.0 | 889 |
package uk.gov.gds.ier.validation.constraints
import play.api.data.validation.{Invalid, Valid, Constraint}
import uk.gov.gds.ier.model.PartialNationality
import uk.gov.gds.ier.validation.{FormKeys, ErrorMessages}
import uk.gov.gds.ier.validation.constants.NationalityConstants._
import scala.Some
trait NationalityConstraints {
  self: ErrorMessages
    with FormKeys =>

  /** Rejects submissions listing more than the allowed number of "other" countries. */
  lazy val notTooManyNationalities = Constraint[PartialNationality](keys.nationality.key) { nationality =>
    if (nationality.otherCountries.size <= numberMaxOfOtherCountries) {
      Valid
    } else {
      Invalid("You can specifiy no more than five countries", keys.nationality)
    }
  }

  /** Requires at least one of: British/Irish ticked, an "other country" entered
    * (with the has-other-country flag set), or a no-nationality reason given.
    */
  lazy val nationalityIsChosen = Constraint[PartialNationality](keys.nationality.key) { nationality =>
    val britishOrIrish = nationality.british == Some(true) || nationality.irish == Some(true)
    val hasOtherCountry =
      nationality.otherCountries.exists(_.nonEmpty) && nationality.hasOtherCountry.exists(b => b)
    if (britishOrIrish) Valid
    else if (hasOtherCountry) Valid
    else if (nationality.noNationalityReason.isDefined) Valid
    else Invalid("Please answer this question", keys.nationality)
  }

  lazy val otherCountry0IsValid = otherCountryIsValid(0)
  lazy val otherCountry1IsValid = otherCountryIsValid(1)
  lazy val otherCountry2IsValid = otherCountryIsValid(2)

  /** Validates the i-th "other country" entry against the known-nationalities list.
    * Each constraint only fires when exactly i+1 entries have been supplied.
    */
  private def otherCountryIsValid(i: Int) = Constraint[PartialNationality](keys.nationality.otherCountries.key) { nationality =>
    if (nationality.otherCountries.isEmpty || !nationality.hasOtherCountry.exists(b => b)) Valid
    else if (nationality.otherCountries.size != i + 1) Valid
    else if (nationality.otherCountries.size > i
      && validNationalitiesList.contains(nationality.otherCountries(i).toLowerCase)) Valid
    else Invalid("This is not a valid entry", keys.nationality.otherCountries.item(i))
  }
}
| michaeldfallen/ier-frontend | app/uk/gov/gds/ier/validation/constraints/NationalityConstraints.scala | Scala | mit | 1,804 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.log
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
/** Verifies LogLevelScanner behaviour: one-shot scans and scheduled background
  * scans of log-level properties files, including graceful handling of an
  * invalid file.
  */
class LogLevelScannerTest extends Spec {
  override protected def before: Unit = {
    // Ensure stopping log level scanner
    Logger.stopScheduledLogLevelScan
  }
  override protected def after: Unit = {
    // Also stop any scanner a test may have left running.
    Logger.stopScheduledLogLevelScan
  }
  // Runs `f` once a scanner configured with `config` has completed at least one
  // scan; the scanner is always stopped afterwards. The busy-wait polls
  // scanCount, which the scanner increments after each scan.
  protected def withScanner[U](config: LogLevelScannerConfig)(f: => U): U = {
    val scanner = new LogLevelScanner
    try {
      val lastScanCount = scanner.scanCount.get
      scanner.setConfig(config)
      scanner.start
      // Wait the first scan
      while (scanner.scanCount.get == lastScanCount) {
        Thread.sleep(15)
      }
      f
    } finally {
      scanner.stop
    }
  }
  test("scan log levels only once") {
    val l = Logger("wvlet.log.test")
    l.setLogLevel(LogLevel.WARN)
    assert(l.getLogLevel == LogLevel.WARN)
    // Load log-test.properties
    LogLevelScanner.scanLogLevels
    assert(l.getLogLevel == LogLevel.DEBUG)
  }
  test("scan loglevels") {
    val l = Logger("wvlet.log.test")
    l.setLogLevel(LogLevel.WARN)
    assert(l.getLogLevel == LogLevel.WARN)
    // Load log-test.properties
    withScanner(
      LogLevelScannerConfig(LogLevelScanner.DEFAULT_LOGLEVEL_FILE_CANDIDATES, Duration(10, TimeUnit.MILLISECONDS))
    ) {
      assert(l.getLogLevel == LogLevel.DEBUG)
    }
  }
  test("load another loglevel file") {
    val l = Logger("wvlet.log.test")
    l.setLogLevel(LogLevel.WARN)
    assert(l.getLogLevel == LogLevel.WARN)
    withScanner(LogLevelScannerConfig(List("wvlet/log/custom-log.properties"), Duration(10, TimeUnit.MILLISECONDS))) {
      assert(l.getLogLevel == LogLevel.ERROR)
    }
  }
  test("load invalid loglevel file safely") {
    val l = Logger("wvlet.log.test")
    l.setLogLevel(LogLevel.TRACE)
    withScanner(
      LogLevelScannerConfig(List("wvlet/log/invalid-loglevel.properties"), Duration(10, TimeUnit.MILLISECONDS))
    ) {
      // Should ignore unknown log level string
      assert(l.getLogLevel == LogLevel.TRACE)
    }
  }
}
| wvlet/airframe | airframe-log/.jvm/src/test/scala/wvlet/log/LogLevelScannerTest.scala | Scala | apache-2.0 | 2,642 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp
/** Package-level aliases for named-entity-recognition components. */
package object ner {
  // Lazy so that referencing the package object does not force initialization
  // of BasicOntonotesNERWSJ until NER2 is actually used.
  lazy val NER2 = BasicOntonotesNERWSJ
  // Alias: the IOB-named CoNLL NER domain is backed by BioConllNerDomain.
  val IobConllNerDomain = BioConllNerDomain
}
| digitalreasoning/factorie-scala-210 | src/main/scala/cc/factorie/app/nlp/ner/package.scala | Scala | apache-2.0 | 890 |
package scalariform.formatter
import scalariform.parser._
import scalariform.formatter.preferences._
// format: OFF
// Each `input ==> expected` pair below (DSL from AbstractFormatterTest)
// asserts that formatting `input` produces `expected`.
class PackageFormatterTest extends AbstractFormatterTest {
  // Set to true to dump formatter diagnostics while debugging these cases.
  override val debug = false
  type Result = CompilationUnit
  def parse(parser: ScalaParser) = parser.compilationUnit()
  def format(formatter: ScalaFormatter, result: Result) = formatter.format(result)(FormatterState())
  "" ==> ""
  "package foo . bar . baz" ==> "package foo.bar.baz"
  // Nested package blocks are indented one level each by default.
  """package foo {
    |package bar {
    |class Baz
    |}
    |}""" ==>
    """package foo {
    |  package bar {
    |    class Baz
    |  }
    |}"""
  "package foo" ==> "package foo"
  """/* foo */
    |package wibble""" ==>
    """/* foo */
    |package wibble"""
  """package a
    |{}""" ==>
    """package a {}"""
  """package a {}
    |""" ==>
    """package a {}
    |"""
  {
    // With IndentPackageBlocks disabled, nested package blocks keep column 0.
    implicit val formattingPreferences = FormattingPreferences.setPreference(IndentPackageBlocks, false)
    """package foo {
    |package bar {
    |class Baz
    |}
    |}""" ==>
    """package foo {
    |package bar {
    |class Baz
    |}
    |}"""
  }
}
| jkinkead/scalariform | scalariform/src/test/scala/scalariform/formatter/PackageFormatterTest.scala | Scala | mit | 1,114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.radanalytics.streaming.amqp
import org.apache.qpid.proton.amqp.messaging.{AmqpValue, Section}
import org.apache.qpid.proton.message.Message
/**
* Provides implementation for a function which has an AMQP messages as input
* and provide a different type T instance as output
*
* @tparam T
*/
class AMQPBodyFunction[T] extends ((Message) => Option[T]) with Serializable {

  /**
   * Extracts the message body as a value of type T.
   *
   * @param message the AMQP message to inspect
   * @return Some(content) when the body is an AmqpValue section, None otherwise
   */
  override def apply(message: Message): Option[T] = {
    // Pattern matching replaces the isInstanceOf/asInstanceOf pair and, unlike
    // the original check, also covers a null body (no body section set).
    message.getBody() match {
      case value: AmqpValue => Some(value.getValue().asInstanceOf[T])
      case _                => None
    }
  }
}
| redhatanalytics/dstream-amqp | src/main/scala/io/radanalytics/streaming/amqp/AMQPBodyFunction.scala | Scala | apache-2.0 | 1,467 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License,
*
* Contributors:
* Hao Jiang - initial API and implementation
*/
package edu.uchicago.cs.encsel.util.word
import org.junit.Assert._
import org.junit.Test
class DictTest {
@Test
def testLookup(): Unit = {
val book = Dict.lookup("book")
assertEquals("book", book._1)
val rpt = Dict.lookup("rpt")
assertEquals("report", rpt._1)
val cmd = Dict.lookup("cmd")
println(cmd)
assertEquals("command", cmd._1)
val yr = Dict.lookup("yr")
assertEquals("year", yr._1)
val dt = Dict.lookup("dt")
assertEquals("date", dt._1)
val zip = Dict.lookup("zip")
assertEquals("zip", zip._1)
val non = Dict.lookup("jiang")
println(non)
assertEquals("jiang", non._1)
}
@Test
def testAbbrv: Unit = {
assertEquals("rpt", Dict.abbreviate("repeat"))
assertEquals("rpt", Dict.abbreviate("report"))
}
@Test
def testCorrectWord: Unit = {
assertEquals("identification", Dict.lookup("identificaiton")._1)
}
@Test
def testPlural: Unit = {
val codes = Dict.lookup("codes")
assertEquals("code", codes._1)
}
} | harperjiang/enc-selector | src/test/scala/edu/uchicago/cs/encsel/util/word/DictTest.scala | Scala | apache-2.0 | 1,891 |
import scala.quoted.*
object Test {
  // Negative compilation test: the nested splice `${??? : Expr[Int]}` is at the
  // wrong staging level, so the compiler must reject the quote on the line
  // carrying the `// error` marker below.
  inline def foo(x: Expr[Int])(using Quotes): Expr[Int] = '{ // error
    println("foo")
    ${
      ${??? : Expr[Int]}
      x
    }
  }
}
| dotty-staging/dotty | tests/neg-macros/inline-quote.scala | Scala | apache-2.0 | 182 |
package com.plasmaconduit.json
import com.plasmaconduit.validation.{Validation, Success => ValidationSuccess, Failure => ValidationFailure}
import scala.util.parsing.combinator._
object JsonParser extends RegexParsers {
  /** Parse-failure description returned to callers instead of throwing. */
  case class FormatError(message: String)
  // Whitespace is handled explicitly via `padded`, not skipped implicitly.
  override def skipWhitespace = false
  val commaSep = padded(",")
  // Any character allowed inside a string literal except the closing quote or a raw newline.
  val textChar = "[^\"\r\n]".r
  // Self-representing escapes: \\ , \" and \/ produce the escaped character itself.
  val asciiEsc: Parser[String] = "\\" | "\"" | "/"
  val backspaceEsc: Parser[String] = "b" ^^ { _ => "\b" }
  val formFeedEsc: Parser[String] = "f" ^^ { _ => "\f" }
  val newLineEsc: Parser[String] = "n" ^^ { _ => "\n" }
  val carriageReturnEsc: Parser[String] = "r" ^^ { _ => "\r" }
  val tabEsc: Parser[String] = "t" ^^ { _ => "\t" }
val escapedSeq: Parser[String] =
"\\" ~> (asciiEsc | backspaceEsc | formFeedEsc | newLineEsc | carriageReturnEsc | tabEsc | "u")
  val booleanTrue: Parser[JsBoolean] = "true" ^^ { _ => JsBoolean(true) }
  val booleanFalse: Parser[JsBoolean] = "false" ^^ { _ => JsBoolean(false) }
  val boolean: Parser[JsBoolean] = booleanTrue | booleanFalse
  // Object key: a quoted string, yielding the raw String (not a JsString).
  val keyString: Parser[String] = "\"" ~> rep(escapedSeq | textChar) <~ "\"" ^^ { n =>
    n.mkString("")
  }
  // Value-position string literal, wrapped as JsString.
  val valueString: Parser[JsString] = "\"" ~> rep(escapedSeq | textChar) <~ "\"" ^^ { n =>
    JsString(n.mkString(""))
  }
  val int: Parser[JsLong] = "-?[0-9]+".r ^^ { n => JsLong(n.toLong) }
  val float: Parser[JsFloat] = "-?[0-9]+\\.[0-9]+".r ^^ { n => JsFloat(n.toFloat) }
  val array: Parser[JsArray] = "[" ~> padded(repsep(value, commaSep)) <~ "]" ^^ { n => JsArray(n:_*) }
  // Key/value pairs of an object; on duplicate keys the last occurrence wins
  // because pairs are folded left into a Map.
  val objPairs: Parser[Map[String, JsValue]] = repsep(padded(keyString) ~ ":" ~ value, commaSep) ^^ { n =>
    n.foldLeft(Map[String, JsValue]()) {
      case (m, k ~ _ ~ v) => m + (k.toString -> v)
    }
  }
  val obj: Parser[JsObject] = "{" ~> padded(objPairs) <~ "}" ^^ { n => JsObject(n) }
  val nullP: Parser[JsNull.type] = "null" ^^ { _ => JsNull }
  // Wraps a parser so surrounding whitespace is consumed on both sides.
  private def padded[A](parser: Parser[A]): Parser[A] = opt(whiteSpace) ~> parser <~ opt(whiteSpace)
  // Note: `float` must be tried before `int` so "1.5" is not parsed as int 1.
  def value: Parser[JsValue] = padded(boolean | valueString | float | int | array | obj | nullP)
  /** Parses a complete JSON document, returning the value on success or a
    * FormatError (wrapping the combinator's error message) on failure.
    */
  def parse(input: String): Validation[FormatError, JsValue] = parseAll(value, input) match {
    case Success(json, _) => ValidationSuccess(json)
    case NoSuccess(error, _) => ValidationFailure(FormatError(error))
  }
} | plasmaconduit/json | src/main/scala/com/plasmaconduit/json/JsonParser.scala | Scala | mit | 2,343 |
package org.mdpeg.examples
import java.io.PrintWriter
import org.mdpeg.MarkdownParser
object ToHtmlExample extends App {
val inputFilePath = "insert you file path here" // for example, input.md
val outputHtmlFilePath = "insert you file path here" // for example, output.html
val fileText = scala.io.Source.fromFile(inputFilePath).mkString
//run markdown parser to get an AST
MarkdownParser.parse(fileText) match {
case Left(errors) => println(errors) // in this case something went wrong, meaning that parser failed to parse given document
case Right(ast) =>
//transform the AST to an HTML string
MarkdownParser.transformToHtml(ast) match {
case Left(error) => println(error) // error case
case Right(html) =>
//write the HTML to the output file
new PrintWriter(outputHtmlFilePath) { write(html); close() }
}
}
} | DRouh/mdpeg | examples/src/main/scala/org/mdpeg/examples/ToHtmlExample.scala | Scala | apache-2.0 | 894 |
/*
* Copyright 2018 SN127.fi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fi.sn127.tackler.api
import io.circe._
import io.circe.generic.semiauto._
/** A balance report broken into grouped sub-reports.
  *
  * @param metadata optional report metadata
  * @param title    report title
  * @param groups   the individual balance reports, one per group
  */
final case class BalanceGroupReport(
  metadata: Option[Metadata],
  title: String,
  groups: Seq[BalanceReport]
)

object BalanceGroupReport {
  // circe JSON codecs, semi-automatically derived from the case-class shape.
  implicit val decodeBalanceGroupReport: Decoder[BalanceGroupReport] = deriveDecoder[BalanceGroupReport]
  implicit val encodeBalanceGroupReport: Encoder[BalanceGroupReport] = deriveEncoder[BalanceGroupReport]
}
| jaa127/tackler | api/src/main/scala/fi/sn127/tackler/api/BalanceGroupReport.scala | Scala | apache-2.0 | 1,035 |
package reasonably.auth
import AuthAlgebra._
import reasonably.Algebra._
import reasonably.Tester._
// Test interpreter: maps Auth operations into the Tester effect using canned data.
object TestAuthenticator extends (Auth ~> Tester) {
  def apply[A](a: Auth[A]) = a match {
    case Login(uid, pwd) =>
      // Only these two fixed user/password pairs authenticate; anything else yields None.
      if (uid == "john.snow" && pwd == "Ghost")
        m => (Nil, Right(Some(User("john.snow"))))
      else if (uid == "john.rain" && pwd == "Ghost")
        m => (Nil, Right(Some(User("john.rain"))))
      else m => (Nil, Right(None))
    case HasPermission(u, _) =>
      // Only "john.snow" is granted permissions; the permission argument is ignored.
      m => (Nil, Right(u.id == "john.snow"))
  }
}
| enpassant/scalaz | src/test/scala/reasonably/auth/TestAuthenticator.scala | Scala | apache-2.0 | 542 |
package truerss.db
import java.time.{Clock, LocalDateTime}
import slick.jdbc.JdbcBackend.DatabaseDef
import truerss.db.driver.CurrentDriver
import truerss.services.NotFoundError
import zio.{IO, Task}
/** Slick-backed DAO for the `sources` table.
  *
  * Each query is lifted into a ZIO effect with the `~>` operator from
  * [[JdbcTaskSupport]] (run the action on `db`); `<~` additionally maps an
  * empty optional result into the typed error channel (see [[findOne]]).
  */
class SourcesDao(val db: DatabaseDef)(implicit
  driver: CurrentDriver
) {
  import JdbcTaskSupport._
  import driver.StateSupport._
  import driver.profile.api._
  import driver.query.sources
  // All source rows.
  def all: Task[Seq[Source]] = {
    sources.result ~> db
  }
  // Single source by id; `<~ sourceId` turns a missing row into a
  // NotFoundError (per the declared IO error type).
  def findOne(sourceId: Long): IO[NotFoundError, Source] = {
    sources.filter(_.id === sourceId).take(1).result.headOption ~> db <~ sourceId
  }
  // Delete by id; returns the number of affected rows.
  def delete(sourceId: Long): Task[Int] = {
    sources.filter(_.id === sourceId).delete ~> db
  }
  // Insert one source and return its database-generated id.
  def insert(source: Source): Task[Long] = {
    ((sources returning sources.map(_.id)) += source) ~> db
  }
  // Bulk insert (no generated ids returned).
  def insertMany(xs: Iterable[Source]) = {
    (sources ++= xs) ~> db
  }
  // (url, name) pairs of sources matching any of the given urls OR names.
  def findByUrlsAndNames(urls: Seq[String], names: Seq[String]): Task[Seq[(String, String)]] = {
    sources.filter(s => s.url.inSet(urls) || s.name.inSet(names))
      .map(x => (x.url, x.name))
      .result ~> db
  }
  // Count sources with this url, excluding row `id` when given —
  // presumably used for uniqueness validation on update (TODO confirm).
  def findByUrl(url: String, id: Option[Long]): Task[Int] = {
    id
      .map(id => sources.filter(s => s.url === url && !(s.id === id)))
      .getOrElse(sources.filter(s => s.url === url))
      .length
      .result ~> db
  }
  // Same as findByUrl, but keyed on the source name.
  def findByName(name: String, id: Option[Long]): Task[Int] = {
    id.map(id => sources.filter(s => s.name === name && !(s.id === id)))
      .getOrElse(sources.filter(s => s.name === name))
      .length
      .result ~> db
  }
  // Update the mutable columns of a source row (matched by source.id).
  def updateSource(source: Source): Task[Int] = {
    sources.filter(_.id === source.id)
      .map(s => (s.url, s.name, s.interval, s.state, s.normalized))
      .update(source.url, source.name, source.interval,
        source.state, source.normalized) ~> db
  }
  // Stamp the last-update time; defaults to "now" in UTC.
  def updateLastUpdateDate(sourceId: Long,
                           date: LocalDateTime = LocalDateTime.now(Clock.systemUTC())): Task[Int] = {
    sources.filter(_.id === sourceId)
      .map(s => s.lastUpdate).update(date) ~> db
  }
  // Set the source's state column only.
  def updateState(sourceId: Long, state: SourceState): Task[Int] = {
    sources.filter(_.id === sourceId)
      .map(_.state)
      .update(state) ~> db
  }
}
| truerss/truerss | src/main/scala/truerss/db/SourcesDao.scala | Scala | mit | 2,277 |
package vulkan.wrapper.registry.vtype
import vulkan.wrapper.registry.{Registry, VulkanComponentMappedData}
import scala.xml.Node
/** A registry `<type>` entry whose category is "struct" or "union"
  * (see the companion's filter); behavior is inherited from
  * [[VulkanMemberType]].
  */
class VulkanStructType(registry: Registry, node: Node) extends VulkanMemberType(registry, node) {
}
object VulkanStructType {
  /** Collects every `<type>` node in the registry XML whose `category`
    * attribute is "struct" or "union", wraps each as a [[VulkanStructType]],
    * and returns them as component-mapped data.
    */
  def apply(registry: Registry): VulkanComponentMappedData[VulkanStructType] = {
    val structLike = Set("struct", "union")
    val nodes = (registry.xml \ "types" \ "type").filter(node => structLike.contains(node \@ "category"))
    VulkanComponentMappedData(registry, nodes.map(new VulkanStructType(registry, _)))
  }
}
| MrInformatic/VulkanWrapper | src/vulkan/wrapper/registry/vtype/VulkanStructType.scala | Scala | mit | 514 |
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.lang._
object Array1 {
  // NOTE(review): reads index 2 with no constraint on a.length, so the
  // access is not provably in-bounds. The file path suggests this is an
  // intentionally *invalid* Stainless verification benchmark — do not
  // add a bounds check without confirming.
  def foo(a: Array[Int]): Int = {
    a(2)
  }
}
| epfl-lara/stainless | frontends/benchmarks/verification/invalid/Array1.scala | Scala | apache-2.0 | 134 |
package com.twitter.finagle.http2
import com.twitter.concurrent.AsyncQueue
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Stack
import com.twitter.io.{Buf, Writer, Reader}
import com.twitter.util._
import io.netty.handler.codec.http.{HttpMessage, HttpRequest}
import java.net.{Socket, InetAddress, InetSocketAddress}
import org.scalatest.funsuite.AnyFunSuite
private object Http2ListenerTest {
  // Block on a Future with a short test timeout.
  def await[A](f: Future[A], to: Duration = 5.seconds) = Await.result(f, to)
  /** Test fixture: boots an Http2Listener on an ephemeral port, records the
    * first message each accepted transport reads into `recvdByServer`, and
    * connects a raw loopback TCP socket so tests can speak wire-level bytes.
    */
  class Ctx {
    val recvdByServer = new AsyncQueue[Any]
    private[this] val server = {
      Http2Listener[HttpMessage, HttpMessage](Stack.Params.empty)
        .listen(new InetSocketAddress(0)) { transport =>
          transport.read().respond {
            case Return(m) => recvdByServer.offer(m)
            case Throw(exc) => recvdByServer.fail(exc)
          }
        }
    }
    // Raw client side: a plain Socket wrapped as a finagle Writer/Reader
    // pair, pointed at whatever port the listener actually bound.
    private[this] val (writer, reader) = {
      val port = server.boundAddress.asInstanceOf[InetSocketAddress].getPort
      val socket = new Socket(InetAddress.getLoopbackAddress, port)
      (
        Writer.fromOutputStream(socket.getOutputStream),
        Reader.fromStream(socket.getInputStream)
      )
    }
    // Send raw UTF-8 bytes to the server.
    def write(message: String): Future[Unit] =
      writer.write(Buf.Utf8(message))
    // Read the next chunk from the server, decoded as UTF-8.
    def read(): Future[Option[String]] =
      reader
        .read().map(_.map {
          case Buf.Utf8(message) => message
        })
    // Tear down both the client socket streams and the listener.
    def close(): Future[Unit] = {
      reader.discard()
      Closable.all(writer, server).close()
    }
  }
}
/** Wire-level tests of the h2c (HTTP/1.1 -> HTTP/2 cleartext) upgrade path.
  * Requests and expected responses are written as raw HTTP with CRLF line
  * endings substituted in at the end — keep the literals byte-exact.
  */
class Http2ListenerTest extends AnyFunSuite {
  import Http2ListenerTest._
  test("Http2Listener should upgrade neatly")(new Ctx {
    // Well-formed h2c upgrade request: expect a 101 Switching Protocols
    // and the request surfaced to the server-side transport.
    await(write("""GET http:/// HTTP/1.1
                  |x-http2-stream-id: 1
                  |upgrade: h2c
                  |HTTP2-Settings: AAEAABAAAAIAAAABAAN_____AAQAAP__AAUAAEAAAAZ_____
                  |connection: HTTP2-Settings,upgrade
                  |content-length: 0
                  |x-hello: world
                  |
                  |""".stripMargin.replaceAll("\\n", "\\r\\n")))
    assert(await(read()).get == """HTTP/1.1 101 Switching Protocols
                                  |connection: upgrade
                                  |upgrade: h2c
                                  |
                                  |""".stripMargin.replaceAll("\\n", "\\r\\n"))
    // Headers must survive the upgrade intact.
    val req = await(recvdByServer.poll()).asInstanceOf[HttpRequest]
    assert(req.headers.get("x-hello") == "world")
    await(close())
  })
  test("Http2Listener should not upgrade with an invalid URI (non-ASCII)")(new Ctx {
    // Non-ASCII characters in the request-target must be rejected with 400,
    // not upgraded.
    await(write(s"""GET http:///DSC02175拷貝.jpg HTTP/1.1
                   |x-http2-stream-id: 1
                   |upgrade: h2c
                   |HTTP2-Settings: AAEAABAAAAIAAAABAAN_____AAQAAP__AAUAAEAAAAZ_____
                   |connection: HTTP2-Settings,upgrade
                   |content-length: 0
                   |x-hello: world
                   |
                   |""".stripMargin.replaceAll("\\n", "\\r\\n")))
    assert(await(read()).get == """HTTP/1.0 400 Bad Request
                                  |Connection: close
                                  |Content-Length: 0
                                  |
                                  |""".stripMargin.replaceAll("\\n", "\\r\\n"))
    await(close())
  })
  test("Http2Listener should not upgrade with an invalid URI (encoding)")(new Ctx {
    // Malformed percent-encoding ("%%") must likewise yield 400.
    await(write(s"""GET http:///1%%.jpg HTTP/1.1
                   |x-http2-stream-id: 1
                   |upgrade: h2c
                   |HTTP2-Settings: AAEAABAAAAIAAAABAAN_____AAQAAP__AAUAAEAAAAZ_____
                   |connection: HTTP2-Settings,upgrade
                   |content-length: 0
                   |x-hello: world
                   |
                   |""".stripMargin.replaceAll("\\n", "\\r\\n")))
    assert(await(read()).get == """HTTP/1.0 400 Bad Request
                                  |Connection: close
                                  |Content-Length: 0
                                  |
                                  |""".stripMargin.replaceAll("\\n", "\\r\\n"))
    await(close())
  })
}
| twitter/finagle | finagle-http2/src/test/scala/com/twitter/finagle/http2/Http2ListenerTest.scala | Scala | apache-2.0 | 3,926 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.IOException
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.internal.io.FileCommitProtocol
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTablePartition}
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command._
/**
* A command for writing data to a [[HadoopFsRelation]]. Supports both overwriting and appending.
* Writing to dynamic partitions is also supported.
*
* @param staticPartitions partial partitioning spec for write. This defines the scope of partition
* overwrites: when the spec is empty, all partitions are overwritten.
* When it covers a prefix of the partition keys, only partitions matching
* the prefix are overwritten.
* @param ifPartitionNotExists If true, only write if the partition does not exist.
* Only valid for static partitions.
*/
case class InsertIntoHadoopFsRelationCommand(
    outputPath: Path,
    staticPartitions: TablePartitionSpec,
    ifPartitionNotExists: Boolean,
    partitionColumns: Seq[Attribute],
    bucketSpec: Option[BucketSpec],
    fileFormat: FileFormat,
    options: Map[String, String],
    query: LogicalPlan,
    mode: SaveMode,
    catalogTable: Option[CatalogTable],
    fileIndex: Option[FileIndex])
  extends RunnableCommand {
  import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.escapePathName
  override def children: Seq[LogicalPlan] = query :: Nil
  // Flow: validate schema -> resolve catalog partition state -> decide
  // whether to write based on SaveMode -> delegate to FileFormatWriter,
  // then sync the catalog/caches.
  override def run(sparkSession: SparkSession, children: Seq[SparkPlan]): Seq[Row] = {
    assert(children.length == 1)
    // Most formats don't do well with duplicate columns, so lets not allow that
    if (query.schema.fieldNames.length != query.schema.fieldNames.distinct.length) {
      val duplicateColumns = query.schema.fieldNames.groupBy(identity).collect {
        case (x, ys) if ys.length > 1 => "\\"" + x + "\\""
      }.mkString(", ")
      throw new AnalysisException(s"Duplicate column(s): $duplicateColumns found, " +
        "cannot save to file.")
    }
    val hadoopConf = sparkSession.sessionState.newHadoopConfWithOptions(options)
    val fs = outputPath.getFileSystem(hadoopConf)
    val qualifiedOutputPath = outputPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
    // Partitions are catalog-tracked only when filesource partition
    // management is on AND the table is a partitioned catalog table.
    val partitionsTrackedByCatalog = sparkSession.sessionState.conf.manageFilesourcePartitions &&
      catalogTable.isDefined &&
      catalogTable.get.partitionColumnNames.nonEmpty &&
      catalogTable.get.tracksPartitionsInCatalog
    var initialMatchingPartitions: Seq[TablePartitionSpec] = Nil
    var customPartitionLocations: Map[TablePartitionSpec, String] = Map.empty
    var matchingPartitions: Seq[CatalogTablePartition] = Seq.empty
    // When partitions are tracked by the catalog, compute all custom partition locations that
    // may be relevant to the insertion job.
    if (partitionsTrackedByCatalog) {
      matchingPartitions = sparkSession.sessionState.catalog.listPartitions(
        catalogTable.get.identifier, Some(staticPartitions))
      initialMatchingPartitions = matchingPartitions.map(_.spec)
      customPartitionLocations = getCustomPartitionLocations(
        fs, catalogTable.get, qualifiedOutputPath, matchingPartitions)
    }
    val pathExists = fs.exists(qualifiedOutputPath)
    // If we are appending data to an existing dir.
    val isAppend = pathExists && (mode == SaveMode.Append)
    val committer = FileCommitProtocol.instantiate(
      sparkSession.sessionState.conf.fileCommitProtocolClass,
      jobId = java.util.UUID.randomUUID().toString,
      outputPath = outputPath.toString,
      isAppend = isAppend)
    // Decide whether anything should be written; Overwrite may first need
    // to clear matching partitions (unless ifPartitionNotExists short-circuits).
    val doInsertion = (mode, pathExists) match {
      case (SaveMode.ErrorIfExists, true) =>
        throw new AnalysisException(s"path $qualifiedOutputPath already exists.")
      case (SaveMode.Overwrite, true) =>
        if (ifPartitionNotExists && matchingPartitions.nonEmpty) {
          false
        } else {
          deleteMatchingPartitions(fs, qualifiedOutputPath, customPartitionLocations, committer)
          true
        }
      case (SaveMode.Append, _) | (SaveMode.Overwrite, _) | (SaveMode.ErrorIfExists, false) =>
        true
      case (SaveMode.Ignore, exists) =>
        !exists
      case (s, exists) =>
        throw new IllegalStateException(s"unsupported save mode $s ($exists)")
    }
    if (doInsertion) {
      // Callback for updating metastore partition metadata after the insertion job completes.
      def refreshPartitionsCallback(updatedPartitions: Seq[TablePartitionSpec]): Unit = {
        if (partitionsTrackedByCatalog) {
          // Register partitions created by the job...
          val newPartitions = updatedPartitions.toSet -- initialMatchingPartitions
          if (newPartitions.nonEmpty) {
            AlterTableAddPartitionCommand(
              catalogTable.get.identifier, newPartitions.toSeq.map(p => (p, None)),
              ifNotExists = true).run(sparkSession)
          }
          // ...and, on overwrite, drop catalog entries for partitions the
          // job did not rewrite (their files were already deleted above).
          if (mode == SaveMode.Overwrite) {
            val deletedPartitions = initialMatchingPartitions.toSet -- updatedPartitions
            if (deletedPartitions.nonEmpty) {
              AlterTableDropPartitionCommand(
                catalogTable.get.identifier, deletedPartitions.toSeq,
                ifExists = true, purge = false,
                retainData = true /* already deleted */).run(sparkSession)
            }
          }
        }
      }
      FileFormatWriter.write(
        sparkSession = sparkSession,
        plan = children.head,
        fileFormat = fileFormat,
        committer = committer,
        outputSpec = FileFormatWriter.OutputSpec(
          qualifiedOutputPath.toString, customPartitionLocations),
        hadoopConf = hadoopConf,
        partitionColumns = partitionColumns,
        bucketSpec = bucketSpec,
        refreshFunction = refreshPartitionsCallback,
        options = options)
      // refresh cached files in FileIndex
      fileIndex.foreach(_.refresh())
      // refresh data cache if table is cached
      sparkSession.catalog.refreshByPath(outputPath.toString)
    } else {
      logInfo("Skipping insertion into a relation that already exists.")
    }
    Seq.empty[Row]
  }
  /**
   * Deletes all partition files that match the specified static prefix. Partitions with custom
   * locations are also cleared based on the custom locations map given to this class.
   */
  private def deleteMatchingPartitions(
      fs: FileSystem,
      qualifiedOutputPath: Path,
      customPartitionLocations: Map[TablePartitionSpec, String],
      committer: FileCommitProtocol): Unit = {
    // Build "/k1=v1/k2=v2" for the static prefix of the partition columns;
    // empty spec means the whole table directory is in scope.
    val staticPartitionPrefix = if (staticPartitions.nonEmpty) {
      "/" + partitionColumns.flatMap { p =>
        staticPartitions.get(p.name) match {
          case Some(value) =>
            Some(escapePathName(p.name) + "=" + escapePathName(value))
          case None =>
            None
        }
      }.mkString("/")
    } else {
      ""
    }
    // first clear the path determined by the static partition keys (e.g. /table/foo=1)
    val staticPrefixPath = qualifiedOutputPath.suffix(staticPartitionPrefix)
    if (fs.exists(staticPrefixPath) && !committer.deleteWithJob(fs, staticPrefixPath, true)) {
      throw new IOException(s"Unable to clear output " +
        s"directory $staticPrefixPath prior to writing to it")
    }
    // now clear all custom partition locations (e.g. /custom/dir/where/foo=2/bar=4)
    for ((spec, customLoc) <- customPartitionLocations) {
      assert(
        (staticPartitions.toSet -- spec).isEmpty,
        "Custom partition location did not match static partitioning keys")
      val path = new Path(customLoc)
      if (fs.exists(path) && !committer.deleteWithJob(fs, path, true)) {
        throw new IOException(s"Unable to clear partition " +
          s"directory $path prior to writing to it")
      }
    }
  }
  /**
   * Given a set of input partitions, returns those that have locations that differ from the
   * Hive default (e.g. /k1=v1/k2=v2). These partitions were manually assigned locations by
   * the user.
   *
   * @return a mapping from partition specs to their custom locations
   */
  private def getCustomPartitionLocations(
      fs: FileSystem,
      table: CatalogTable,
      qualifiedOutputPath: Path,
      partitions: Seq[CatalogTablePartition]): Map[TablePartitionSpec, String] = {
    partitions.flatMap { p =>
      val defaultLocation = qualifiedOutputPath.suffix(
        "/" + PartitioningUtils.getPathFragment(p.spec, table.partitionSchema)).toString
      // Compare fully-qualified paths so scheme/authority differences
      // don't produce false "custom location" hits.
      val catalogLocation = new Path(p.location).makeQualified(
        fs.getUri, fs.getWorkingDirectory).toString
      if (catalogLocation != defaultLocation) {
        Some(p.spec -> catalogLocation)
      } else {
        None
      }
    }.toMap
  }
}
| bOOm-X/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala | Scala | apache-2.0 | 9,913 |
package scalaz
package std
// Low-priority instances, kept in a parent trait so they lose implicit
// resolution against the more specific instances in EitherInstances
// (e.g. eitherOrder, which also provides Equal).
sealed trait EitherInstances0 {
  /** Equal for Either: Lefts compared with A's Equal, Rights with B's. */
  implicit def eitherEqual[A, B](implicit A0: Equal[A], B0: Equal[B]): Equal[Either[A, B]] =
    new EitherEqual[A, B] {
      implicit def A =
        A0
      implicit def B =
        B0
    }
}
trait EitherInstances extends EitherInstances0 {
  /** Bitraverse over both sides of an Either. */
  implicit val eitherInstance: Bitraverse[Either] =
    new Bitraverse[Either] {
      override def bimap[A, B, C, D](fab: Either[A, B])(f: A => C, g: B => D) =
        fab match {
          case Left (a) =>
            Left(f(a))
          case Right (b) =>
            Right(g(b))
        }
      def bitraverseImpl[G[_]: Applicative, A, B, C, D](fab: Either[A, B])(f: A => G[C], g: B => G[D]) =
        fab match {
          case Left (a) =>
            Applicative[G].map(f(a))((b) => Left(b))
          case Right (b) =>
            Applicative[G].map(g(b))((d) => Right(d))
        }
    }
  /** Right-biased monad (with error type L on the Left), traversal,
    * stack-safe tailrecM, and cozip for Either[L, *].
    */
  implicit def eitherMonad[L]: Traverse[Either[L, *]] with MonadError[Either[L, *], L] with BindRec[Either[L, *]] with Cozip[Either[L, *]] =
    new Traverse[Either[L, *]] with MonadError[Either[L, *], L] with BindRec[Either[L, *]] with Cozip[Either[L, *]] {
      def bind[A, B](fa: Either[L, A])(f: A => Either[L, B]) =
        fa match {
          case Left (a) =>
            Left(a)
          case Right (b) =>
            f(b)
        }
      override def map[A, B](fa: Either[L, A])(f: A => B) =
        fa match {
          case Right (b) =>
            Right(f(b))
          // A Left carries no B, so the same value is reused; the cast is
          // safe because the Right type parameter is phantom here.
          case a =>
            a.asInstanceOf[Either[L, B]]
        }
      override def apply2[A, B, C](fa: => Either[L, A], fb: => Either[L, B])(f: (A, B) => C): Either[L, C] =
        fa match {
          case Right (a) =>
            fb match {
              case Right (b) =>
                Right(f(a, b))
              case e =>
                e.asInstanceOf[Either[L, C]]
            }
          case e =>
            e.asInstanceOf[Either[L, C]]
        }
      def handleError[A](fa: Either[L, A])(f: L => Either[L, A]) =
        fa match {
          case a @ Right (_) =>
            a
          case Left (a) =>
            f(a)
        }
      def raiseError[A](e: L) =
        Left(e)
      def point[A](a: => A) =
        Right(a)
      def traverseImpl[G[_]: Applicative, A, B](fa: Either[L, A])(f: A => G[B]) =
        fa match {
          case Left (x) =>
            Applicative[G].point(Left(x))
          case Right (x) =>
            Applicative[G].map(f(x))(Right(_))
        }
      override def foldRight[A, B](fa: Either[L, A], z: => B)(f: (A, => B) => B) =
        fa match {
          case Left (_) =>
            z
          case Right (a) =>
            f(a, z)
        }
      // Distribute a disjunction out of the Right of an Either.
      def cozip[A, B](a: Either[L, A \\/ B]) =
        a match {
          case Left (l) =>
            -\\/(Left(l))
          case Right (e) =>
            e match {
              case -\\/ (a) =>
                -\\/(Right(a))
              case \\/- (b) =>
                \\/-(Right(b))
            }
        }
      // Constant stack space: loops on Right(-\/(a)) instead of recursing
      // through bind.
      @scala.annotation.tailrec def tailrecM[A, B](a: A)(f: A => Either[L, A \\/ B]): Either[L, B] =
        f(a) match {
          case Left (l) =>
            Left(l)
          case Right (-\\/ (a)) =>
            tailrecM(a)(f)
          case Right (\\/- (b)) =>
            Right(b)
        }
    }
  /** Order for Either: any Left sorts before any Right. */
  implicit def eitherOrder[A, B](implicit OrderA: Order[A], OrderB: Order[B]): Order[Either[A, B]] =
    new EitherOrder[A, B] {
      implicit def A =
        OrderA
      implicit def B =
        OrderB
    }
  /** Reassociation of nested Eithers in either direction. */
  implicit def eitherAssociative: Associative[Either] =
    new Associative[Either] {
      override def reassociateLeft[A, B, C](f: Either[A, Either[B, C]]): Either[Either[A, B], C] =
        f.fold((a) => Left(Left(a)), _.fold((b) => Left(Right(b)), Right(_)))
      override def reassociateRight[A, B, C](f: Either[Either[A, B], C]): Either[A, Either[B, C]] =
        f.fold(_.fold(Left(_), (b) => Right(Left(b))), (c) => Right(Right(c)))
    }
  /** Show rendering "Left(...)" / "Right(...)" via the sides' Show instances. */
  implicit def eitherShow[A, B](implicit SA: Show[A], SB: Show[B]): Show[Either[A, B]] =
    {
      import scalaz.syntax.show._
      Show.show( {
        case Left (a) =>
          cord"Left(${a})"
        case Right (b) =>
          cord"Right(${b})"
      } )
    }
}
/** Import target (`import scalaz.std.either._`) bundling all instances. */
object either extends EitherInstances {
}
// Implementation trait backing eitherEqual/eitherOrder; equal iff both
// values are the same constructor and the contents are equal.
private trait EitherEqual[A, B] extends Equal[Either[A, B]] {
  implicit def A: Equal[A]
  implicit def B: Equal[B]
  final override def equal(f1: Either[A, B], f2: Either[A, B]) =
    (f1, f2) match {
      case (Left (a1), Left (a2)) =>
        A.equal(a1, a2)
      case (Right (b1), Right (b2)) =>
        B.equal(b1, b2)
      case (Right (_), Left (_)) | (Left (_), Right (_)) =>
        false
    }
  // Natural (==-compatible) only when both component Equals are.
  override val equalIsNatural: Boolean =
    A.equalIsNatural && B.equalIsNatural
}
// Implementation trait backing eitherOrder: compares within the same
// constructor, and orders every Left strictly before every Right.
private trait EitherOrder[A, B] extends Order[Either[A, B]] with EitherEqual[A, B] {
  implicit def A: Order[A]
  implicit def B: Order[B]
  import Ordering._
  def order(f1: Either[A, B], f2: Either[A, B]) =
    (f1, f2) match {
      case (Left (x), Left (y)) =>
        A.order(x, y)
      case (Right (x), Right (y)) =>
        B.order(x, y)
      case (Left (_), Right (_)) =>
        LT
      case (Right (_), Left (_)) =>
        GT
    }
} | puffnfresh/language-scala | test/fixtures/Either.scala.json.golden.scala | Scala | mit | 5,233 |
package org.openurp.edu.eams.teach.schedule.web.action
import java.util.Arrays
import org.apache.commons.lang3.ArrayUtils
import org.beangle.commons.collection.Collections
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.commons.entity.metadata.Model
import org.openurp.base.Department
import org.openurp.base.Semester
import org.openurp.edu.base.Project
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.eams.teach.lesson.helper.LessonSearchHelper
import org.openurp.edu.eams.teach.lesson.model.CourseScheduleBean.CourseStatusEnum
import org.openurp.edu.eams.teach.lesson.service.LessonService
import org.openurp.edu.eams.teach.schedule.model.LessonForDepart
import org.openurp.edu.eams.web.action.common.SemesterSupportAction
/** Web action for allocating a semester's lessons (teaching tasks) to
  * departments for timetable arranging. Allocations are persisted as
  * [[LessonForDepart]] rows, each holding the set of lesson ids assigned
  * to one department for a (project, semester) pair.
  *
  * NOTE(review): this appears to be machine-converted Java code (mutable
  * maps, index loops, `.get(i)` access); comments below describe observed
  * behavior only.
  */
class AllocateLessonForArrangeAction extends SemesterSupportAction {
  // Injected collaborators (framework-populated).
  var lessonSearchHelper: LessonSearchHelper = _
  var lessonService: LessonService = _
  // Resolve the semester: request parameter "semester.id" wins, then the
  // "semester" session attribute, then the project's current semester.
  private def getSemester(): Semester = {
    val semesterId = getInt("semester.id")
    if (semesterId == null) (if (getAttribute("semester") == null) semesterService.getCurSemester(getProject) else getAttribute("semester").asInstanceOf[Semester]) else entityDao.get(classOf[Semester],
      semesterId)
  }
  // Query builder for LessonForDepart rows of this semester/project,
  // restricted to the given departments ("1=2" forces an empty result
  // when the department list is empty).
  private def getLessonForDepartsBySemester(semester: Semester, departments: List[Department]): OqlBuilder[LessonForDepart] = {
    val builder = OqlBuilder.from(classOf[LessonForDepart], "lessonForDepart")
      .where("lessonForDepart.semester = :semester", semester)
      .where("lessonForDepart.project = :project", getProject)
    if (departments.isEmpty) {
      builder.where("1=2")
    } else {
      builder.where("lessonForDepart.department in (:departments)", departments)
    }
    builder
  }
  // Collect, per department, allocated lesson ids whose Lesson row no
  // longer exists (stale allocations).
  private def getWastedLessonIds(semester: Semester): Map[Department, Set[Long]] = {
    val builder = OqlBuilder.from(classOf[LessonForDepart], "lessonForDepart")
    builder.join("lessonForDepart.lessonIds", "lessonId")
    builder.where("not exists(from org.openurp.edu.teach.lesson.Lesson lesson where lessonId=lesson.id)")
    builder.where("lessonForDepart.semester = :semester", semester)
    builder.where("lessonForDepart.project = :project", getProject)
    builder.select("lessonForDepart,lessonId")
    val lessonForDepartAndLessonIds = entityDao.search(builder).asInstanceOf[List[Array[Any]]]
    val wastedLessonIds = Collections.newMap[Any]
    for (objects <- lessonForDepartAndLessonIds) {
      val department = objects(0).asInstanceOf[LessonForDepart].department
      val lessonId = objects(1).asInstanceOf[java.lang.Long]
      if (wastedLessonIds.keySet.contains(department)) {
        wastedLessonIds.get(department).add(lessonId)
      } else {
        wastedLessonIds.put(department, Collections.newHashSet(lessonId))
      }
    }
    wastedLessonIds
  }
  // Overview page: first purges stale lesson ids from existing
  // allocations, then renders per-department allocation counts and the
  // number of still-unallocated lessons.
  override def index(): String = {
    setSemesterDataRealm(hasStdTypeCollege)
    val semester = getSemester
    val project = getProject
    if (semester == null) {
      return forwardError("error.parameters.needed")
    }
    // Find (allocation, lessonId) pairs whose Lesson has been deleted.
    val builder = OqlBuilder.from(classOf[LessonForDepart], "lessonForDepart")
    builder.join("lessonForDepart.lessonIds", "lessonId")
    builder.where("not exists(from org.openurp.edu.teach.lesson.Lesson lesson where lessonId=lesson.id)")
    builder.where("lessonForDepart.semester = :semester", semester)
    builder.where("lessonForDepart.project = :project", project)
    builder.select("lessonForDepart,lessonId")
    val lessonForDepartAndLessonIds = entityDao.search(builder).asInstanceOf[List[Array[Any]]]
    val toSave = Collections.newBuffer[Any]
    val wastedLessonIds = Collections.newMap[Any]
    for (objects <- lessonForDepartAndLessonIds) {
      val lessonForDepart = objects(0).asInstanceOf[LessonForDepart]
      val lessonId = objects(1).asInstanceOf[java.lang.Long]
      if (wastedLessonIds.keySet.contains(lessonForDepart)) {
        wastedLessonIds.get(lessonForDepart).add(lessonId)
      } else {
        wastedLessonIds.put(lessonForDepart, Collections.newHashSet(lessonId))
      }
    }
    // Remove the stale ids and persist the cleaned allocations.
    for (lessonForDepart <- wastedLessonIds.keySet) {
      lessonForDepart.getLessonIds.removeAll(wastedLessonIds.get(lessonForDepart))
      toSave.add(lessonForDepart)
    }
    entityDao.saveOrUpdate(toSave)
    // Departments that currently have allocations this semester.
    val departmentQuery = OqlBuilder.from(classOf[LessonForDepart], "lfd").where("lfd.semester= :semester",
      semester)
      .where("lfd.project = :project", project)
      .select("select distinct lfd.department")
    val departments = entityDao.search(departmentQuery).asInstanceOf[List[Department]]
    val departs = getDeparts
    // All lessons taught by the user's visible departments this semester.
    var lessons: List[Lesson] = null
    if (departs.isEmpty) {
      lessons = Collections.emptyList()
    } else {
      val query = OqlBuilder.from(classOf[Lesson], "lesson")
      query.where("lesson.teachDepart in (:departments)", departs)
      query.where("lesson.semester = :semester", semester)
      query.where("lesson.project = :project", project)
      lessons = entityDao.search(query)
    }
    // Per-department allocated lesson counts; `size` accumulates the total
    // allocated so unallocated = total lessons - size.
    val departmentLesson = Collections.newMap[Any]
    val lessonForDeparts = entityDao.search(getLessonForDepartsBySemester(semester, departments))
    var size = 0
    for (lessonForDepart <- lessonForDeparts) {
      val department = lessonForDepart.department
      val lessonIds = lessonForDepart.getLessonIds
      departmentLesson.put(department, lessonIds.size)
      size += lessonIds.size
    }
    // Departments present in the catalog but with no allocations get 0.
    val keySet = departmentLesson.keySet
    for (department <- departments if !keySet.contains(department)) {
      departmentLesson.put(department, 0)
    }
    put("departmentMap", departmentLesson)
    put("notAllocateSize", lessons.size - size)
    put("totalSize", lessons.size)
    put("semester", semester)
    val defaultDepartmentId = getLong("defaultDepartmentId")
    if (null != defaultDepartmentId) {
      put("defaultDepartmentId", defaultDepartmentId)
    }
    var defaultNotLocate = getBoolean("defaultNotLocate")
    if (defaultNotLocate == null) {
      defaultNotLocate = true
    }
    put("defaultNotLocate", defaultNotLocate)
    forward()
  }
  // Lesson search page: "notLocate" lists lessons not yet allocated to any
  // department; otherwise lists lessons, optionally filtered to one
  // department's allocation.
  override def search(): String = {
    val builder = lessonSearchHelper.buildQuery()
    builder.where("lesson.project.id=:projectid1", getSession.get("projectId").asInstanceOf[java.lang.Integer])
    val semester = getSemester
    val notLocate = getBool("notLocate")
    val departmentId = getInt("departmentId")
    val departments = lessonService.teachDepartsOfSemester(Collections.newBuffer[Any](getProject), getDeparts,
      semester)
    put("notLocate", notLocate)
    if (notLocate) {
      // Only lessons with no LessonForDepart allocation at all.
      builder.where("not exists (from org.openurp.edu.eams.teach.schedule.model.LessonForDepart lfd join lfd.lessonIds lessonId where lesson.id = lessonId)")
      put("departmentsToLocate", departments)
      val locateDepartmentId = getInt("locateDepartmentId")
      if (null != locateDepartmentId) {
        put("lessons", entityDao.search(builder))
        put("courseStatusEnums", CourseStatusEnum.values)
        put("semester", semester)
        put("locateDepartment", entityDao.get(classOf[Department], locateDepartmentId))
        return forward()
      }
    }
    if (null != departmentId) {
      // Restrict to lessons allocated to the selected department.
      builder.where("exists (from org.openurp.edu.eams.teach.schedule.model.LessonForDepart lfd join lfd.lessonIds lessonId where lesson.id = lessonId and lfd.department.id = " +
        departmentId +
        ")")
      put("department", entityDao.get(classOf[Department], departmentId))
    }
    put("lessons", entityDao.search(builder))
    put("courseStatusEnums", CourseStatusEnum.values)
    put("semester", semester)
    if (!notLocate && departmentId == null) {
      // No department filter: expose lesson -> allocated-department map
      // so the view can show each lesson's current owner.
      val lessonDepartMap = Collections.newMap[Any]
      val lessonForDeparts = entityDao.search(getLessonForDepartsBySemester(semester, departments))
      for (lessonForDepart <- lessonForDeparts; lessonId <- lessonForDepart.getLessonIds) {
        lessonDepartMap.put(entityDao.get(classOf[Lesson], lessonId), lessonForDepart.department)
      }
      put("lessonDepartMap", lessonDepartMap)
    }
    put("teachDeparts", departments)
    forward()
  }
  // Form for editing the arrange-time window of every allocation at once;
  // builds a comma-separated id list for the batch-save round trip.
  def batchEditArrangeTime(): String = {
    val semester = getSemester
    val departments = lessonService.teachDepartsOfSemester(Collections.newBuffer[Any](getProject), getDeparts,
      semester)
    val lessonForDeparts = entityDao.search(getLessonForDepartsBySemester(getSemester, departments))
    var lessonForDepartIds = ""
    for (i <- 0 until lessonForDeparts.size) {
      if (i > 0) {
        lessonForDepartIds += ","
      }
      lessonForDepartIds += lessonForDeparts.get(i).id
    }
    put("lessonForDeparts", lessonForDeparts)
    put("semester", semester)
    put("lessonForDepartIds", lessonForDepartIds)
    forward()
  }
  // Persist the begin/end arrange times submitted by batchEditArrangeTime;
  // parameter names are keyed by each allocation's id.
  def batchSaveArrangeTime(): String = {
    val lessonForDepartIds = getLongIds("lessonForDepart")
    val lessonForDeparts = entityDao.get(classOf[LessonForDepart], lessonForDepartIds)
    for (lessonForDepart <- lessonForDeparts) {
      lessonForDepart.setBeginAt(getDateTime("lessonForDepart" + lessonForDepart.id + ".beginAt"))
      lessonForDepart.setEndAt(getDateTime("lessonForDepart" + lessonForDepart.id + ".endAt"))
    }
    try {
      entityDao.saveOrUpdate(lessonForDeparts)
      redirect("index", "info.save.success")
    } catch {
      case e: Exception => redirect("index", "info.save.failure")
    }
  }
  // Allocate the selected lessons to one department, merging into the
  // department's existing LessonForDepart row when one exists.
  override def save(): String = {
    val departmentId = getIntId("department")
    val lessonIds = getLongIds("lesson")
    if (null == departmentId || ArrayUtils.isEmpty(lessonIds)) {
      return forwardError("error.parameters.needed")
    }
    val semester = getSemester
    val project = getProject
    val department = Model.newInstance(classOf[Department], departmentId)
    val lessons = entityDao.get(classOf[Lesson], lessonIds)
    // Size mismatch means some selected lessons vanished since the page
    // was rendered.
    if (lessonIds.length != lessons.size) {
      return forwardError("所选任务可能已被删除")
    }
    val lessonIdSet = Collections.newHashSet(Arrays.asList(lessonIds:_*))
    val lessonForDeparts = entityDao.search(OqlBuilder.from(classOf[LessonForDepart], "lessonForDepart")
      .where("lessonForDepart.department.id = :departmentId", departmentId)
      .where("lessonForDepart.semester = :semester", semester)
      .where("lessonForDepart.project = :project", project))
    var lessonForDepart: LessonForDepart = null
    if (lessonForDeparts.isEmpty) {
      lessonForDepart = new LessonForDepart(lessonIdSet, department, semester, project)
    } else {
      lessonForDepart = lessonForDeparts.get(0)
      lessonForDepart.addLessonIds(lessonIdSet)
    }
    try {
      entityDao.saveOrUpdate(Collections.singletonList(lessonForDepart))
      redirect("index", "info.save.success")
    } catch {
      case e: Exception => {
        logger.info("saveAndForwad failure", e)
        redirect("index", "info.save.failure")
      }
    }
  }
  // Remove the selected lessons from one department's allocation; the
  // allocation row is deleted entirely once it holds no lesson ids.
  override def remove(): String = {
    val departmentId = getIntId("department")
    val notLocate = getBool("notLocate")
    val lessonIds = getLongIds("lesson")
    if (null == departmentId || ArrayUtils.isEmpty(lessonIds)) {
      return forwardError("error.parameters.needed")
    }
    val semester = getSemester
    val project = getProject
    val lessonForDeparts = entityDao.search(OqlBuilder.from(classOf[LessonForDepart], "lessonForDepart")
      .where("lessonForDepart.department.id = :departmentId", departmentId)
      .where("lessonForDepart.semester = :semester", semester)
      .where("lessonForDepart.project = :project", project))
    if (lessonForDeparts.isEmpty) {
      return forwardError("error.parameters.illegal")
    }
    val lessonForDepart = lessonForDeparts.get(0)
    lessonForDepart.removeLessonIds(Arrays.asList(lessonIds:_*))
    try {
      if (lessonForDepart.getLessonIds.isEmpty) {
        entityDao.remove(Collections.singletonList(lessonForDepart))
      } else {
        entityDao.saveOrUpdate(Collections.singletonList(lessonForDepart))
      }
      // Redirect parameters restore the user's previous view selection.
      redirect("index", "info.save.success", "&defaultDepartmentId=" + departmentId + "&defaultNotLocate=" +
        notLocate)
    } catch {
      case e: Exception => {
        logger.info("saveAndForwad failure", e)
        redirect("index", "info.save.failure", "&defaultDepartmentId=" + departmentId + "&defaultNotLocate=" +
          notLocate)
      }
    }
  }
  // Auto-allocate every still-unallocated lesson to its own teaching
  // department, creating allocation rows as needed.
  def autoLocate(): String = {
    val semester = getSemester
    val project = getProject
    val builder = OqlBuilder.from(classOf[Lesson], "lesson")
    builder.where("not exists(from org.openurp.edu.eams.teach.schedule.model.LessonForDepart lessonForDepart join lessonForDepart.lessonIds lessonId where lesson.id = lessonId)")
      .where("lesson.semester = :semester", semester)
      .where("lesson.project = :project", project)
    val lessons = entityDao.search(builder)
    val departmentRestricts = lessonService.teachDepartsOfSemester(Collections.newBuffer[Any](project),
      getDeparts, semester)
    val lessonForDeparts = entityDao.search(getLessonForDepartsBySemester(semester, departmentRestricts))
    // Index existing allocations by department for merge-or-create below.
    val lessonForDepartMap = Collections.newMap[Any]
    for (lessonForDepart <- lessonForDeparts) {
      lessonForDepartMap.put(lessonForDepart.department, lessonForDepart)
    }
    val departments = lessonForDepartMap.keySet
    for (lesson <- lessons) {
      val department = lesson.getTeachDepart
      val lessonId = lesson.id
      var lessonForDepart: LessonForDepart = null
      if (departments.contains(department)) {
        lessonForDepart = lessonForDepartMap.get(department)
        lessonForDepart.addLessonId(lessonId)
      } else {
        lessonForDepart = new LessonForDepart()
        lessonForDepart.setDepartment(department)
        lessonForDepart.addLessonId(lessonId)
        lessonForDepart.setProject(project)
        lessonForDepart.setSemester(semester)
        lessonForDepartMap.put(department, lessonForDepart)
      }
    }
    try {
      entityDao.saveOrUpdate(lessonForDepartMap.values)
      redirect("index", "info.save.success")
    } catch {
      case e: Exception => {
        logger.info("saveAndForwad failure", e)
        redirect("index", "info.save.failure")
      }
    }
  }
}
| openurp/edu-eams-webapp | schedule/src/main/scala/org/openurp/edu/eams/teach/schedule/web/action/AllocateLessonForArrangeAction.scala | Scala | gpl-3.0 | 14,222 |
package com.example.http4s
package blaze
import com.example.http4s.ssl.SslExample
import org.http4s.server.blaze.BlazeBuilder
/** SSL/TLS example server wired to the blaze backend. */
object BlazeSslExample extends SslExample {
  /** Server builder used by [[SslExample]] to bind the example service. */
  def builder: BlazeBuilder.type = BlazeBuilder
}
| hvesalai/http4s | examples/blaze/src/main/scala/com/example/http4s/blaze/BlazeSslExample.scala | Scala | apache-2.0 | 203 |
/*
* Copyright 2014 Frédéric Cabestre
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sigusr.mqtt.impl.protocol
import net.sigusr.mqtt.api.ConnectionFailureReason.ServerNotResponding
import net.sigusr.mqtt.api.QualityOfService.{ AtLeastOnce, AtMostOnce, ExactlyOnce }
import net.sigusr.mqtt.api._
import net.sigusr.mqtt.impl.frames._
import net.sigusr.mqtt.impl.protocol.Registers.RegistersState
import scalaz.State._
import scodec.bits.ByteVector
/**
 * Pure MQTT protocol handlers: each handler maps an input event (API command,
 * network frame, or timer signal) to an [[Action]] inside the
 * [[RegistersState]] state monad. No I/O happens here — the returned actions
 * are interpreted elsewhere.
 */
trait Handlers {
  // Placeholder identifier used for QoS-0 publishes, where the message id is irrelevant.
  private val zeroId = MessageId(0)
  /**
   * Builds the CONNECT frame from the API-level [[Connect]] message.
   * When the session is not clean, previously unacknowledged in-flight frames
   * are re-sent after the CONNECT frame.
   */
  private[protocol] def handleApiConnect(connect: Connect): RegistersState[Action] = gets { registers =>
    val header = Header(dup = false, AtMostOnce.value)
    // Will flags default to "no will" semantics when connect.will is absent.
    val retain = connect.will.fold(false)(_.retain)
    val qos = connect.will.fold(AtMostOnce.value)(_.qos.value)
    val topic = connect.will.map(_.topic)
    val message = connect.will.map(_.message)
    val variableHeader = ConnectVariableHeader(
      connect.user.isDefined,
      connect.password.isDefined,
      willRetain = retain,
      qos,
      willFlag = connect.will.isDefined,
      connect.cleanSession,
      connect.keepAlive)
    val actions = Seq(
      // keepAlive is expressed in seconds by the API; the timer wants milliseconds.
      SetKeepAlive(connect.keepAlive.toLong * 1000),
      SendToNetwork(ConnectFrame(header, variableHeader, connect.clientId, topic, message, connect.user, connect.password)))
    Sequence(if (!connect.cleanSession) actions ++ registers.inFlightSentFrame.toSeq.map(p => SendToNetwork(p._2)) else actions)
  }
  /**
   * Translates API commands issued after connection into protocol frames.
   * QoS > 0 publishes are stored (with the DUP flag pre-set for any retransmit)
   * before being sent.
   * NOTE(review): a `Publish` with QoS > 0 and no message id falls through all
   * cases and would raise a MatchError — presumably the API layer guarantees an
   * id is present; confirm upstream.
   */
  private[protocol] def handleApiCommand(apiCommand: APICommand): RegistersState[Action] = gets { registers =>
    apiCommand match {
      case Connect(clientId, keepAlive, cleanSession, will, user, password) =>
        // Already connected: a second CONNECT is a client error.
        SendToClient(Error(AlreadyConnected))
      case Disconnect =>
        val header = Header(dup = false, AtMostOnce.value)
        SendToNetwork(DisconnectFrame(header))
      case Publish(topic, payload, qos, messageId, retain) if qos == AtMostOnce =>
        // QoS 0: fire and forget, no retransmission bookkeeping.
        val header = Header(dup = false, qos.value, retain = retain)
        SendToNetwork(PublishFrame(header, topic, messageId.getOrElse(zeroId).identifier, ByteVector(payload)))
      case Publish(topic, payload, qos, Some(messageId), retain) =>
        // QoS 1/2: remember the frame (DUP set) so it can be replayed on reconnect.
        val header = Header(dup = false, qos.value, retain = retain)
        val frame = PublishFrame(header, topic, messageId.identifier, ByteVector(payload))
        Sequence(Seq(
          StoreSentInFlightFrame(messageId.identifier, PublishFrame.dupLens.set(frame)(true)),
          SendToNetwork(frame)))
      case Subscribe(topics, messageId) =>
        val header = Header(dup = false, AtLeastOnce.value)
        SendToNetwork(SubscribeFrame(header, messageId.identifier, topics.map((v: (String, QualityOfService)) => (v._1, v._2.value))))
      case Unsubscribe(topics, messageId) =>
        val header = Header(dup = false, AtLeastOnce.value)
        SendToNetwork(UnsubscribeFrame(header, messageId.identifier, topics))
      case Status =>
        // These handlers only run while connected, so Status always reports Connected.
        SendToClient(Connected)
    }
  }
  /**
   * Reacts to frames received from the broker, driving the QoS 1 (PUBACK) and
   * QoS 2 (PUBREC/PUBREL/PUBCOMP) handshakes and the keep-alive ping cycle.
   * Any unexpected frame forcibly closes the transport.
   */
  private[protocol] def handleNetworkFrames(frame: Frame): RegistersState[Action] = gets { registers =>
    frame match {
      case ConnackFrame(_, 0) =>
        // Return code 0 means the broker accepted the connection; start the
        // ping timer only when a keep-alive interval was negotiated.
        if (registers.keepAlive == 0) SendToClient(Connected)
        else Sequence(Seq(
          StartPingRespTimer(registers.keepAlive),
          SendToClient(Connected)))
      case ConnackFrame(_, returnCode) =>
        SendToClient(ConnectionFailure(ConnectionFailureReason.withValue(returnCode)))
      case PingRespFrame(_) =>
        SetPendingPingResponse(isPending = false)
      case PublishFrame(header, topic, messageIdentifier, payload) =>
        val toClient = SendToClient(Message(topic, payload.toArray.toVector))
        header.qos match {
          case AtMostOnce.value =>
            toClient
          case AtLeastOnce.value =>
            // QoS 1 inbound: deliver then acknowledge.
            Sequence(Seq(
              toClient,
              SendToNetwork(PubackFrame(Header(), messageIdentifier))))
          case ExactlyOnce.value =>
            // QoS 2 inbound: deliver at most once — if the id is already
            // recorded this is a broker retransmit, so only re-send PUBREC.
            if (registers.inFlightRecvFrame(messageIdentifier))
              Sequence(Seq(
                SendToNetwork(PubrecFrame(Header(), messageIdentifier))))
            else
              Sequence(Seq(
                toClient,
                StoreRecvInFlightFrameId(messageIdentifier),
                SendToNetwork(PubrecFrame(Header(), messageIdentifier))))
        }
      case PubackFrame(_, messageId) =>
        // QoS 1 outbound complete.
        Sequence(Seq(
          RemoveSentInFlightFrame(messageId),
          SendToClient(Published(messageId))))
      case PubrecFrame(header, messageIdentifier) =>
        // QoS 2 outbound, step 2: replace the stored PUBLISH with a PUBREL
        // (DUP pre-set for possible replay) and send it.
        val pubrelFrame = PubrelFrame(header.copy(qos = 1), messageIdentifier)
        Sequence(Seq(
          RemoveSentInFlightFrame(messageIdentifier),
          StoreSentInFlightFrame(messageIdentifier.identifier, PubrelFrame.dupLens.set(pubrelFrame)(true)),
          SendToNetwork(pubrelFrame)))
      case PubrelFrame(header, messageIdentifier) =>
        // QoS 2 inbound, final step: release the stored id and complete.
        Sequence(Seq(
          RemoveRecvInFlightFrameId(messageIdentifier),
          SendToNetwork(PubcompFrame(header.copy(qos = 0), messageIdentifier))))
      case PubcompFrame(_, messageId) =>
        // QoS 2 outbound complete.
        Sequence(Seq(
          RemoveSentInFlightFrame(messageId),
          SendToClient(Published(messageId))))
      case SubackFrame(_, messageIdentifier, topicResults) =>
        SendToClient(Subscribed(topicResults.map(QualityOfService.withValue), messageIdentifier.identifier))
      case UnsubackFrame(_, messageId) =>
        SendToClient(Unsubscribed(messageId))
      case _ => ForciblyCloseTransport
    }
  }
  /**
   * Keep-alive timer tick. If a PINGRESP is still pending the broker is
   * considered dead; otherwise either send a PINGREQ (when close enough to the
   * deadline) or re-arm the timer for the remaining interval.
   */
  private[protocol] def timerSignal(currentTime: Long): RegistersState[Action] = gets { registers =>
    if (registers.isPingResponsePending)
      ForciblyCloseTransport
    else {
      // Time (ms) left before a ping is due, measured from the last sent message.
      val timeout = registers.keepAlive - currentTime + registers.lastSentMessageTimestamp
      if (timeout < 1000)
        Sequence(Seq(
          SetPendingPingResponse(isPending = true),
          StartPingRespTimer(registers.keepAlive),
          SendToNetwork(PingReqFrame(Header(dup = false, AtMostOnce.value)))))
      else
        StartPingRespTimer(timeout)
    }
  }
  // Transport-level notifications surfaced to the API client.
  private[protocol] def connectionClosed(): Action = SendToClient(Disconnected)
  private[protocol] def transportNotReady(): Action = SendToClient(ConnectionFailure(ServerNotResponding))
}
| fcabestre/Scala-MQTT-client | core/src/main/scala/net/sigusr/mqtt/impl/protocol/Handlers.scala | Scala | apache-2.0 | 6,781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.json
import java.io.{File, StringWriter}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import java.util.Locale
import com.fasterxml.jackson.core.JsonFactory
import org.apache.hadoop.fs.{Path, PathFilter}
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkException
import org.apache.spark.sql.{functions => F, _}
import org.apache.spark.sql.catalyst.json.{CreateJacksonParser, JacksonParser, JSONOptions}
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.ExternalRDD
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.execution.datasources.json.JsonInferSchema.compatibleType
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/** Path filter that rejects any file whose parent directory is named `p=2`. */
class TestFileFilter extends PathFilter {
  override def accept(path: Path): Boolean = {
    val parentName = path.getParent.getName
    !(parentName == "p=2")
  }
}
class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
import testImplicits._
  // Exercises JacksonParser's scalar converters directly: a value is serialized
  // with Jackson and re-parsed as a target Catalyst type, checking widening
  // (int -> long -> double -> decimal) and timestamp/date string parsing.
  test("Type promotion") {
    def checkTypePromotion(expected: Any, actual: Any) {
      assert(expected.getClass == actual.getClass,
        s"Failed to promote ${actual.getClass} to ${expected.getClass}.")
      assert(expected == actual,
        s"Promoted value ${actual}(${actual.getClass}) does not equal the expected value " +
          s"${expected}(${expected.getClass}).")
    }
    val factory = new JsonFactory()
    // Round-trips `value` through Jackson and converts the resulting token
    // with the parser's converter for `dataType`.
    def enforceCorrectType(value: Any, dataType: DataType): Any = {
      val writer = new StringWriter()
      Utils.tryWithResource(factory.createGenerator(writer)) { generator =>
        generator.writeObject(value)
        generator.flush()
      }
      val dummyOption = new JSONOptions(Map.empty[String, String], "GMT")
      val dummySchema = StructType(Seq.empty)
      val parser = new JacksonParser(dummySchema, dummyOption)
      Utils.tryWithResource(factory.createParser(writer.toString)) { jsonParser =>
        jsonParser.nextToken()
        val converter = parser.makeConverter(dataType)
        converter.apply(jsonParser)
      }
    }
    val intNumber: Int = 2147483647
    checkTypePromotion(intNumber, enforceCorrectType(intNumber, IntegerType))
    checkTypePromotion(intNumber.toLong, enforceCorrectType(intNumber, LongType))
    checkTypePromotion(intNumber.toDouble, enforceCorrectType(intNumber, DoubleType))
    checkTypePromotion(
      Decimal(intNumber), enforceCorrectType(intNumber, DecimalType.SYSTEM_DEFAULT))
    val longNumber: Long = 9223372036854775807L
    checkTypePromotion(longNumber, enforceCorrectType(longNumber, LongType))
    checkTypePromotion(longNumber.toDouble, enforceCorrectType(longNumber, DoubleType))
    checkTypePromotion(
      Decimal(longNumber), enforceCorrectType(longNumber, DecimalType.SYSTEM_DEFAULT))
    val doubleNumber: Double = 1.7976931348623157E308d
    checkTypePromotion(doubleNumber.toDouble, enforceCorrectType(doubleNumber, DoubleType))
    // Integral values interpreted as timestamps are treated as epoch seconds.
    checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber * 1000L)),
      enforceCorrectType(intNumber, TimestampType))
    checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber.toLong * 1000L)),
      enforceCorrectType(intNumber.toLong, TimestampType))
    val strTime = "2014-09-30 12:34:56"
    checkTypePromotion(DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf(strTime)),
      enforceCorrectType(strTime, TimestampType))
    val strDate = "2014-10-15"
    checkTypePromotion(
      DateTimeUtils.fromJavaDate(Date.valueOf(strDate)), enforceCorrectType(strDate, DateType))
    // ISO 8601 strings with explicit offsets must also parse as timestamps.
    val ISO8601Time1 = "1970-01-01T01:00:01.0Z"
    val ISO8601Time2 = "1970-01-01T02:00:01-01:00"
    checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(3601000)),
      enforceCorrectType(ISO8601Time1, TimestampType))
    checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(10801000)),
      enforceCorrectType(ISO8601Time2, TimestampType))
    val ISO8601Date = "1970-01-01"
    checkTypePromotion(DateTimeUtils.millisToDays(32400000),
      enforceCorrectType(ISO8601Date, DateType))
  }
  // Verifies JsonInferSchema.compatibleType, the least-upper-bound used during
  // schema inference. The check is run with both argument orders, so the
  // operation is implicitly asserted to be commutative.
  test("Get compatible type") {
    def checkDataType(t1: DataType, t2: DataType, expected: DataType) {
      var actual = compatibleType(t1, t2)
      assert(actual == expected,
        s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
      actual = compatibleType(t2, t1)
      assert(actual == expected,
        s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
    }
    // NullType
    checkDataType(NullType, BooleanType, BooleanType)
    checkDataType(NullType, IntegerType, IntegerType)
    checkDataType(NullType, LongType, LongType)
    checkDataType(NullType, DoubleType, DoubleType)
    checkDataType(NullType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
    checkDataType(NullType, StringType, StringType)
    checkDataType(NullType, ArrayType(IntegerType), ArrayType(IntegerType))
    checkDataType(NullType, StructType(Nil), StructType(Nil))
    checkDataType(NullType, NullType, NullType)
    // BooleanType — incompatible pairings fall back to StringType.
    checkDataType(BooleanType, BooleanType, BooleanType)
    checkDataType(BooleanType, IntegerType, StringType)
    checkDataType(BooleanType, LongType, StringType)
    checkDataType(BooleanType, DoubleType, StringType)
    checkDataType(BooleanType, DecimalType.SYSTEM_DEFAULT, StringType)
    checkDataType(BooleanType, StringType, StringType)
    checkDataType(BooleanType, ArrayType(IntegerType), StringType)
    checkDataType(BooleanType, StructType(Nil), StringType)
    // IntegerType
    checkDataType(IntegerType, IntegerType, IntegerType)
    checkDataType(IntegerType, LongType, LongType)
    checkDataType(IntegerType, DoubleType, DoubleType)
    checkDataType(IntegerType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
    checkDataType(IntegerType, StringType, StringType)
    checkDataType(IntegerType, ArrayType(IntegerType), StringType)
    checkDataType(IntegerType, StructType(Nil), StringType)
    // LongType
    checkDataType(LongType, LongType, LongType)
    checkDataType(LongType, DoubleType, DoubleType)
    checkDataType(LongType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
    checkDataType(LongType, StringType, StringType)
    checkDataType(LongType, ArrayType(IntegerType), StringType)
    checkDataType(LongType, StructType(Nil), StringType)
    // DoubleType — note double wins over decimal here.
    checkDataType(DoubleType, DoubleType, DoubleType)
    checkDataType(DoubleType, DecimalType.SYSTEM_DEFAULT, DoubleType)
    checkDataType(DoubleType, StringType, StringType)
    checkDataType(DoubleType, ArrayType(IntegerType), StringType)
    checkDataType(DoubleType, StructType(Nil), StringType)
    // DecimalType
    checkDataType(DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT,
      DecimalType.SYSTEM_DEFAULT)
    checkDataType(DecimalType.SYSTEM_DEFAULT, StringType, StringType)
    checkDataType(DecimalType.SYSTEM_DEFAULT, ArrayType(IntegerType), StringType)
    checkDataType(DecimalType.SYSTEM_DEFAULT, StructType(Nil), StringType)
    // StringType
    checkDataType(StringType, StringType, StringType)
    checkDataType(StringType, ArrayType(IntegerType), StringType)
    checkDataType(StringType, StructType(Nil), StringType)
    // ArrayType — element types are merged; containsNull is OR-ed.
    checkDataType(ArrayType(IntegerType), ArrayType(IntegerType), ArrayType(IntegerType))
    checkDataType(ArrayType(IntegerType), ArrayType(LongType), ArrayType(LongType))
    checkDataType(ArrayType(IntegerType), ArrayType(StringType), ArrayType(StringType))
    checkDataType(ArrayType(IntegerType), StructType(Nil), StringType)
    checkDataType(
      ArrayType(IntegerType, true), ArrayType(IntegerType), ArrayType(IntegerType, true))
    checkDataType(
      ArrayType(IntegerType, true), ArrayType(IntegerType, false), ArrayType(IntegerType, true))
    checkDataType(
      ArrayType(IntegerType, true), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
    checkDataType(
      ArrayType(IntegerType, false), ArrayType(IntegerType), ArrayType(IntegerType, true))
    checkDataType(
      ArrayType(IntegerType, false), ArrayType(IntegerType, false), ArrayType(IntegerType, false))
    checkDataType(
      ArrayType(IntegerType, false), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
    // StructType — fields are merged by name; missing fields stay nullable.
    checkDataType(StructType(Nil), StructType(Nil), StructType(Nil))
    checkDataType(
      StructType(StructField("f1", IntegerType, true) :: Nil),
      StructType(StructField("f1", IntegerType, true) :: Nil),
      StructType(StructField("f1", IntegerType, true) :: Nil))
    checkDataType(
      StructType(StructField("f1", IntegerType, true) :: Nil),
      StructType(Nil),
      StructType(StructField("f1", IntegerType, true) :: Nil))
    checkDataType(
      StructType(
        StructField("f1", IntegerType, true) ::
          StructField("f2", IntegerType, true) :: Nil),
      StructType(StructField("f1", LongType, true) :: Nil),
      StructType(
        StructField("f1", LongType, true) ::
          StructField("f2", IntegerType, true) :: Nil))
    checkDataType(
      StructType(
        StructField("f1", IntegerType, true) :: Nil),
      StructType(
        StructField("f2", IntegerType, true) :: Nil),
      StructType(
        StructField("f1", IntegerType, true) ::
          StructField("f2", IntegerType, true) :: Nil))
    checkDataType(
      StructType(
        StructField("f1", IntegerType, true) :: Nil),
      DecimalType.SYSTEM_DEFAULT,
      StringType)
  }
  // Schema inference must keep struct fields seen in some records even when
  // other sampled records hold null for the whole struct.
  test("Complex field and type inferring with null in sampling") {
    val jsonDF = spark.read.json(jsonNullStruct)
    val expectedSchema = StructType(
      StructField("headers", StructType(
        StructField("Charset", StringType, true) ::
          StructField("Host", StringType, true) :: Nil)
        , true) ::
        StructField("ip", StringType, true) ::
        StructField("nullstr", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select nullstr, headers.Host from jsonTable"),
      Seq(Row("", "1.abc.com"), Row("", null), Row("", null), Row(null, null))
    )
  }
  // JSON integers infer as LongType, big integers as DecimalType(20, 0),
  // and null literals as StringType.
  test("Primitive field and type inferring") {
    val jsonDF = spark.read.json(primitiveFieldAndType)
    val expectedSchema = StructType(
      StructField("bigInteger", DecimalType(20, 0), true) ::
        StructField("boolean", BooleanType, true) ::
        StructField("double", DoubleType, true) ::
        StructField("integer", LongType, true) ::
        StructField("long", LongType, true) ::
        StructField("null", StringType, true) ::
        StructField("string", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select * from jsonTable"),
      Row(new java.math.BigDecimal("92233720368547758070"),
        true,
        1.7976931348623157E308,
        10,
        21474836470L,
        null,
        "this is a simple string.")
    )
  }
  // Inference and SQL access over nested arrays, arrays of structs, and
  // structs containing arrays.
  test("Complex field and type inferring") {
    val jsonDF = spark.read.json(complexFieldAndType1)
    val expectedSchema = StructType(
      StructField("arrayOfArray1", ArrayType(ArrayType(StringType, true), true), true) ::
        StructField("arrayOfArray2", ArrayType(ArrayType(DoubleType, true), true), true) ::
        StructField("arrayOfBigInteger", ArrayType(DecimalType(21, 0), true), true) ::
        StructField("arrayOfBoolean", ArrayType(BooleanType, true), true) ::
        StructField("arrayOfDouble", ArrayType(DoubleType, true), true) ::
        StructField("arrayOfInteger", ArrayType(LongType, true), true) ::
        StructField("arrayOfLong", ArrayType(LongType, true), true) ::
        StructField("arrayOfNull", ArrayType(StringType, true), true) ::
        StructField("arrayOfString", ArrayType(StringType, true), true) ::
        StructField("arrayOfStruct", ArrayType(
          StructType(
            StructField("field1", BooleanType, true) ::
              StructField("field2", StringType, true) ::
              StructField("field3", StringType, true) :: Nil), true), true) ::
        StructField("struct", StructType(
          StructField("field1", BooleanType, true) ::
            StructField("field2", DecimalType(20, 0), true) :: Nil), true) ::
        StructField("structWithArrayFields", StructType(
          StructField("field1", ArrayType(LongType, true), true) ::
            StructField("field2", ArrayType(StringType, true), true) :: Nil), true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    // Access elements of a primitive array.
    checkAnswer(
      sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
      Row("str1", "str2", null)
    )
    // Access an array of null values.
    checkAnswer(
      sql("select arrayOfNull from jsonTable"),
      Row(Seq(null, null, null, null))
    )
    // Access elements of a BigInteger array (we use DecimalType internally).
    checkAnswer(
      sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
      Row(new java.math.BigDecimal("922337203685477580700"),
        new java.math.BigDecimal("-922337203685477580800"), null)
    )
    // Access elements of an array of arrays.
    checkAnswer(
      sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
      Row(Seq("1", "2", "3"), Seq("str1", "str2"))
    )
    // Access elements of an array of arrays.
    checkAnswer(
      sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
      Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
    )
    // Access elements of an array inside a filed with the type of ArrayType(ArrayType).
    checkAnswer(
      sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
      Row("str2", 2.1)
    )
    // Access elements of an array of structs.
    checkAnswer(
      sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
        "from jsonTable"),
      Row(
        Row(true, "str1", null),
        Row(false, null, null),
        Row(null, null, null),
        null)
    )
    // Access a struct and fields inside of it.
    checkAnswer(
      sql("select struct, struct.field1, struct.field2 from jsonTable"),
      Row(
        Row(true, new java.math.BigDecimal("92233720368547758070")),
        true,
        new java.math.BigDecimal("92233720368547758070")) :: Nil
    )
    // Access an array field of a struct.
    checkAnswer(
      sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
      Row(Seq(4, 5, 6), Seq("str1", "str2"))
    )
    // Access elements of an array field of a struct.
    checkAnswer(
      sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
      Row(5, null)
    )
  }
  // Field extraction over arrays of structs, both per-element and collected
  // across the whole array.
  test("GetField operation on complex data type") {
    val jsonDF = spark.read.json(complexFieldAndType1)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
      Row(true, "str1")
    )
    // Getting all values of a specific field from an array of structs.
    checkAnswer(
      sql("select arrayOfStruct.field1, arrayOfStruct.field2 from jsonTable"),
      Row(Seq(true, false, null), Seq("str1", null, null))
    )
  }
  // When a field's type varies across records, inference widens to the least
  // common type (string when number/boolean/string conflict), and queries cast
  // back as needed.
  test("Type conflict in primitive field values") {
    val jsonDF = spark.read.json(primitiveFieldValueTypeConflict)
    val expectedSchema = StructType(
      StructField("num_bool", StringType, true) ::
        StructField("num_num_1", LongType, true) ::
        StructField("num_num_2", DoubleType, true) ::
        StructField("num_num_3", DoubleType, true) ::
        StructField("num_str", StringType, true) ::
        StructField("str_bool", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select * from jsonTable"),
      Row("true", 11L, null, 1.1, "13.1", "str1") ::
        Row("12", null, 21474836470.9, null, null, "true") ::
        Row("false", 21474836470L, 92233720368547758070d, 100, "str1", "false") ::
        Row(null, 21474836570L, 1.1, 21474836470L, "92233720368547758070", null) :: Nil
    )
    // Number and Boolean conflict: resolve the type as number in this query.
    checkAnswer(
      sql("select num_bool - 10 from jsonTable where num_bool > 11"),
      Row(2)
    )
    // Widening to LongType
    checkAnswer(
      sql("select num_num_1 - 100 from jsonTable where num_num_1 > 11"),
      Row(21474836370L) :: Row(21474836470L) :: Nil
    )
    checkAnswer(
      sql("select num_num_1 - 100 from jsonTable where num_num_1 > 10"),
      Row(-89) :: Row(21474836370L) :: Row(21474836470L) :: Nil
    )
    // Widening to DecimalType
    checkAnswer(
      sql("select num_num_2 + 1.3 from jsonTable where num_num_2 > 1.1"),
      Row(21474836472.2) ::
        Row(92233720368547758071.3) :: Nil
    )
    // Widening to Double
    checkAnswer(
      sql("select num_num_3 + 1.2 from jsonTable where num_num_3 > 1.1"),
      Row(101.2) :: Row(21474836471.2) :: Nil
    )
    // Number and String conflict: resolve the type as number in this query.
    checkAnswer(
      sql("select num_str + 1.2 from jsonTable where num_str > 14d"),
      Row(92233720368547758071.2)
    )
    // Number and String conflict: resolve the type as number in this query.
    checkAnswer(
      sql("select num_str + 1.2 from jsonTable where num_str >= 92233720368547758060"),
      Row(new java.math.BigDecimal("92233720368547758071.2").doubleValue)
    )
    // String and Boolean conflict: resolve the type as string.
    checkAnswer(
      sql("select * from jsonTable where str_bool = 'str1'"),
      Row("true", 11L, null, 1.1, "13.1", "str1")
    )
  }
  // Documents analyzer behaviors that are currently NOT supported (string
  // promotion in boolean contexts, decimal-precise arithmetic on strings);
  // kept ignored until the analyzer handles them.
  ignore("Type conflict in primitive field values (Ignored)") {
    val jsonDF = spark.read.json(primitiveFieldValueTypeConflict)
    jsonDF.createOrReplaceTempView("jsonTable")
    // Right now, the analyzer does not promote strings in a boolean expression.
    // Number and Boolean conflict: resolve the type as boolean in this query.
    checkAnswer(
      sql("select num_bool from jsonTable where NOT num_bool"),
      Row(false)
    )
    checkAnswer(
      sql("select str_bool from jsonTable where NOT str_bool"),
      Row(false)
    )
    // Right now, the analyzer does not know that num_bool should be treated as a boolean.
    // Number and Boolean conflict: resolve the type as boolean in this query.
    checkAnswer(
      sql("select num_bool from jsonTable where num_bool"),
      Row(true)
    )
    checkAnswer(
      sql("select str_bool from jsonTable where str_bool"),
      Row(false)
    )
    // The plan of the following DSL is
    // Project [(CAST(num_str#65:4, DoubleType) + 1.2) AS num#78]
    //  Filter (CAST(CAST(num_str#65:4, DoubleType), DecimalType) > 92233720368547758060)
    //   ExistingRdd [num_bool#61,num_num_1#62L,num_num_2#63,num_num_3#64,num_str#65,str_bool#66]
    // We should directly cast num_str to DecimalType and also need to do the right type promotion
    // in the Project.
    checkAnswer(
      jsonDF.
        where('num_str >= BigDecimal("92233720368547758060")).
        select(('num_str + 1.2).as("num")),
      Row(new java.math.BigDecimal("92233720368547758071.2").doubleValue())
    )
    // The following test will fail. The type of num_str is StringType.
    // So, to evaluate num_str + 1.2, we first need to use Cast to convert the type.
    // In our test data, one value of num_str is 13.1.
    // The result of (CAST(num_str#65:4, DoubleType) + 1.2) for this value is 14.299999999999999,
    // which is not 14.3.
    // Number and String conflict: resolve the type as number in this query.
    checkAnswer(
      sql("select num_str + 1.2 from jsonTable where num_str > 13"),
      Row(BigDecimal("14.3")) :: Row(BigDecimal("92233720368547758071.2")) :: Nil
    )
  }
  // Conflicting complex values (struct vs. number, array vs. string, etc.)
  // degrade to StringType holding the raw JSON text.
  test("Type conflict in complex field values") {
    val jsonDF = spark.read.json(complexFieldValueTypeConflict)
    val expectedSchema = StructType(
      StructField("array", ArrayType(LongType, true), true) ::
        StructField("num_struct", StringType, true) ::
        StructField("str_array", StringType, true) ::
        StructField("struct", StructType(
          StructField("field", StringType, true) :: Nil), true) ::
        StructField("struct_array", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select * from jsonTable"),
      Row(Seq(), "11", "[1,2,3]", Row(null), "[]") ::
        Row(null, """{"field":false}""", null, null, "{}") ::
        Row(Seq(4, 5, 6), null, "str", Row(null), "[7,8,9]") ::
        Row(Seq(7), "{}", """["str1","str2",33]""", Row("str"), """{"field":true}""") :: Nil
    )
  }
  // Conflicting element types inside a single array widen the element type
  // (to string, or to a merged struct for arrays of structs).
  test("Type conflict in array elements") {
    val jsonDF = spark.read.json(arrayElementTypeConflict)
    val expectedSchema = StructType(
      StructField("array1", ArrayType(StringType, true), true) ::
        StructField("array2", ArrayType(StructType(
          StructField("field", LongType, true) :: Nil), true), true) ::
        StructField("array3", ArrayType(StringType, true), true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select * from jsonTable"),
      Row(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
        """{"field":"str"}"""), Seq(Row(214748364700L), Row(1)), null) ::
        Row(null, null, Seq("""{"field":"str"}""", """{"field":1}""")) ::
        Row(null, null, Seq("1", "2", "3")) :: Nil
    )
    // Treat an element as a number.
    checkAnswer(
      sql("select array1[0] + 1 from jsonTable where array1 is not null"),
      Row(2)
    )
  }
  // Fields absent from some records still appear in the inferred schema as
  // nullable columns.
  test("Handling missing fields") {
    val jsonDF = spark.read.json(missingFields)
    val expectedSchema = StructType(
      StructField("a", BooleanType, true) ::
        StructField("b", LongType, true) ::
        StructField("c", ArrayType(LongType, true), true) ::
        StructField("d", StructType(
          StructField("field", BooleanType, true) :: Nil), true) ::
        StructField("e", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
  }
  // End-to-end: write records as plain text, then read them back with the
  // JSON reader from a file path.
  test("Loading a JSON dataset from a text file") {
    val dir = Utils.createTempDir()
    dir.delete()
    val path = dir.getCanonicalPath
    // Each record must be a single line for line-delimited JSON.
    primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
    val jsonDF = spark.read.json(path)
    val expectedSchema = StructType(
      StructField("bigInteger", DecimalType(20, 0), true) ::
        StructField("boolean", BooleanType, true) ::
        StructField("double", DoubleType, true) ::
        StructField("integer", LongType, true) ::
        StructField("long", LongType, true) ::
        StructField("null", StringType, true) ::
        StructField("string", StringType, true) :: Nil)
    assert(expectedSchema === jsonDF.schema)
    jsonDF.createOrReplaceTempView("jsonTable")
    checkAnswer(
      sql("select * from jsonTable"),
      Row(new java.math.BigDecimal("92233720368547758070"),
        true,
        1.7976931348623157E308,
        10,
        21474836470L,
        null,
        "this is a simple string.")
    )
  }
test("Loading a JSON dataset primitivesAsString returns schema with primitive types as strings") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val jsonDF = spark.read.option("primitivesAsString", "true").json(path)
val expectedSchema = StructType(
StructField("bigInteger", StringType, true) ::
StructField("boolean", StringType, true) ::
StructField("double", StringType, true) ::
StructField("integer", StringType, true) ::
StructField("long", StringType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row("92233720368547758070",
"true",
"1.7976931348623157E308",
"10",
"21474836470",
null,
"this is a simple string.")
)
}
test("Loading a JSON dataset primitivesAsString returns complex fields as strings") {
val jsonDF = spark.read.option("primitivesAsString", "true").json(complexFieldAndType1)
val expectedSchema = StructType(
StructField("arrayOfArray1", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfArray2", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfBigInteger", ArrayType(StringType, true), true) ::
StructField("arrayOfBoolean", ArrayType(StringType, true), true) ::
StructField("arrayOfDouble", ArrayType(StringType, true), true) ::
StructField("arrayOfInteger", ArrayType(StringType, true), true) ::
StructField("arrayOfLong", ArrayType(StringType, true), true) ::
StructField("arrayOfNull", ArrayType(StringType, true), true) ::
StructField("arrayOfString", ArrayType(StringType, true), true) ::
StructField("arrayOfStruct", ArrayType(
StructType(
StructField("field1", StringType, true) ::
StructField("field2", StringType, true) ::
StructField("field3", StringType, true) :: Nil), true), true) ::
StructField("struct", StructType(
StructField("field1", StringType, true) ::
StructField("field2", StringType, true) :: Nil), true) ::
StructField("structWithArrayFields", StructType(
StructField("field1", ArrayType(StringType, true), true) ::
StructField("field2", ArrayType(StringType, true), true) :: Nil), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from jsonTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
Row("922337203685477580700", "-922337203685477580800", null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("1.1", "2.1", "3.1"))
)
// Access elements of an array inside a filed with the type of ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
Row("str2", "2.1")
)
// Access elements of an array of structs.
checkAnswer(
sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
"from jsonTable"),
Row(
Row("true", "str1", null),
Row("false", null, null),
Row(null, null, null),
null)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from jsonTable"),
Row(
Row("true", "92233720368547758070"),
"true",
"92233720368547758070") :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
Row(Seq("4", "5", "6"), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
Row("5", null)
)
}
test("Loading a JSON dataset prefersDecimal returns schema with float types as BigDecimal") {
  // With prefersDecimal enabled, floating-point values are inferred as decimals
  // wherever they fit, instead of doubles.
  val decimalDF = spark.read.option("prefersDecimal", "true").json(primitiveFieldAndType)
  val wantedSchema = StructType(Seq(
    StructField("bigInteger", DecimalType(20, 0), true),
    StructField("boolean", BooleanType, true),
    StructField("double", DecimalType(17, -292), true),
    StructField("integer", LongType, true),
    StructField("long", LongType, true),
    StructField("null", StringType, true),
    StructField("string", StringType, true)))
  assert(decimalDF.schema === wantedSchema)

  decimalDF.createOrReplaceTempView("jsonTable")

  checkAnswer(
    sql("select * from jsonTable"),
    Seq(Row(
      BigDecimal("92233720368547758070"),
      true,
      BigDecimal("1.7976931348623157E308"),
      10,
      21474836470L,
      null,
      "this is a simple string."))
  )
}
test("Find compatible types even if inferred DecimalType is not capable of other IntegralType") {
  val records = Seq(
    """{"a": 3, "b": 1.1}""",
    s"""{"a": 3.1, "b": 0.${"0" * 38}1}""").toDS()
  val df = spark.read
    .option("prefersDecimal", "true")
    .json(records)

  // `a` values all fit in a decimal; `b` falls back to double because
  // 1.0E-39 cannot be represented as a decimal.
  val wantedSchema = StructType(Seq(
    StructField("a", DecimalType(21, 1), true),
    StructField("b", DoubleType, true)))
  assert(df.schema === wantedSchema)

  checkAnswer(
    df,
    Seq(
      Row(BigDecimal("3"), 1.1D),
      Row(BigDecimal("3.1"), 1.0E-39D))
  )
}
test("Infer big integers correctly even when it does not fit in decimal") {
  val df = spark.read
    .json(bigIntegerRecords)

  // `a` (1.0E38) does not fit in a decimal, so it becomes a double;
  // `b` (92233720368547758070) fits in DecimalType(20, 0).
  val wantedSchema = StructType(Seq(
    StructField("a", DoubleType, true),
    StructField("b", DecimalType(20, 0), true)))
  assert(df.schema === wantedSchema)
  checkAnswer(df, Row(1.0E38D, BigDecimal("92233720368547758070")))
}
test("Infer floating-point values correctly even when it does not fit in decimal") {
  val jsonDF = spark.read
    .option("prefersDecimal", "true")
    .json(floatingValueRecords)

  // The value in `a` field will be a double as it does not fit in decimal. For `b` field,
  // it will be a decimal as `0.01` by having a precision equal to the scale.
  val expectedSchema = StructType(
    StructField("a", DoubleType, true) ::
    StructField("b", DecimalType(2, 2), true):: Nil)

  assert(expectedSchema === jsonDF.schema)
  checkAnswer(jsonDF, Row(1.0E-39D, BigDecimal(0.01)))

  // Merging with bigIntegerRecords must widen `b` to DecimalType(22, 2):
  // enough integer digits for `92233720368547758070` while keeping scale 2.
  val mergedJsonDF = spark.read
    .option("prefersDecimal", "true")
    .json(floatingValueRecords.union(bigIntegerRecords))

  val expectedMergedSchema = StructType(
    StructField("a", DoubleType, true) ::
    StructField("b", DecimalType(22, 2), true):: Nil)

  assert(expectedMergedSchema === mergedJsonDF.schema)
  checkAnswer(
    mergedJsonDF,
    Row(1.0E-39D, BigDecimal(0.01)) ::
    Row(1.0E38D, BigDecimal("92233720368547758070")) :: Nil
  )
}
test("Loading a JSON dataset from a text file with SQL") {
  val dir = Utils.createTempDir()
  // The write below requires a non-existent target path.
  dir.delete()
  val path = dir.toURI.toString
  // Flatten multi-line records so each JSON object occupies one text line.
  primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)

  // Register the files as a temporary view through SQL DDL instead of the
  // DataFrameReader API.
  sql(
    s"""
      |CREATE TEMPORARY VIEW jsonTableSQL
      |USING org.apache.spark.sql.json
      |OPTIONS (
      |  path '$path'
      |)
    """.stripMargin)

  checkAnswer(
    sql("select * from jsonTableSQL"),
    Row(new java.math.BigDecimal("92233720368547758070"),
      true,
      1.7976931348623157E308,
      10,
      21474836470L,
      null,
      "this is a simple string.")
  )
}
test("Applying schemas") {
  val dir = Utils.createTempDir()
  // The write below requires a non-existent target path.
  dir.delete()
  val path = dir.getCanonicalPath
  // Flatten multi-line records so each JSON object occupies one text line.
  primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)

  // User-specified schema: values must come back with exactly these types,
  // no inference involved.
  val schema = StructType(
    StructField("bigInteger", DecimalType.SYSTEM_DEFAULT, true) ::
    StructField("boolean", BooleanType, true) ::
    StructField("double", DoubleType, true) ::
    StructField("integer", IntegerType, true) ::
    StructField("long", LongType, true) ::
    StructField("null", StringType, true) ::
    StructField("string", StringType, true) :: Nil)

  // Read from the text files on disk.
  val jsonDF1 = spark.read.schema(schema).json(path)

  assert(schema === jsonDF1.schema)

  jsonDF1.createOrReplaceTempView("jsonTable1")

  checkAnswer(
    sql("select * from jsonTable1"),
    Row(new java.math.BigDecimal("92233720368547758070"),
      true,
      1.7976931348623157E308,
      10,
      21474836470L,
      null,
      "this is a simple string.")
  )

  // Read the same data directly from the in-memory dataset; results must match.
  val jsonDF2 = spark.read.schema(schema).json(primitiveFieldAndType)

  assert(schema === jsonDF2.schema)

  jsonDF2.createOrReplaceTempView("jsonTable2")

  checkAnswer(
    sql("select * from jsonTable2"),
    Row(new java.math.BigDecimal("92233720368547758070"),
      true,
      1.7976931348623157E308,
      10,
      21474836470L,
      null,
      "this is a simple string.")
  )
}
test("Applying schemas with MapType") {
  // Simple map: string -> int.
  val schemaWithSimpleMap = StructType(
    StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
  val jsonWithSimpleMap = spark.read.schema(schemaWithSimpleMap).json(mapType1)

  jsonWithSimpleMap.createOrReplaceTempView("jsonWithSimpleMap")

  checkAnswer(
    sql("select `map` from jsonWithSimpleMap"),
    Row(Map("a" -> 1)) ::
    Row(Map("b" -> 2)) ::
    Row(Map("c" -> 3)) ::
    Row(Map("c" -> 1, "d" -> 4)) ::
    Row(Map("e" -> null)) :: Nil
  )

  // Looking up a key that a given row does not contain yields null for that row.
  checkAnswer(
    sql("select `map`['c'] from jsonWithSimpleMap"),
    Row(null) ::
    Row(null) ::
    Row(3) ::
    Row(1) ::
    Row(null) :: Nil
  )

  // Complex map: string -> struct(array<int>, int).
  val innerStruct = StructType(
    StructField("field1", ArrayType(IntegerType, true), true) ::
    StructField("field2", IntegerType, true) :: Nil)
  val schemaWithComplexMap = StructType(
    StructField("map", MapType(StringType, innerStruct, true), false) :: Nil)

  val jsonWithComplexMap = spark.read.schema(schemaWithComplexMap).json(mapType2)

  jsonWithComplexMap.createOrReplaceTempView("jsonWithComplexMap")

  checkAnswer(
    sql("select `map` from jsonWithComplexMap"),
    Row(Map("a" -> Row(Seq(1, 2, 3, null), null))) ::
    Row(Map("b" -> Row(null, 2))) ::
    Row(Map("c" -> Row(Seq(), 4))) ::
    Row(Map("c" -> Row(null, 3), "d" -> Row(Seq(null), null))) ::
    Row(Map("e" -> null)) ::
    Row(Map("f" -> Row(null, null))) :: Nil
  )

  // Struct field access through a map lookup.
  checkAnswer(
    sql("select `map`['a'].field1, `map`['c'].field2 from jsonWithComplexMap"),
    Row(Seq(1, 2, 3, null), null) ::
    Row(null, null) ::
    Row(null, 4) ::
    Row(null, 3) ::
    Row(null, null) ::
    Row(null, null) :: Nil
  )
}
test("SPARK-2096 Correctly parse dot notations") {
  val df = spark.read.json(complexFieldAndType2)
  df.createOrReplaceTempView("jsonTable")

  // Dot notation on an element of an array of structs.
  checkAnswer(
    sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
    Seq(Row(true, "str1"))
  )
  // Dot notation interleaved with nested array indexing.
  checkAnswer(
    sql(
      """
        |select complexArrayOfStruct[0].field1[1].inner2[0], complexArrayOfStruct[1].field2[0][1]
        |from jsonTable
      """.stripMargin),
    Seq(Row("str2", 6))
  )
}
test("SPARK-3390 Complex arrays") {
  val jsonDF = spark.read.json(complexFieldAndType2)
  jsonDF.createOrReplaceTempView("jsonTable")

  // Repeated indexing into a triply-nested array.
  checkAnswer(
    sql(
      """
        |select arrayOfArray1[0][0][0], arrayOfArray1[1][0][1], arrayOfArray1[1][1][0]
        |from jsonTable
      """.stripMargin),
    Row(5, 7, 8)
  )
  // Nested array indexing combined with struct field access.
  checkAnswer(
    sql(
      """
        |select arrayOfArray2[0][0][0].inner1, arrayOfArray2[1][0],
        |arrayOfArray2[1][1][1].inner2[0], arrayOfArray2[2][0][0].inner3[0][0].inner4
        |from jsonTable
      """.stripMargin),
    Row("str1", Nil, "str4", 2)
  )
}
test("SPARK-3308 Read top level JSON arrays") {
  val df = spark.read.json(jsonArray)
  df.createOrReplaceTempView("jsonTable")

  // Every element of a top-level JSON array becomes its own row.
  checkAnswer(
    sql(
      """
        |select a, b, c
        |from jsonTable
      """.stripMargin),
    Seq(
      Row("str_a_1", null, null),
      Row("str_a_2", null, null),
      Row(null, "str_b_3", null),
      Row("str_a_4", "str_b_4", "str_c_4"))
  )
}
test("Corrupt records: FAILFAST mode") {
  // In FAILFAST mode a malformed record aborts schema inference...
  val inferenceError = intercept[SparkException] {
    spark.read
      .option("mode", "FAILFAST")
      .json(corruptRecords)
  }.getMessage
  assert(inferenceError.contains(
    "Malformed records are detected in schema inference. Parse Mode: FAILFAST."))

  // ...and also record parsing when a schema is supplied.
  val parsingError = intercept[SparkException] {
    spark.read
      .option("mode", "FAILFAST")
      .schema("a string")
      .json(corruptRecords)
      .collect()
  }.getMessage
  assert(parsingError.contains(
    "Malformed records are detected in record parsing. Parse Mode: FAILFAST."))
}
test("Corrupt records: DROPMALFORMED mode") {
  val schemaOne = StructType(
    StructField("a", StringType, true) ::
    StructField("b", StringType, true) ::
    StructField("c", StringType, true) :: Nil)
  val schemaTwo = StructType(
    StructField("a", StringType, true) :: Nil)
  // `DROPMALFORMED` mode should skip corrupt records
  val jsonDFOne = spark.read
    .option("mode", "DROPMALFORMED")
    .json(corruptRecords)
  checkAnswer(
    jsonDFOne,
    Row("str_a_4", "str_b_4", "str_c_4") :: Nil
  )
  assert(jsonDFOne.schema === schemaOne)

  // Same records read with a narrower user-supplied schema: still only the
  // single well-formed record survives.
  val jsonDFTwo = spark.read
    .option("mode", "DROPMALFORMED")
    .schema(schemaTwo)
    .json(corruptRecords)
  checkAnswer(
    jsonDFTwo,
    Row("str_a_4") :: Nil)
  assert(jsonDFTwo.schema === schemaTwo)
}
test("SPARK-19641: Additional corrupt records: DROPMALFORMED mode") {
  // DROPMALFORMED silently discards records that cannot be parsed; only the
  // single valid record should remain.
  val df = spark.read
    .option("mode", "DROPMALFORMED")
    .json(additionalCorruptRecords)
  checkAnswer(
    df,
    Row("test"))
  assert(df.schema === new StructType().add("dummy", StringType))
}
test("Corrupt records: PERMISSIVE mode, without designated column for malformed records") {
  val schema = StructType(Seq(
    StructField("a", StringType, true),
    StructField("b", StringType, true),
    StructField("c", StringType, true)))

  val df = spark.read.schema(schema).json(corruptRecords)

  // In PERMISSIVE mode every field of a corrupted record comes back as null.
  checkAnswer(
    df.select($"a", $"b", $"c"),
    Seq(
      Row(null, null, null),
      Row(null, null, null),
      Row(null, null, null),
      Row("str_a_4", "str_b_4", "str_c_4"),
      Row(null, null, null))
  )
}
test("Corrupt records: PERMISSIVE mode, with designated column for malformed records") {
  // Test if we can query corrupt records.
  withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
    val jsonDF = spark.read.json(corruptRecords)

    // The configured corrupt-record column is part of the inferred schema.
    val schema = StructType(
      StructField("_unparsed", StringType, true) ::
      StructField("a", StringType, true) ::
      StructField("b", StringType, true) ::
      StructField("c", StringType, true) :: Nil)

    assert(schema === jsonDF.schema)

    // In HiveContext, backticks should be used to access columns starting with an underscore.
    checkAnswer(
      jsonDF.select($"a", $"b", $"c", $"_unparsed"),
      Row(null, null, null, "{") ::
      Row(null, null, null, """{"a":1, b:2}""") ::
      Row(null, null, null, """{"a":{, b:3}""") ::
      Row("str_a_4", "str_b_4", "str_c_4", null) ::
      Row(null, null, null, "]") :: Nil
    )

    // Well-formed rows leave the corrupt-record column null...
    checkAnswer(
      jsonDF.filter($"_unparsed".isNull).select($"a", $"b", $"c"),
      Row("str_a_4", "str_b_4", "str_c_4")
    )

    // ...while malformed rows keep their raw input text there.
    checkAnswer(
      jsonDF.filter($"_unparsed".isNotNull).select($"_unparsed"),
      Row("{") ::
      Row("""{"a":1, b:2}""") ::
      Row("""{"a":{, b:3}""") ::
      Row("]") :: Nil
    )
  }
}
test("SPARK-13953 Rename the corrupt record field via option") {
  // Name the corrupt-record column through a per-read option rather than SQLConf.
  val jsonDF = spark.read
    .option("columnNameOfCorruptRecord", "_malformed")
    .json(corruptRecords)
  val schema = StructType(
    StructField("_malformed", StringType, true) ::
    StructField("a", StringType, true) ::
    StructField("b", StringType, true) ::
    StructField("c", StringType, true) :: Nil)

  assert(schema === jsonDF.schema)
  checkAnswer(
    jsonDF.selectExpr("a", "b", "c", "_malformed"),
    Row(null, null, null, "{") ::
    Row(null, null, null, """{"a":1, b:2}""") ::
    Row(null, null, null, """{"a":{, b:3}""") ::
    Row("str_a_4", "str_b_4", "str_c_4", null) ::
    Row(null, null, null, "]") :: Nil
  )
}
test("SPARK-4068: nulls in arrays") {
  val jsonDF = spark.read.json(nullsInArrays)
  jsonDF.createOrReplaceTempView("jsonTable")

  // Nulls at various nesting depths must not prevent inference of the
  // element types of the nested arrays.
  val schema = StructType(
    StructField("field1",
      ArrayType(ArrayType(ArrayType(ArrayType(StringType, true), true), true), true), true) ::
    StructField("field2",
      ArrayType(ArrayType(
        StructType(StructField("Test", LongType, true) :: Nil), true), true), true) ::
    StructField("field3",
      ArrayType(ArrayType(
        StructType(StructField("Test", StringType, true) :: Nil), true), true), true) ::
    StructField("field4",
      ArrayType(ArrayType(ArrayType(LongType, true), true), true), true) :: Nil)

  assert(schema === jsonDF.schema)

  checkAnswer(
    sql(
      """
        |SELECT field1, field2, field3, field4
        |FROM jsonTable
      """.stripMargin),
    Row(Seq(Seq(null), Seq(Seq(Seq("Test")))), null, null, null) ::
    Row(null, Seq(null, Seq(Row(1))), null, null) ::
    Row(null, null, Seq(Seq(null), Seq(Row("2"))), null) ::
    Row(null, null, null, Seq(Seq(null, Seq(1, 2, 3)))) :: Nil
  )
}
test("SPARK-4228 DataFrame to JSON") {
  // Flat schema: non-nullable scalar prefix, nullable array, nullable f5.
  val schema1 = StructType(
    StructField("f1", IntegerType, false) ::
    StructField("f2", StringType, false) ::
    StructField("f3", BooleanType, false) ::
    StructField("f4", ArrayType(StringType), nullable = true) ::
    StructField("f5", IntegerType, true) :: Nil)

  val rowRDD1 = unparsedStrings.map { r =>
    val values = r.split(",").map(_.trim)
    // f5 is null when the fourth CSV column is not a valid integer.
    val v5 = try values(3).toInt catch {
      case _: NumberFormatException => null
    }
    Row(values(0).toInt, values(1), values(2).toBoolean, r.split(",").toList, v5)
  }

  val df1 = spark.createDataFrame(rowRDD1, schema1)
  df1.createOrReplaceTempView("applySchema1")
  val df2 = df1.toDF
  val result = df2.toJSON.collect()
  // Null f5 is omitted from the JSON object rather than emitted as null.
  // scalastyle:off
  assert(result(0) === "{\\"f1\\":1,\\"f2\\":\\"A1\\",\\"f3\\":true,\\"f4\\":[\\"1\\",\\" A1\\",\\" true\\",\\" null\\"]}")
  assert(result(3) === "{\\"f1\\":4,\\"f2\\":\\"D4\\",\\"f3\\":true,\\"f4\\":[\\"4\\",\\" D4\\",\\" true\\",\\" 2147483644\\"],\\"f5\\":2147483644}")
  // scalastyle:on

  // Nested schema: struct field plus map field.
  val schema2 = StructType(
    StructField("f1", StructType(
      StructField("f11", IntegerType, false) ::
      StructField("f12", BooleanType, false) :: Nil), false) ::
    StructField("f2", MapType(StringType, IntegerType, true), false) :: Nil)

  val rowRDD2 = unparsedStrings.map { r =>
    val values = r.split(",").map(_.trim)
    val v4 = try values(3).toInt catch {
      case _: NumberFormatException => null
    }
    Row(Row(values(0).toInt, values(2).toBoolean), Map(values(1) -> v4))
  }

  val df3 = spark.createDataFrame(rowRDD2, schema2)
  df3.createOrReplaceTempView("applySchema2")
  val df4 = df3.toDF
  val result2 = df4.toJSON.collect()

  assert(result2(1) === "{\\"f1\\":{\\"f11\\":2,\\"f12\\":false},\\"f2\\":{\\"B2\\":null}}")
  assert(result2(3) === "{\\"f1\\":{\\"f11\\":4,\\"f12\\":true},\\"f2\\":{\\"D4\\":2147483644}}")

  // Round-trip: reading a DataFrame's own toJSON output reproduces the data.
  val jsonDF = spark.read.json(primitiveFieldAndType)
  val primTable = spark.read.json(jsonDF.toJSON)
  primTable.createOrReplaceTempView("primitiveTable")
  checkAnswer(
    sql("select * from primitiveTable"),
    Row(new java.math.BigDecimal("92233720368547758070"),
      true,
      1.7976931348623157E308,
      10,
      21474836470L,
      "this is a simple string.")
  )

  val complexJsonDF = spark.read.json(complexFieldAndType1)
  val compTable = spark.read.json(complexJsonDF.toJSON)
  compTable.createOrReplaceTempView("complexTable")
  // Access elements of a primitive array.
  checkAnswer(
    sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from complexTable"),
    Row("str1", "str2", null)
  )

  // Access an array of null values.
  checkAnswer(
    sql("select arrayOfNull from complexTable"),
    Row(Seq(null, null, null, null))
  )

  // Access elements of a BigInteger array (we use DecimalType internally).
  checkAnswer(
    sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] " +
      " from complexTable"),
    Row(new java.math.BigDecimal("922337203685477580700"),
      new java.math.BigDecimal("-922337203685477580800"), null)
  )

  // Access elements of an array of arrays.
  checkAnswer(
    sql("select arrayOfArray1[0], arrayOfArray1[1] from complexTable"),
    Row(Seq("1", "2", "3"), Seq("str1", "str2"))
  )

  // Access elements of an array of arrays.
  checkAnswer(
    sql("select arrayOfArray2[0], arrayOfArray2[1] from complexTable"),
    Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
  )

  // Access elements of an array inside a field with the type of ArrayType(ArrayType).
  checkAnswer(
    sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from complexTable"),
    Row("str2", 2.1)
  )

  // Access a struct and fields inside of it.
  checkAnswer(
    sql("select struct, struct.field1, struct.field2 from complexTable"),
    Row(
      Row(true, new java.math.BigDecimal("92233720368547758070")),
      true,
      new java.math.BigDecimal("92233720368547758070")) :: Nil
  )

  // Access an array field of a struct.
  checkAnswer(
    sql("select structWithArrayFields.field1, structWithArrayFields.field2 from complexTable"),
    Row(Seq(4, 5, 6), Seq("str1", "str2"))
  )

  // Access elements of an array field of a struct.
  checkAnswer(
    sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] " +
      "from complexTable"),
    Row(5, null)
  )
}
test("Dataset toJSON doesn't construct rdd") {
  // toJSON should stay in the logical-plan domain: an ExternalRDD node in the
  // plan would indicate an eager conversion to an RDD.
  val rddNode = spark.emptyDataFrame.toJSON.queryExecution.logical.find {
    case ExternalRDD(_, _) => true
    case _ => false
  }
  assert(rddNode.isEmpty, "Expected logical plan of toJSON to not contain an RDD")
}
test("JSONRelation equality test") {
  withTempPath(dir => {
    val path = dir.getCanonicalFile.toURI.toString
    sparkContext.parallelize(1 to 100)
      .map(i => s"""{"a": 1, "b": "str$i"}""").saveAsTextFile(path)

    // Two independently resolved relations over the same path and options
    // must compare equal.
    val d1 = DataSource(
      spark,
      userSpecifiedSchema = None,
      partitionColumns = Array.empty[String],
      bucketSpec = None,
      className = classOf[JsonFileFormat].getCanonicalName,
      options = Map("path" -> path)).resolveRelation()

    val d2 = DataSource(
      spark,
      userSpecifiedSchema = None,
      partitionColumns = Array.empty[String],
      bucketSpec = None,
      className = classOf[JsonFileFormat].getCanonicalName,
      options = Map("path" -> path)).resolveRelation()
    assert(d1 === d2)
  })
}
test("SPARK-6245 JsonInferSchema.infer on empty RDD") {
  // This is really a test that it doesn't throw an exception
  val inferred = JsonInferSchema.infer(
    empty.rdd,
    new JSONOptions(Map.empty[String, String], "GMT"),
    CreateJacksonParser.string)
  assert(inferred === StructType(Seq()))
}
test("SPARK-7565 MapType in JsonRDD") {
  withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
    withTempDir { dir =>
      // JSON data read with a map schema must survive a parquet round-trip.
      val schemaWithSimpleMap = StructType(
        StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
      val df = spark.read.schema(schemaWithSimpleMap).json(mapType1)

      val path = dir.getAbsolutePath
      df.write.mode("overwrite").parquet(path)
      // order of MapType is not defined
      assert(spark.read.parquet(path).count() == 5)

      // The corrupt-record data (with the `_unparsed` column) must round-trip too.
      val df2 = spark.read.json(corruptRecords)
      df2.write.mode("overwrite").parquet(path)
      checkAnswer(spark.read.parquet(path), df2.collect())
    }
  }
}
test("SPARK-8093 Erase empty structs") {
  // Records that contain only empty structs should infer to an empty schema,
  // not to zero-field struct columns.
  val inferred = JsonInferSchema.infer(
    emptyRecords.rdd,
    new JSONOptions(Map.empty[String, String], "GMT"),
    CreateJacksonParser.string)
  assert(inferred === StructType(Seq()))
}
test("JSON with Partition") {
  // Writes `rdd` as text files under `parent/partName=partValue` and returns
  // the partition directory.
  def makePartition(rdd: RDD[String], parent: File, partName: String, partValue: Any): File = {
    val p = new File(parent, s"$partName=${partValue.toString}")
    rdd.saveAsTextFile(p.getCanonicalPath)
    p
  }

  withTempPath(root => {
    val d1 = new File(root, "d1=1")
    // root/d1=1/col1=abc (4 records)
    // The returned File is not needed; makePartition is called for its side effect.
    makePartition(
      sparkContext.parallelize(2 to 5).map(i => s"""{"a": 1, "b": "str$i"}"""),
      d1,
      "col1",
      "abc")

    // root/d1=1/col1=abd (5 records)
    makePartition(
      sparkContext.parallelize(6 to 10).map(i => s"""{"a": 1, "b": "str$i"}"""),
      d1,
      "col1",
      "abd")

    // Partition columns `d1` and `col1` are discovered from the directory layout
    // and are usable in WHERE clauses.
    spark.read.json(root.getAbsolutePath).createOrReplaceTempView("test_myjson_with_part")
    checkAnswer(sql(
      "SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abc'"), Row(4))
    checkAnswer(sql(
      "SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abd'"), Row(5))
    checkAnswer(sql(
      "SELECT count(a) FROM test_myjson_with_part where d1 = 1"), Row(9))
  })
}
test("backward compatibility") {
  // This test we make sure our JSON support can read JSON data generated by previous version
  // of Spark generated through toJSON method and JSON data source.
  // The data is generated by the following program.
  // Here are a few notes:
  //  - Spark 1.5.0 cannot save timestamp data. So, we manually added timestamp field (col13)
  //      in the JSON object.
  //  - For Spark before 1.5.1, we do not generate UDTs. So, we manually added the UDT value to
  //      JSON objects generated by those Spark versions (col17).
  //  - If the type is NullType, we do not write data out.

  // Create the schema: one column per supported data type (col0..col17).
  val struct =
    StructType(
      StructField("f1", FloatType, true) ::
      StructField("f2", ArrayType(BooleanType), true) :: Nil)

  val dataTypes =
    Seq(
      StringType, BinaryType, NullType, BooleanType,
      ByteType, ShortType, IntegerType, LongType,
      FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
      DateType, TimestampType,
      ArrayType(IntegerType), MapType(StringType, LongType), struct,
      new UDT.MyDenseVectorUDT())
  val fields = dataTypes.zipWithIndex.map { case (dataType, index) =>
    StructField(s"col$index", dataType, nullable = true)
  }
  val schema = StructType(fields)

  // One constant value per column after col0 (col0 carries the Spark version).
  val constantValues =
    Seq(
      "a string in binary".getBytes(StandardCharsets.UTF_8),
      null,
      true,
      1.toByte,
      2.toShort,
      3,
      Long.MaxValue,
      0.25.toFloat,
      0.75,
      new java.math.BigDecimal(s"1234.23456"),
      new java.math.BigDecimal(s"1.23456"),
      java.sql.Date.valueOf("2015-01-01"),
      java.sql.Timestamp.valueOf("2015-01-01 23:50:59.123"),
      Seq(2, 3, 4),
      Map("a string" -> 2000L),
      Row(4.75.toFloat, Seq(false, true)),
      new UDT.MyDenseVector(Array(0.25, 2.25, 4.25)))
  val data =
    Row.fromSeq(Seq("Spark " + spark.sparkContext.version) ++ constantValues) :: Nil

  // Data generated by previous versions.
  // scalastyle:off
  val existingJSONData =
    """{"col0":"Spark 1.2.2","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
    """{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"16436","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" :: Nil
  // scalastyle:on

  // Generate data for the current version.
  val df = spark.createDataFrame(spark.sparkContext.parallelize(data, 1), schema)
  withTempPath { path =>
    df.write.format("json").mode("overwrite").save(path.getCanonicalPath)

    // df.toJSON will convert internal rows to external rows first and then generate
    // JSON objects. While, df.write.format("json") will write internal rows directly.
    val allJSON =
      existingJSONData ++
        df.toJSON.collect() ++
        sparkContext.textFile(path.getCanonicalPath).collect()

    Utils.deleteRecursively(path)
    sparkContext.parallelize(allJSON, 1).saveAsTextFile(path.getCanonicalPath)

    // Read data back with the schema specified.
    val col0Values =
      Seq(
        "Spark 1.2.2",
        "Spark 1.3.1",
        "Spark 1.3.1",
        "Spark 1.4.1",
        "Spark 1.4.1",
        "Spark 1.5.0",
        "Spark 1.5.0",
        "Spark " + spark.sparkContext.version,
        "Spark " + spark.sparkContext.version)
    val expectedResult = col0Values.map { v =>
      Row.fromSeq(Seq(v) ++ constantValues)
    }
    checkAnswer(
      spark.read.format("json").schema(schema).load(path.getCanonicalPath),
      expectedResult
    )
  }
}
test("SPARK-11544 test pathfilter") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath

    val df = spark.range(2)
    df.write.json(path + "/p=1")
    df.write.json(path + "/p=2")
    // Without a filter both partitions are read.
    assert(spark.read.json(path).count() === 4)

    // With the TestFileFilter configured via Hadoop conf keys, only 2 of the
    // 4 rows are read.
    val filterConf = Map(
      "mapred.input.pathFilter.class" -> classOf[TestFileFilter].getName,
      "mapreduce.input.pathFilter.class" -> classOf[TestFileFilter].getName
    )
    assert(spark.read.options(filterConf).json(path).count() === 2)
  }
}
test("SPARK-12057 additional corrupt records do not throw exceptions") {
  // Test if we can query corrupt records.
  withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
    withTempView("jsonTable") {
      val schema = StructType(
        StructField("_unparsed", StringType, true) ::
        StructField("dummy", StringType, true) :: Nil)

      {
        // We need to make sure we can infer the schema.
        val jsonDF = spark.read.json(additionalCorruptRecords)
        assert(jsonDF.schema === schema)
      }

      {
        val jsonDF = spark.read.schema(schema).json(additionalCorruptRecords)
        jsonDF.createOrReplaceTempView("jsonTable")

        // In HiveContext, backticks should be used to access columns starting with an underscore.
        checkAnswer(
          sql(
            """
              |SELECT dummy, _unparsed
              |FROM jsonTable
            """.stripMargin),
          Row("test", null) ::
          Row(null, """[1,2,3]""") ::
          Row(null, """":"test", "a":1}""") ::
          Row(null, """42""") ::
          Row(null, """     ","ian":"test"}""") :: Nil
        )
      }
    }
  }
}
test("Parse JSON rows having an array type and a struct type in the same field.") {
  withTempDir { dir =>
    // `write.text` refuses an existing target path, so drop the directory
    // created by `withTempDir`; the write re-creates it and `withTempDir`
    // still cleans it up afterwards. (Previously a second temp dir was
    // created here, shadowing `dir` and leaking on disk.)
    dir.delete()
    val path = dir.getCanonicalPath
    arrayAndStructRecords.map(record => record.replaceAll("\\n", " ")).write.text(path)

    // With a user-provided struct schema, both records must parse into rows
    // even though one of them carries an array in field `a`.
    val schema =
      StructType(
        StructField("a", StructType(
          StructField("b", StringType) :: Nil
        )) :: Nil)
    val jsonDF = spark.read.schema(schema).json(path)
    assert(jsonDF.count() == 2)
  }
}
test("SPARK-12872 Support to specify the option for compression codec") {
  withTempDir { dir =>
    // `write.text` requires a non-existent target path; delete the managed
    // temp dir (the write re-creates it and `withTempDir` still removes it).
    // Previously a second temp dir was created here, shadowing `dir` and
    // leaking on disk.
    dir.delete()
    val path = dir.getCanonicalPath
    primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)

    val jsonDF = spark.read.json(path)
    val jsonDir = new File(dir, "json").getCanonicalPath
    // The compression option value is case-insensitive.
    jsonDF.coalesce(1).write
      .format("json")
      .option("compression", "gZiP")
      .save(jsonDir)

    val compressedFiles = new File(jsonDir).listFiles()
    assert(compressedFiles.exists(_.getName.endsWith(".json.gz")))

    // Reading the compressed output back must reproduce the original data.
    val jsonCopy = spark.read
      .format("json")
      .load(jsonDir)

    assert(jsonCopy.count == jsonDF.count)
    val jsonCopySome = jsonCopy.selectExpr("string", "long", "boolean")
    val jsonDFSome = jsonDF.selectExpr("string", "long", "boolean")
    checkAnswer(jsonCopySome, jsonDFSome)
  }
}
test("SPARK-13543 Write the output as uncompressed via option()") {
  // Hadoop configuration that would force gzip output; the explicit
  // "compression" -> "none" data-source option must win over it.
  val extraOptions = Map[String, String](
    "mapreduce.output.fileoutputformat.compress" -> "true",
    "mapreduce.output.fileoutputformat.compress.type" -> CompressionType.BLOCK.toString,
    "mapreduce.output.fileoutputformat.compress.codec" -> classOf[GzipCodec].getName,
    "mapreduce.map.output.compress" -> "true",
    "mapreduce.map.output.compress.codec" -> classOf[GzipCodec].getName
  )
  withTempDir { dir =>
    // `write.text` requires a non-existent target path; delete the managed
    // temp dir (the write re-creates it and `withTempDir` still removes it).
    // Previously a second temp dir was created here, shadowing `dir` and
    // leaking on disk.
    dir.delete()

    val path = dir.getCanonicalPath
    primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)

    val jsonDF = spark.read.json(path)
    val jsonDir = new File(dir, "json").getCanonicalPath
    jsonDF.coalesce(1).write
      .format("json")
      .option("compression", "none")
      .options(extraOptions)
      .save(jsonDir)

    // No file should carry the gzip suffix despite the Hadoop conf above.
    val compressedFiles = new File(jsonDir).listFiles()
    assert(compressedFiles.exists(!_.getName.endsWith(".json.gz")))

    val jsonCopy = spark.read
      .format("json")
      .options(extraOptions)
      .load(jsonDir)

    assert(jsonCopy.count == jsonDF.count)
    val jsonCopySome = jsonCopy.selectExpr("string", "long", "boolean")
    val jsonDFSome = jsonDF.selectExpr("string", "long", "boolean")
    checkAnswer(jsonCopySome, jsonDFSome)
  }
}
test("Casting long as timestamp") {
  withTempView("jsonTable") {
    // Read a JSON long through a TimestampType schema and check the value
    // that comes back.
    val tsSchema = (new StructType).add("ts", TimestampType)
    spark.read.schema(tsSchema).json(timestampAsLong).createOrReplaceTempView("jsonTable")

    checkAnswer(
      sql("select ts from jsonTable"),
      Row(java.sql.Timestamp.valueOf("2016-01-02 03:04:05"))
    )
  }
}
test("wide nested json table") {
  // Build a record whose fields `a` and `b` each hold an array containing a
  // single 100-column struct.
  val nested = (1 to 100).map { i =>
    s"""
       |"c$i": $i
     """.stripMargin
  }.mkString(", ")
  val json = s"""
     |{"a": [{$nested}], "b": [{$nested}]}
   """.stripMargin
  val wideDF = spark.read.json(Seq(json).toDS())
  // Only the two top-level columns should be inferred, and the wide schema
  // must be materializable without error.
  assert(wideDF.schema.size === 2)
  wideDF.collect()
}
test("Write dates correctly with dateFormat option") {
  val customSchema = new StructType(Array(StructField("date", DateType, true)))
  withTempDir { dir =>
    // With dateFormat option.
    val datesWithFormatPath = s"${dir.getCanonicalPath}/datesWithFormat.json"
    // Parse the input with one date pattern...
    val datesWithFormat = spark.read
      .schema(customSchema)
      .option("dateFormat", "dd/MM/yyyy HH:mm")
      .json(datesRecords)

    // ...and write it back out with a different one.
    datesWithFormat.write
      .format("json")
      .option("dateFormat", "yyyy/MM/dd")
      .save(datesWithFormatPath)

    // This will load back the dates as string.
    val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
    val stringDatesWithFormat = spark.read
      .schema(stringSchema)
      .json(datesWithFormatPath)
    val expectedStringDatesWithFormat = Seq(
      Row("2015/08/26"),
      Row("2014/10/27"),
      Row("2016/01/28"))

    checkAnswer(stringDatesWithFormat, expectedStringDatesWithFormat)
  }
}
test("Write timestamps correctly with timestampFormat option") {
  val customSchema = new StructType(Array(StructField("date", TimestampType, true)))
  withTempDir { dir =>
    // With dateFormat option.
    val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
    // Parse the input with one timestamp pattern...
    val timestampsWithFormat = spark.read
      .schema(customSchema)
      .option("timestampFormat", "dd/MM/yyyy HH:mm")
      .json(datesRecords)
    // ...and write it back out with a different one.
    timestampsWithFormat.write
      .format("json")
      .option("timestampFormat", "yyyy/MM/dd HH:mm")
      .save(timestampsWithFormatPath)

    // This will load back the timestamps as string.
    val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
    val stringTimestampsWithFormat = spark.read
      .schema(stringSchema)
      .json(timestampsWithFormatPath)
    val expectedStringDatesWithFormat = Seq(
      Row("2015/08/26 18:00"),
      Row("2014/10/27 18:30"),
      Row("2016/01/28 20:00"))

    checkAnswer(stringTimestampsWithFormat, expectedStringDatesWithFormat)
  }
}
test("Write timestamps correctly with timestampFormat option and timeZone option") {
  val customSchema = new StructType(Array(StructField("date", TimestampType, true)))
  withTempDir { dir =>
    // With dateFormat option and timeZone option.
    val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
    val timestampsWithFormat = spark.read
      .schema(customSchema)
      .option("timestampFormat", "dd/MM/yyyy HH:mm")
      .json(datesRecords)
    // Write rendered in GMT via the timeZone option.
    timestampsWithFormat.write
      .format("json")
      .option("timestampFormat", "yyyy/MM/dd HH:mm")
      .option(DateTimeUtils.TIMEZONE_OPTION, "GMT")
      .save(timestampsWithFormatPath)

    // This will load back the timestamps as string.
    val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
    val stringTimestampsWithFormat = spark.read
      .schema(stringSchema)
      .json(timestampsWithFormatPath)
    // NOTE: these strings are shifted relative to the test above without the
    // timeZone option, reflecting the GMT rendering.
    val expectedStringDatesWithFormat = Seq(
      Row("2015/08/27 01:00"),
      Row("2014/10/28 01:30"),
      Row("2016/01/29 04:00"))

    checkAnswer(stringTimestampsWithFormat, expectedStringDatesWithFormat)

    // Reading back with the same format and timeZone must round-trip exactly.
    val readBack = spark.read
      .schema(customSchema)
      .option("timestampFormat", "yyyy/MM/dd HH:mm")
      .option(DateTimeUtils.TIMEZONE_OPTION, "GMT")
      .json(timestampsWithFormatPath)

    checkAnswer(readBack, timestampsWithFormat)
  }
}
// Option keys must be matched case-insensitively: "prefersDecimal" and
// "PREfersdecimaL" both enable decimal inference (SPARK-18433).
test("SPARK-18433: Improve DataSource option keys to be more case-insensitive") {
  val records = Seq("""{"a": 3, "b": 1.1}""", """{"a": 3.1, "b": 0.000001}""").toDS()
  val schema = StructType(
    StructField("a", DecimalType(21, 1), true) ::
    StructField("b", DecimalType(7, 6), true) :: Nil)
  val df1 = spark.read.option("prefersDecimal", "true").json(records)
  assert(df1.schema == schema)
  val df2 = spark.read.option("PREfersdecimaL", "true").json(records)
  assert(df2.schema == schema)
}
// Writes a multi-line JSON document as gzip-compressed text, reads it back in
// wholeFile mode, re-writes it gzip-compressed through the JSON writer, and
// checks both the file extensions and the data survive the round trip.
test("SPARK-18352: Parse normal multi-line JSON files (compressed)") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    primitiveFieldAndType
      .toDF("value")
      .write
      // Mixed-case codec name on purpose — presumably the codec lookup is
      // case-insensitive; confirm against the compression codec factory.
      .option("compression", "GzIp")
      .text(path)
    assert(new File(path).listFiles().exists(_.getName.endsWith(".gz")))
    val jsonDF = spark.read.option("wholeFile", true).json(path)
    val jsonDir = new File(dir, "json").getCanonicalPath
    jsonDF.coalesce(1).write
      .option("compression", "gZiP")
      .json(jsonDir)
    assert(new File(jsonDir).listFiles().exists(_.getName.endsWith(".json.gz")))
    val originalData = spark.read.json(primitiveFieldAndType)
    checkAnswer(jsonDF, originalData)
    checkAnswer(spark.read.schema(originalData.schema).json(jsonDir), originalData)
  }
}
// Same round trip as the compressed variant, but with plain uncompressed
// text/JSON files.
test("SPARK-18352: Parse normal multi-line JSON files (uncompressed)") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    primitiveFieldAndType
      .toDF("value")
      .write
      .text(path)
    val jsonDF = spark.read.option("wholeFile", true).json(path)
    val jsonDir = new File(dir, "json").getCanonicalPath
    jsonDF.coalesce(1).write.json(jsonDir)
    val compressedFiles = new File(jsonDir).listFiles()
    assert(compressedFiles.exists(_.getName.endsWith(".json")))
    val originalData = spark.read.json(primitiveFieldAndType)
    checkAnswer(jsonDF, originalData)
    checkAnswer(spark.read.schema(originalData.schema).json(jsonDir), originalData)
  }
}
// In wholeFile mode each file contributes at most one JSON value; anything
// after the first value (here "{invalid}") is silently discarded.
test("SPARK-18352: Expect one JSON document per file") {
  // the json parser terminates as soon as it sees a matching END_OBJECT or END_ARRAY token.
  // this might not be the optimal behavior but this test verifies that only the first value
  // is parsed and the rest are discarded.
  // alternatively the parser could continue parsing following objects, which may further reduce
  // allocations by skipping the line reader entirely
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    spark
      .createDataFrame(Seq(Tuple1("{}{invalid}")))
      .coalesce(1)
      .write
      .text(path)
    val jsonDF = spark.read.option("wholeFile", true).json(path)
    // no corrupt record column should be created
    assert(jsonDF.schema === StructType(Seq()))
    // only the first object should be read
    assert(jsonDF.count() === 1)
  }
}
// In PERMISSIVE mode every multi-line document yields a row: parseable ones
// populate the data column, corrupt ones land in _corrupt_record.
test("SPARK-18352: Handle multi-line corrupt documents (PERMISSIVE)") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    val corruptRecordCount = additionalCorruptRecords.count().toInt
    assert(corruptRecordCount === 5)
    additionalCorruptRecords
      .toDF("value")
      // this is the minimum partition count that avoids hash collisions
      .repartition(corruptRecordCount * 4, F.hash($"value"))
      .write
      .text(path)
    val jsonDF = spark.read.option("wholeFile", true).option("mode", "PERMISSIVE").json(path)
    assert(jsonDF.count() === corruptRecordCount)
    // Inferred schema: the corrupt-record column plus the single data column.
    assert(jsonDF.schema === new StructType()
      .add("_corrupt_record", StringType)
      .add("dummy", StringType))
    // Outer-join parsed rows back to the raw inputs (whitespace-trimmed on
    // both sides) and count how many parsed cleanly vs. were kept as corrupt:
    // 1 valid, 4 corrupt, 6 rows total from the outer join.
    val counts = jsonDF
      .join(
        additionalCorruptRecords.toDF("value"),
        F.regexp_replace($"_corrupt_record", "(^\\\\s+|\\\\s+$)", "") === F.trim($"value"),
        "outer")
      .agg(
        F.count($"dummy").as("valid"),
        F.count($"_corrupt_record").as("corrupt"),
        F.count("*").as("count"))
    checkAnswer(counts, Row(1, 4, 6))
  }
}
// DROPMALFORMED silently drops the corrupt documents, keeping only the single
// well-formed one (SPARK-19641).
test("SPARK-19641: Handle multi-line corrupt documents (DROPMALFORMED)") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    val corruptRecordCount = additionalCorruptRecords.count().toInt
    assert(corruptRecordCount === 5)
    additionalCorruptRecords
      .toDF("value")
      // this is the minimum partition count that avoids hash collisions
      .repartition(corruptRecordCount * 4, F.hash($"value"))
      .write
      .text(path)
    val jsonDF = spark.read.option("wholeFile", true).option("mode", "DROPMALFORMED").json(path)
    checkAnswer(jsonDF, Seq(Row("test")))
  }
}
// FAILFAST must raise a SparkException for corrupt documents both during
// schema inference and, when an explicit schema is supplied, during parsing.
test("SPARK-18352: Handle multi-line corrupt documents (FAILFAST)") {
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    val corruptRecordCount = additionalCorruptRecords.count().toInt
    assert(corruptRecordCount === 5)
    additionalCorruptRecords
      .toDF("value")
      // this is the minimum partition count that avoids hash collisions
      .repartition(corruptRecordCount * 4, F.hash($"value"))
      .write
      .text(path)
    val schema = new StructType().add("dummy", StringType)
    // `FAILFAST` mode should throw an exception for corrupt records.
    val exceptionOne = intercept[SparkException] {
      spark.read
        .option("wholeFile", true)
        .option("mode", "FAILFAST")
        .json(path)
    }
    assert(exceptionOne.getMessage.contains("Malformed records are detected in schema " +
      "inference. Parse Mode: FAILFAST."))
    val exceptionTwo = intercept[SparkException] {
      spark.read
        .option("wholeFile", true)
        .option("mode", "FAILFAST")
        .schema(schema)
        .json(path)
        .collect()
    }
    assert(exceptionTwo.getMessage.contains("Malformed records are detected in record " +
      "parsing. Parse Mode: FAILFAST."))
  }
}
// The column configured via columnNameOfCorruptRecord must be a nullable
// string; a schema declaring it as IntegerType is rejected with an
// AnalysisException — both for an explicit "Permissive" mode and for an
// unrecognised mode string.
test("Throw an exception if a `columnNameOfCorruptRecord` field violates requirements") {
  val columnNameOfCorruptRecord = "_unparsed"
  val schema = StructType(
    StructField(columnNameOfCorruptRecord, IntegerType, true) ::
    StructField("a", StringType, true) ::
    StructField("b", StringType, true) ::
    StructField("c", StringType, true) :: Nil)
  val errMsg = intercept[AnalysisException] {
    spark.read
      .option("mode", "Permissive")
      .option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
      .schema(schema)
      .json(corruptRecords)
  }.getMessage
  assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
  // We use `PERMISSIVE` mode by default if invalid string is given.
  withTempPath { dir =>
    val path = dir.getCanonicalPath
    corruptRecords.toDF("value").write.text(path)
    // Note: this inner errMsg intentionally shadows the outer one.
    val errMsg = intercept[AnalysisException] {
      spark.read
        .option("mode", "permm")
        .option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
        .schema(schema)
        .json(path)
        .collect
    }.getMessage
    assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
  }
}
// "NaN", "Infinity" and "-Infinity" parse into the corresponding special
// float/double values; the match is case-sensitive, so lower-cased variants
// must fail in FAILFAST mode (SPARK-18772).
test("SPARK-18772: Parse special floats correctly") {
  val jsons = Seq(
    """{"a": "NaN"}""",
    """{"a": "Infinity"}""",
    """{"a": "-Infinity"}""")
  // positive cases
  val checks: Seq[Double => Boolean] = Seq(
    _.isNaN,
    _.isPosInfinity,
    _.isNegInfinity)
  Seq(FloatType, DoubleType).foreach { dt =>
    jsons.zip(checks).foreach { case (json, check) =>
      // Cast to Double so the same predicate works for both field types.
      val ds = spark.read
        .schema(StructType(Seq(StructField("a", dt))))
        .json(Seq(json).toDS())
        .select($"a".cast(DoubleType)).as[Double]
      assert(check(ds.first()))
    }
  }
  // negative cases
  Seq(FloatType, DoubleType).foreach { dt =>
    val lowerCasedJsons = jsons.map(_.toLowerCase(Locale.ROOT))
    // The special floats are case-sensitive so these cases below throw exceptions.
    lowerCasedJsons.foreach { lowerCasedJson =>
      val e = intercept[SparkException] {
        spark.read
          .option("mode", "FAILFAST")
          .schema(StructType(Seq(StructField("a", dt))))
          .json(Seq(lowerCasedJson).toDS())
          .collect()
      }
      assert(e.getMessage.contains("Cannot parse"))
    }
  }
}
}
| map222/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala | Scala | apache-2.0 | 75,569 |
package controllers
import javax.inject.Inject
import akka.actor._
import upickle.default._
import scala.concurrent.Future
import scala.util._
import scala.concurrent.ExecutionContext.Implicits.global
import shared._
import utils.misc.MyTimeUtils._
import shared.SharedTimeUtils._
import smartchess._
import utils.misc._
import shared.SharedSettings._
import scala.collection.mutable.ArrayBuffer
object playermanagement_object {
  import tables._

  // Keys of tables whose seating changed during the most recent sitplayer
  // call (the player was unseated from them as a side effect).
  // NOTE(review): shared mutable state, reset at the start of every
  // sitplayer call — not thread-safe if sitplayer can run concurrently;
  // confirm the caller serialises access.
  var changedtables = ArrayBuffer[String]()

  /** Seats `player` at seat `i` of table `k`.
    *
    * Fails (returns false) when the table does not exist, when a human
    * player already holds a seat at the same table under the same handle,
    * or when the player is seated at another table whose game is in
    * progress. Otherwise the player is unseated from any other non-started
    * table (recorded in `changedtables`), seated here with a fresh clock,
    * and the table's idle counter is reset.
    *
    * @return whether the seated player is valid (per Player.valid)
    */
  def sitplayer(k: String, i: Int, player: Player): Boolean = {
    changedtables = ArrayBuffer[String]()
    if (!tables.contains(k)) return false
    val t = tables(k)
    // A human may not occupy two seats at the same table.
    if (player.human && (t.hashandle(player.handle))) return false
    // Unseat a human player from every other table that is not in progress.
    for ((sk, st) <- tables if (sk != k)) {
      val useri = st.getuseri(player.handle)
      if ((useri >= 0) && (!player.ai)) {
        if (st.inprogress) return false
        if (!st.terminated) st.players(useri).handle = ""
        changedtables += sk
      }
    }
    t.players(i) = player
    val p = t.players(i)
    // Initialise both clocks from the table's time control.
    p.timems = timeControlToTimeMs(t.timecontrol)
    p.lasttimems = p.timems
    if (!t.hashuman) t.resetplayers
    t.idle = 0
    t.players(i).valid
  }

  /** Persists a new Glicko rating for the user identified by `uuid`.
    *
    * Previously the resulting Future was discarded, so lookup/save failures
    * vanished silently; the Future is now returned (callers that ignored
    * the result are unaffected) and failures are logged via `recover`.
    */
  def updaterating(uuid: java.util.UUID, g: GlickoData): Future[Unit] = {
    import scala.util.control.NonFatal
    userDAO.find(uuid).flatMap {
      case Some(user) =>
        // Update user with the new rating data.
        userDAO.save(user.copy(
          rating = g.rating,
          rd = g.rd,
          lastrated = g.lastrated
        ))
      case _ =>
        println("could not update user rating")
        Future.successful(())
    }.map(_ => ()).recover {
      // Log instead of silently dropping the failed Future.
      case NonFatal(e) => println(s"could not update user rating: ${e.getMessage}")
    }
  }
}
} | serversideapps/silhmojs | server/app/controllers/ScheduledTaskController/playermanagement.scala | Scala | apache-2.0 | 1,584 |
package tscfg.example
// Typed configuration model for the "scala-example" spec.
// NOTE(review): this looks machine-generated (tscfg-style `$_req*` accessors
// and `$TsCfgValidator`) — prefer regenerating from the spec over hand-editing.
final case class ScalaExampleCfg(
  endpoint : ScalaExampleCfg.Endpoint
)

object ScalaExampleCfg {
  final case class Endpoint(
    intReq : scala.Int,
    interface : ScalaExampleCfg.Endpoint.Interface,
    path : java.lang.String,
    serial : scala.Option[scala.Int],
    url : java.lang.String
  )

  object Endpoint {
    final case class Interface(
      port : scala.Int,
      `type` : scala.Option[java.lang.String]
    )

    object Interface {
      // Builds an Interface; optional keys fall back to defaults (port 8080).
      def apply(c: com.typesafe.config.Config, parentPath: java.lang.String, $tsCfgValidator: $TsCfgValidator): ScalaExampleCfg.Endpoint.Interface = {
        ScalaExampleCfg.Endpoint.Interface(
          port = if(c.hasPathOrNull("port")) c.getInt("port") else 8080,
          `type` = if(c.hasPathOrNull("type")) Some(c.getString("type")) else None
        )
      }
    }

    // Builds an Endpoint. Required keys go through the $_req* helpers, which
    // record failures in the validator instead of throwing immediately.
    def apply(c: com.typesafe.config.Config, parentPath: java.lang.String, $tsCfgValidator: $TsCfgValidator): ScalaExampleCfg.Endpoint = {
      ScalaExampleCfg.Endpoint(
        intReq = $_reqInt(parentPath, c, "intReq", $tsCfgValidator),
        interface = ScalaExampleCfg.Endpoint.Interface(if(c.hasPathOrNull("interface")) c.getConfig("interface") else com.typesafe.config.ConfigFactory.parseString("interface{}"), parentPath + "interface.", $tsCfgValidator),
        path = $_reqStr(parentPath, c, "path", $tsCfgValidator),
        serial = if(c.hasPathOrNull("serial")) Some(c.getInt("serial")) else None,
        url = if(c.hasPathOrNull("url")) c.getString("url") else "http://example.net"
      )
    }

    // Reads a required Int; on failure records the bad path and returns 0.
    private def $_reqInt(parentPath: java.lang.String, c: com.typesafe.config.Config, path: java.lang.String, $tsCfgValidator: $TsCfgValidator): scala.Int = {
      if (c == null) 0
      else try c.getInt(path)
      catch {
        case e:com.typesafe.config.ConfigException =>
          $tsCfgValidator.addBadPath(parentPath + path, e)
          0
      }
    }

    // Reads a required String; on failure records the bad path and returns null.
    private def $_reqStr(parentPath: java.lang.String, c: com.typesafe.config.Config, path: java.lang.String, $tsCfgValidator: $TsCfgValidator): java.lang.String = {
      if (c == null) null
      else try c.getString(path)
      catch {
        case e:com.typesafe.config.ConfigException =>
          $tsCfgValidator.addBadPath(parentPath + path, e)
          null
      }
    }
  }

  // Entry point: parses the root config, then throws a ConfigException
  // listing every invalid path collected during parsing (if any).
  def apply(c: com.typesafe.config.Config): ScalaExampleCfg = {
    val $tsCfgValidator: $TsCfgValidator = new $TsCfgValidator()
    val parentPath: java.lang.String = ""
    val $result = ScalaExampleCfg(
      endpoint = ScalaExampleCfg.Endpoint(if(c.hasPathOrNull("endpoint")) c.getConfig("endpoint") else com.typesafe.config.ConfigFactory.parseString("endpoint{}"), parentPath + "endpoint.", $tsCfgValidator)
    )
    $tsCfgValidator.validate()
    $result
  }

  // Accumulates invalid paths during parsing and reports them all at once.
  final class $TsCfgValidator {
    private val badPaths = scala.collection.mutable.ArrayBuffer[java.lang.String]()

    def addBadPath(path: java.lang.String, e: com.typesafe.config.ConfigException): Unit = {
      badPaths += s"'$path': ${e.getClass.getName}(${e.getMessage})"
    }

    def addInvalidEnumValue(path: java.lang.String, value: java.lang.String, enumName: java.lang.String): Unit = {
      badPaths += s"'$path': invalid value $value for enumeration $enumName"
    }

    def validate(): Unit = {
      if (badPaths.nonEmpty) {
        throw new com.typesafe.config.ConfigException(
          badPaths.mkString("Invalid configuration:\\n ", "\\n ", "")
        ){}
      }
    }
  }
}
| carueda/tscfg | src/test/scala/tscfg/example/ScalaExampleCfg.scala | Scala | apache-2.0 | 3,550 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl;
import builder.RouteBuilder
import org.junit.Test
/** Exercises the Camel Scala DSL pipeline forms: the arrow (-->) and `to`
  * styles, both inline and inside ==> blocks. Each route must deliver one
  * message to every mock endpoint in its pipeline.
  */
class PipelineRouteBuilderTest extends ScalaTestSupport {

  @Test
  def testPipelineWithArrows() = testPipeline("direct:a", "mock:a", "mock:b")

  @Test
  def testPipelineWithTos() = testPipeline("direct:c", "mock:c", "mock:d")

  @Test
  def testPipelineBlockWithArrows() = testPipeline("direct:e", "mock:e", "mock:f")

  @Test
  def testPipelineBlockWithTos() = testPipeline("direct:g", "mock:g", "mock:h")

  // Sends one XML message to `from` and asserts that every listed mock
  // endpoint received exactly one message.
  def testPipeline(from: String, to: String*) = {
    to.foreach {
      _.expect { _.expectedMessageCount(1) }
    }
    from ! "<hello/>"
    to.foreach {
      _.assert()
    }
  }

  // Routes under test; the SNIPPET markers are used by the documentation.
  val builder = new RouteBuilder {
    //START SNIPPET: simple
    "direct:a" --> "mock:a" --> "mock:b"
    "direct:c" to "mock:c" to "mock:d"
    //END SNIPPET: simple
    //START SNIPPET: block
    "direct:e" ==> {
      --> ("mock:e")
      --> ("mock:f")
    }
    "direct:g" ==> {
      to ("mock:g")
      to ("mock:h")
    }
    //END SNIPPET: block
  }
}
| aaronwalker/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/PipelineRouteBuilderTest.scala | Scala | apache-2.0 | 1,864 |
package plp.expressions1.util
import plp.expressions1.expression.{ExpAnd, ExpConcat, ExpEquals, ExpLength, ExpMenos, ExpNot, ExpOr, ExpSoma, ExpSub, Expressao, ValorBooleano, ValorInteiro, ValorString}
/** Visitor over the expression AST of language 1.
  *
  * Implementations provide one `visit` method per concrete node type; `v`
  * packages them into a partial function for dispatching on an arbitrary
  * [[Expressao]].
  *
  * @tparam T result type produced for each visited node
  */
trait Visitor[T] {
  def visit(expAnd: ExpAnd): T
  def visit(expConcat: ExpConcat): T
  def visit(expEquals: ExpEquals): T
  def visit(expLength: ExpLength): T
  def visit(expMenos: ExpMenos): T
  def visit(expNot: ExpNot): T
  def visit(expOr: ExpOr): T
  def visit(expSoma: ExpSoma): T
  def visit(expSub: ExpSub): T
  def visit(valor: ValorBooleano): T
  def visit(valor: ValorInteiro): T
  def visit(valor: ValorString): T

  // Dispatch table: one case per concrete Expressao subtype. The cases are
  // mutually exclusive class matches, so their order does not matter.
  // NOTE(review): undefined for any Expressao subtype not listed here —
  // applying it directly would raise a MatchError; confirm the AST is closed.
  def v: PartialFunction[Expressao, T] = {
    case a: ExpAnd => visit(a)
    case a: ExpOr => visit(a)
    case a: ExpSoma => visit(a)
    case a: ExpSub => visit(a)
    case a: ExpConcat => visit(a)
    case a: ExpEquals => visit(a)
    case a: ExpLength => visit(a)
    case a: ExpMenos => visit(a)
    case a: ValorInteiro => visit(a)
    case a: ValorString => visit(a)
    case a: ValorBooleano => visit(a)
  }
}
} | lrlucena/PLP-Scala | src/plp/expressions1/util/Visitor.scala | Scala | gpl-3.0 | 1,136 |
package com.tribbloids.spookystuff.uav.spatial.util
import com.tribbloids.spookystuff.uav.spatial.TrellisGeom
/** Sentinel TrellisGeom for an unknown geometry; `jtsGeom` is intentionally
  * unimplemented (`???` throws scala.NotImplementedError if ever invoked).
  */
case object UnknownTrellisGeometry extends TrellisGeom {
  override def jtsGeom = ???
}
| tribbloid/spookystuff | uav/src/main/scala/com/tribbloids/spookystuff/uav/spatial/util/UnknownTrellisGeometry.scala | Scala | apache-2.0 | 200 |
/*
* Copyright (C) 2017 Roberto Leibman
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ldap.rfc2830
import asn1.Asn1Application
import asn1.Asn1String
import ldap.ExtendedRequest
import ldap.ExtendedResponse
import ldap.LDAPOID
import ldap.LdapResult
import ldap.MessageProtocolOp
import ldap.Plugin
import asn1.Asn1ContextSpecific
import dao.DAO
import ldap.LdapMessage
import scala.concurrent.Future
import ldap.LDAPResultType
/** LDAP StartTLS extended request (RFC 2830); carries only its OID. */
case object TLSExtendedRequest extends ExtendedRequest {
  override val oid = RFC2830Plugin.oid
}
/** StartTLS extended response carrying the RFC 2830 OID.
  * Always reports success; failure result codes are not modelled yet (see
  * the TODO in RFC2830Plugin.operate).
  */
case object TLSExtendedResponse extends ExtendedResponse {
  override val oid = Some(RFC2830Plugin.oid)
  override val ldapResult: LdapResult =
    LdapResult(LDAPResultType.success, "", "Returned TLS Response")
}
/** LDAP server plugin for the StartTLS extended operation of RFC 2830. */
object RFC2830Plugin extends Plugin {
  // OID registered for the StartTLS extended operation.
  val oid = LDAPOID("1.3.6.1.4.1.1466.20037")

  /** Decodes an ASN.1 application value into a StartTLS request.
    * Tag 23 is the LDAP ExtendedRequest; the OID may arrive either inside a
    * context-specific wrapper or as a plain string. Returns None for any
    * tag/payload this plugin does not recognise.
    */
  override def decodeApplication(applicationAsn1: Asn1Application): Option[MessageProtocolOp] =
    applicationAsn1.tag match {
      case 23 => //Extended
        applicationAsn1.value.toSeq match {
          case Seq(Asn1ContextSpecific(_, value)) =>
            val requestOid = value.map(_.toChar).mkString
            if (requestOid == oid.value)
              Some(TLSExtendedRequest)
            else
              None //Don't know this dude
          case Seq(Asn1String(oid.value), _: Asn1String) ⇒
            Some(TLSExtendedRequest)
          case _ => None //Don't know this dude
        }
      case _ => None //Don't know this dude
    }

  /** Handles a decoded StartTLS request by appending a success response.
    * NOTE(review): the match covers only TLSExtendedRequest; any other
    * protocol op raises a MatchError — presumably the framework only routes
    * ops this plugin decoded, but confirm.
    */
  override def operate(msg: LdapMessage,
                       preResults: Seq[LdapMessage],
                       dao: DAO): Future[Seq[LdapMessage]] =
    msg.protocolOp match {
      case TLSExtendedRequest =>
        /*
         TODO If the Start TLS extended request was not successful, the resultCode
         will be one of:
         operationsError (operations sequencing incorrect; e.g. TLS already established)
         protocolError (TLS not supported or incorrect PDU structure)
         referral (this server doesn't do TLS, try this one)
         unavailable (e.g. some major problem with TLS, or server is shutting down)
         *
         */
        Future.successful {
          preResults :+ LdapMessage(msg.messageId, TLSExtendedResponse)
        }
    }
}
| rleibman/scala-ldap-server | src/main/scala/ldap/rfc2830/RFC2830Plugin.scala | Scala | gpl-3.0 | 2,911 |
package inloopio.util.pinyin
import java.io.FileNotFoundException
import java.io.IOException
import scala.xml.Elem
import scala.xml.Node
import scala.xml.Text
import scala.xml.XML
/**
* A class contains logic that translates from Hanyu Pinyin to Gwoyeu Romatzyh
*
*
*/
object GwoyeuRomatzyhConverter {

  /**
   * A DOM model containing the Hanyu Pinyin to Gwoyeu Romatzyh mapping.
   *
   * The previous try/catch only rethrew every caught exception unchanged,
   * so it has been removed; any load failure still propagates to the caller.
   *
   * NOTE(review): the resource path still points at the old aiotrade package
   * even though this class lives in inloopio — confirm the mapping file is
   * really published under this path.
   */
  private val pinyinToGwoyeuMappingDoc: Elem = {
    val mappingFileName = "org/aiotrade/lib/util/pinyin/pinyin_gwoyeu_mapping.xml"
    XML.load(ResourceHelper.getResourceInputStream(mappingFileName))
  }

  /**
   * The postfixs to distinguish different tone of Gwoyeu Romatzyh
   *
   * <i>Should be removed if new xPath parser supporting tag name with number.</i>
   */
  private val tones = Array("_I", "_II", "_III", "_IV", "_V")

  /**
   * @param hanyuPinyinStr
   *            Given unformatted Hanyu Pinyin with tone number
   * @return Corresponding Gwoyeu Romatzyh; null if no mapping is found.
   */
  def convertHanyuPinyinToGwoyeuRomatzyh(hanyuPinyinStr: String): String = {
    import scala.util.control.NonFatal
    val pinyin = TextHelper.extractPinyinString(hanyuPinyinStr)
    val toneNumber = TextHelper.extractToneNumber(hanyuPinyinStr)
    // return value; stays null when no <item> matches the pinyin.
    var gwoyeuStr: String = null
    try {
      pinyinToGwoyeuMappingDoc \\ "item" foreach { x =>
        if (search(x, pinyin)) {
          gwoyeuStr = (x \\ (PinyinRomanizationType.GWOYEU_ROMATZYH.tagName + tones(Integer.parseInt(toneNumber) - 1))).text
        }
      }
    } catch {
      // Catch only non-fatal errors: the original `case ex: Throwable` would
      // also have swallowed fatal VM errors such as OutOfMemoryError.
      case NonFatal(ex) => ex.printStackTrace()
    }
    gwoyeuStr
  }

  // True when the node is a <Hanyu> element whose text equals `Name`
  // (capitalised parameter so it acts as a stable identifier in the pattern).
  private def search(p: Node, Name: String): Boolean = p match {
    case <Hanyu>{ Text(Name) }</Hanyu> => true
    case _ => false
  }
}
| dcaoyuan/inloopio-libs | inloopio-util/src/main/scala/inloopio/util/pinyin/GwoyeuRomatzyhConverter.scala | Scala | bsd-3-clause | 1,982 |
package org.apache.mesos.chronos.utils
import org.apache.mesos.chronos.scheduler.jobs.{BaseJob, DependencyBasedJob, ScheduleBasedJob}
import org.apache.mesos.chronos.scheduler.jobs.constraints.EqualsConstraint
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider}
/**
* Custom JSON serializer for jobs.
* @author Florian Leibert (flo@leibert.de)
*/
/**
 * Custom JSON serializer for jobs. Writes every BaseJob field into one flat
 * JSON object, then appends either the `parents` array (dependency-based
 * jobs) or the `schedule`/`scheduleTimeZone` fields (schedule-based jobs).
 * @author Florian Leibert (flo@leibert.de)
 */
class JobSerializer extends JsonSerializer[BaseJob] {
  def serialize(baseJob: BaseJob, json: JsonGenerator, provider: SerializerProvider) {
    json.writeStartObject()
    // --- scalar fields, written in a fixed order ---
    json.writeFieldName("name")
    json.writeString(baseJob.name)
    json.writeFieldName("command")
    json.writeString(baseJob.command)
    json.writeFieldName("shell")
    json.writeBoolean(baseJob.shell)
    json.writeFieldName("epsilon")
    json.writeString(baseJob.epsilon.toString)
    json.writeFieldName("executor")
    json.writeString(baseJob.executor)
    json.writeFieldName("executorFlags")
    json.writeString(baseJob.executorFlags)
    json.writeFieldName("retries")
    json.writeNumber(baseJob.retries)
    json.writeFieldName("owner")
    json.writeString(baseJob.owner)
    json.writeFieldName("ownerName")
    json.writeString(baseJob.ownerName)
    json.writeFieldName("description")
    json.writeString(baseJob.description)
    json.writeFieldName("async")
    json.writeBoolean(baseJob.async)
    json.writeFieldName("successCount")
    json.writeNumber(baseJob.successCount)
    json.writeFieldName("errorCount")
    json.writeNumber(baseJob.errorCount)
    json.writeFieldName("lastSuccess")
    json.writeString(baseJob.lastSuccess)
    json.writeFieldName("lastError")
    json.writeString(baseJob.lastError)
    json.writeFieldName("cpus")
    json.writeNumber(baseJob.cpus)
    json.writeFieldName("disk")
    json.writeNumber(baseJob.disk)
    json.writeFieldName("mem")
    json.writeNumber(baseJob.mem)
    json.writeFieldName("disabled")
    json.writeBoolean(baseJob.disabled)
    json.writeFieldName("softError")
    json.writeBoolean(baseJob.softError)
    json.writeFieldName("dataProcessingJobType")
    json.writeBoolean(baseJob.dataProcessingJobType)
    json.writeFieldName("errorsSinceLastSuccess")
    json.writeNumber(baseJob.errorsSinceLastSuccess)
    // --- uris: plain string array ---
    json.writeFieldName("uris")
    json.writeStartArray()
    baseJob.uris.foreach(json.writeString)
    json.writeEndArray()
    // --- environment variables: array of {name, value} objects ---
    json.writeFieldName("environmentVariables")
    json.writeStartArray()
    baseJob.environmentVariables.foreach { v =>
      json.writeStartObject()
      json.writeFieldName("name")
      json.writeString(v.name)
      json.writeFieldName("value")
      json.writeString(v.value)
      json.writeEndObject()
    }
    json.writeEndArray()
    // --- command arguments: plain string array ---
    json.writeFieldName("arguments")
    json.writeStartArray()
    baseJob.arguments.foreach(json.writeString)
    json.writeEndArray()
    json.writeFieldName("highPriority")
    json.writeBoolean(baseJob.highPriority)
    json.writeFieldName("runAsUser")
    json.writeString(baseJob.runAsUser)
    // --- container: only written when present; docker-only for now ---
    if (baseJob.container != null) {
      json.writeFieldName("container")
      json.writeStartObject()
      // TODO: Handle more container types when added.
      json.writeFieldName("type")
      json.writeString("docker")
      json.writeFieldName("image")
      json.writeString(baseJob.container.image)
      json.writeFieldName("network")
      json.writeString(baseJob.container.network.toString)
      // Volumes: hostPath and mode are optional and omitted when absent.
      json.writeFieldName("volumes")
      json.writeStartArray()
      baseJob.container.volumes.foreach { v =>
        json.writeStartObject()
        v.hostPath.foreach { hostPath =>
          json.writeFieldName("hostPath")
          json.writeString(hostPath)
        }
        json.writeFieldName("containerPath")
        json.writeString(v.containerPath)
        v.mode.foreach { mode =>
          json.writeFieldName("mode")
          json.writeString(mode.toString)
        }
        json.writeEndObject()
      }
      json.writeEndArray()
      json.writeEndObject()
    }
    // --- constraints: each as an [attribute, operator, value] triple ---
    // NOTE(review): only EqualsConstraint is handled; any other constraint
    // type would raise a MatchError here — confirm no other kinds exist.
    json.writeFieldName("constraints")
    json.writeStartArray()
    baseJob.constraints.foreach { v =>
      json.writeStartArray()
      v match {
        case EqualsConstraint(attribute, value) =>
          json.writeString(attribute)
          json.writeString(EqualsConstraint.OPERATOR)
          json.writeString(value)
      }
      json.writeEndArray()
    }
    json.writeEndArray()
    // --- job-type-specific tail: parents vs. schedule ---
    baseJob match {
      case depJob: DependencyBasedJob =>
        json.writeFieldName("parents")
        json.writeStartArray()
        depJob.parents.foreach(json.writeString)
        json.writeEndArray()
      case schedJob: ScheduleBasedJob =>
        json.writeFieldName("schedule")
        json.writeString(schedJob.schedule)
        json.writeFieldName("scheduleTimeZone")
        json.writeString(schedJob.scheduleTimeZone)
      case _ =>
        throw new IllegalStateException("The job found was neither schedule based nor dependency based.")
    }
    json.writeEndObject()
  }
}
| anapsix/chronos | src/main/scala/org/apache/mesos/chronos/utils/JobSerializer.scala | Scala | apache-2.0 | 5,070 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.madewithtea.bottledynamo
import io.circe.generic.auto._
import io.circe._
import io.circe.syntax._
import com.twitter.io.{Buf => TBuf}
object Json {

  /** Logs a circe parsing/decoding failure and rethrows it. */
  def onFailure(error: io.circe.Error) = {
    println(error.getMessage)
    error.printStackTrace()
    throw error
  }

  /** JSON codec backed by plain Strings. */
  object String {
    def encode[T](value: T)(implicit encoder: io.circe.Encoder[T]): String =
      value.asJson.noSpaces

    def decode[T](value: String)(implicit dec: io.circe.Decoder[T]): T =
      parser.parse(value).fold(onFailure, { parsed =>
        parsed.as[T].fold[T](onFailure, instance => instance)
      })
  }

  /** JSON codec backed by Twitter Bufs (UTF-8 encoded). */
  object Buf {
    def encode[T](value: T)(implicit encoder: io.circe.Encoder[T]): TBuf = {
      val json = value.asJson.noSpaces
      TBuf.Utf8(json)
    }

    // Previously this used `.right.get` twice, so any parse or decode failure
    // surfaced as an opaque NoSuchElementException. Failures now route
    // through onFailure (logged, then rethrown) like String.decode.
    def decode[T](value: TBuf)(implicit dec: io.circe.Decoder[T]): T = {
      val TBuf.Utf8(json) = value
      parser.parse(json).fold(onFailure, { parsed =>
        parsed.as[T].fold[T](onFailure, instance => instance)
      })
    }
  }
}
| jpzk/bottledynamo | src/main/scala/Serdes.scala | Scala | apache-2.0 | 1,761 |
package mesosphere.marathon
package storage.migration
import java.nio.charset.StandardCharsets
import java.time.OffsetDateTime
import akka.http.scaladsl.marshalling.Marshaller
import akka.http.scaladsl.unmarshalling.Unmarshaller
import akka.stream.Materializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.ByteString
import akka.{Done, NotUsed}
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.core.storage.store.impl.zk.{ZkId, ZkSerialized}
import mesosphere.marathon.core.storage.store.{IdResolver, PersistenceStore}
import mesosphere.marathon.state._
import mesosphere.marathon.storage.repository.{StoredGroup, StoredGroupRepositoryImpl}
import mesosphere.marathon.storage.store.ZkStoreSerialization
import play.api.libs.json._
import scala.async.Async.{async, await}
import scala.concurrent.{ExecutionContext, Future}
/** Migration step for Marathon storage version 1.9.100: stamps the default
  * Mesos role onto every stored app and pod, and disables role enforcement
  * on all groups.
  */
class MigrationTo19100(defaultMesosRole: Role, persistenceStore: PersistenceStore[ZkId, String, ZkSerialized])
    extends MigrationStep
    with StrictLogging {

  override def migrate()(implicit ctx: ExecutionContext, mat: Materializer): Future[Done] =
    async {
      logger.info("Starting migration to 1.9.100")
      // The sub-migrations run sequentially: apps, then pods, then groups.
      await(MigrationTo19100.migrateApps(defaultMesosRole, persistenceStore))
      await(MigrationTo19100.migratePods(defaultMesosRole, persistenceStore))
      await(MigrationTo19100.migrateGroups(persistenceStore))
    }
}
object MigrationTo19100 extends MaybeStore with StrictLogging {
/**
* Set the default role on the app.
*
* @param appProtos The raw app from store.
* @param optVersion The optional version of the app definition.
* @param defaultMesosRole The default Mesos role to use.
* @return The update app definition.
*/
/** Stamps `defaultMesosRole` onto a single stored app definition.
  *
  * Logs the migration, rebuilds the protobuf with the role field set, and
  * returns it paired with the original (untouched) store version.
  *
  * @param appProtos        The raw app from store.
  * @param optVersion       The optional version of the app definition.
  * @param defaultMesosRole The default Mesos role to use.
  * @return The updated app definition and its unchanged version.
  */
def migrateApp(
    appProtos: Protos.ServiceDefinition,
    optVersion: Option[OffsetDateTime],
    defaultMesosRole: Role
): (Protos.ServiceDefinition, Option[OffsetDateTime]) = {
  logger.info(
    s"Migrate App(${appProtos.getId}) with store version $optVersion to role '$defaultMesosRole' (AppVersion: ${appProtos.getVersion})"
  )
  // Only the role field changes; every other field is copied by the builder.
  (appProtos.toBuilder.setRole(defaultMesosRole).build(), optVersion)
}
/**
* Set the default role on the pod.
*
* @param podRaml The raw pod from store.
* @param optVersion The optional version of the pod definition.
* @param defaultMesosRole The default Mesos role to use.
* @return The update pod definition.
*/
/** Stamps `defaultMesosRole` onto a single stored pod definition.
  *
  * Logs the migration and returns a copy of the pod with the role set,
  * paired with the original (untouched) store version.
  *
  * @param podRaml          The raw pod from store.
  * @param optVersion       The optional version of the pod definition.
  * @param defaultMesosRole The default Mesos role to use.
  * @return The updated pod definition and its unchanged version.
  */
def migratePod(podRaml: raml.Pod, optVersion: Option[OffsetDateTime], defaultMesosRole: Role): (raml.Pod, Option[OffsetDateTime]) = {
  logger.info(s"Migrate Pod(${podRaml.id}) with store version $optVersion to role '$defaultMesosRole', (Version: ${podRaml.version})")
  // Only the role field changes.
  val migrated = podRaml.copy(role = Some(defaultMesosRole))
  (migrated, optVersion)
}
/**
* Recursively sets the enforce role parameter to `false` for all groups.
*
* @param group The current group.
* @return The update group with all it's children updated.
*/
/** Recursively sets the enforce-role flag to `false` on `group` and all of
  * its descendants.
  *
  * @param group The current group.
  * @return The updated group with all of its children updated.
  */
def migrateGroup(group: StoredGroup): StoredGroup = {
  // This is not tail-recursive. We might run into a stackoverflow for a
  // pathologically deep group tree.
  group.copy(enforceRole = Some(false), storedGroups = group.storedGroups.map(migrateGroup))
}
/**
* Loads all app definitions from store and sets the role to Marathon's default role.
*
* @param defaultMesosRole The Mesos role define by [[MarathonConf.mesosRole]].
* @param persistenceStore The ZooKeeper storage.
* @return Successful future when done.
*/
/** Loads all app definitions from store and sets the role to Marathon's
  * default role.
  *
  * Every historical version of every app, plus the current (unversioned)
  * state, is loaded; only protos without a role set are rewritten and
  * stored back under the same id/version.
  *
  * @param defaultMesosRole The Mesos role defined by [[MarathonConf.mesosRole]].
  * @param persistenceStore The ZooKeeper storage.
  * @return Successful future when done.
  */
def migrateApps(defaultMesosRole: Role, persistenceStore: PersistenceStore[ZkId, String, ZkSerialized])(implicit
    ctx: ExecutionContext,
    mat: Materializer
): Future[Done] = {
  // (Un)marshalling between the stored ZK byte payload and the protobuf.
  implicit val appProtosUnmarshaller: Unmarshaller[ZkSerialized, Protos.ServiceDefinition] =
    Unmarshaller.strict {
      case ZkSerialized(byteString) => Protos.ServiceDefinition.parseFrom(byteString.toArray)
    }

  implicit val appProtosMarshaller: Marshaller[Protos.ServiceDefinition, ZkSerialized] =
    Marshaller.opaque(appProtos => ZkSerialized(ByteString(appProtos.toByteArray)))

  implicit val appIdResolver: IdResolver[AbsolutePathId, Protos.ServiceDefinition, String, ZkId] =
    new ZkStoreSerialization.ZkPathIdResolver[Protos.ServiceDefinition](
      "apps",
      true,
      AppDefinition.versionInfoFrom(_).version.toOffsetDateTime
    )

  // Side channel that only counts migrated apps and logs the total.
  val countingSink: Sink[Done, NotUsed] = Sink.fold[Int, Done](0) { case (count, Done) => count + 1 }.mapMaterializedValue { f =>
    f.map(i => logger.info(s"$i apps migrated to 1.9.100"))
    NotUsed
  }

  maybeStore(persistenceStore).map { zkStore =>
    zkStore
      .ids()
      // For each app id, emit every historical version plus one "current" entry (None).
      .flatMapConcat(appId => zkStore.versions(appId).map(v => (appId, Some(v))) ++ Source.single((appId, Option.empty[OffsetDateTime])))
      .mapAsync(Migration.maxConcurrency) {
        case (appId, Some(version)) => zkStore.get(appId, version).map(app => (app, Some(version)))
        case (appId, None) => zkStore.get(appId).map(app => (app, Option.empty[OffsetDateTime]))
      }
      // Skip missing entries and apps that already carry a role.
      .collect { case (Some(appProtos), optVersion) if !appProtos.hasRole => (appProtos, optVersion) }
      .map {
        case (appProtos, optVersion) => migrateApp(appProtos, optVersion, defaultMesosRole)
      }
      // Store back under the same id (and version, when present).
      .mapAsync(Migration.maxConcurrency) {
        case (appProtos, Some(version)) => zkStore.store(AbsolutePathId(appProtos.getId), appProtos, version)
        case (appProtos, None) => zkStore.store(AbsolutePathId(appProtos.getId), appProtos)
      }
      .alsoTo(countingSink)
      .runWith(Sink.ignore)
  }.getOrElse {
    // No ZK-backed store available — presumably nothing to migrate here.
    Future.successful(Done)
  }
}
/**
* Loads all pod definitions from store and sets the role to Marathon's default role.
*
* @param defaultMesosRole The Mesos role define by [[MarathonConf.mesosRole]].
* @param persistenceStore The ZooKeeper storage.
* @return Successful future when done.
*/
  def migratePods(defaultMesosRole: Role, persistenceStore: PersistenceStore[ZkId, String, ZkSerialized])(implicit
      ctx: ExecutionContext,
      mat: Materializer
  ): Future[Done] = {
    // Resolver/(un)marshallers wire the generic ZK store to raml.Pod JSON.
    // The boolean `true` presumably enables versioning — TODO confirm against
    // ZkPathIdResolver's constructor.
    implicit val podIdResolver =
      new ZkStoreSerialization.ZkPathIdResolver[raml.Pod]("pods", true, _.version.getOrElse(Timestamp.now().toOffsetDateTime))

    implicit val podJsonUnmarshaller: Unmarshaller[ZkSerialized, raml.Pod] =
      Unmarshaller.strict {
        case ZkSerialized(byteString) => Json.parse(byteString.utf8String).as[raml.Pod]
      }

    implicit val podRamlMarshaller: Marshaller[raml.Pod, ZkSerialized] =
      Marshaller.opaque { podRaml =>
        ZkSerialized(ByteString(Json.stringify(Json.toJson(podRaml)), StandardCharsets.UTF_8.name()))
      }

    // Counts migrated pods purely for the log line; the materialized value is discarded.
    val countingSink: Sink[Done, NotUsed] = Sink.fold[Int, Done](0) { case (count, Done) => count + 1 }.mapMaterializedValue { f =>
      f.map(i => logger.info(s"$i pods migrated to 1.9.100"))
      NotUsed
    }

    maybeStore(persistenceStore).map { zkStore =>
      zkStore
        .ids()
        // For every pod id, emit all stored versions plus `None` for the current (unversioned) pod.
        .flatMapConcat(podId => zkStore.versions(podId).map(v => (podId, Some(v))) ++ Source.single((podId, Option.empty[OffsetDateTime])))
        .mapAsync(Migration.maxConcurrency) {
          case (podId, Some(version)) => zkStore.get(podId, version).map(pod => (pod, Some(version)))
          case (podId, None) => zkStore.get(podId).map(pod => (pod, Option.empty[OffsetDateTime]))
        }
        // Keep only pods that loaded successfully and have no role assigned yet.
        .collect { case (Some(podRaml), optVersion) if podRaml.role.isEmpty => (podRaml, optVersion) }
        .map {
          case (podRaml, optVersion) => migratePod(podRaml, optVersion, defaultMesosRole)
        }
        // Store back under the same version, or as the current pod when None.
        .mapAsync(Migration.maxConcurrency) {
          case (podRaml, Some(version)) => zkStore.store(AbsolutePathId(podRaml.id), podRaml, version)
          case (podRaml, None) => zkStore.store(AbsolutePathId(podRaml.id), podRaml)
        }
        .alsoTo(countingSink)
        .runWith(Sink.ignore)
    }.getOrElse {
      // No ZooKeeper-backed store available: nothing to migrate.
      Future.successful(Done)
    }
  }
  /**
    * Loads every stored version of the root group (plus the current one),
    * applies the role migration to each, and stores the result back.
    *
    * @param persistenceStore The ZooKeeper storage.
    * @return Successful future when done.
    */
  def migrateGroups(
      persistenceStore: PersistenceStore[ZkId, String, ZkSerialized]
  )(implicit ctx: ExecutionContext, mat: Materializer): Future[Done] = {
    import StoredGroupRepositoryImpl.RootId
    import ZkStoreSerialization.{groupIdResolver, groupMarshaller, groupUnmarshaller}

    // Counts migrated groups purely for the log line; the materialized value is discarded.
    val countingSink: Sink[Done, NotUsed] = Sink.fold[Int, Done](0) { case (count, Done) => count + 1 }.mapMaterializedValue { f =>
      f.map(i => logger.info(s"$i groups migrated to 1.9.100"))
      NotUsed
    }

    maybeStore(persistenceStore).map { zkStore =>
      zkStore
        .versions(RootId)
        .map(Some(_))
        // `None` stands for the current (unversioned) root group.
        .concat(Source.single(Option.empty[OffsetDateTime]))
        .mapAsync(Migration.maxConcurrency) {
          case Some(rootGroupVersion) => zkStore.get(RootId, rootGroupVersion).map(group => (group, Some(rootGroupVersion)))
          case None => zkStore.get(RootId).map(group => (group, None))
        }
        // Drop versions that could not be loaded.
        .collect { case (Some(group), optVersion) => (group, optVersion) }
        .map {
          case (rootGroup, optVersion) => (migrateGroup(rootGroup), optVersion)
        }
        // Store back under the same version, or as the current root when None.
        .mapAsync(Migration.maxConcurrency) {
          case (rootGroup, Some(version)) => zkStore.store(RootId, rootGroup, version)
          case (rootGroup, None) => zkStore.store(RootId, rootGroup)
        }
        .alsoTo(countingSink)
        .runWith(Sink.ignore)
    }.getOrElse {
      // No ZooKeeper-backed store available: nothing to migrate.
      Future.successful(Done)
    }
  }
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/storage/migration/MigrationTo19100.scala | Scala | apache-2.0 | 9,524 |
/*
* Copyright 2016 Miroslav Janíček
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.classdump.luna.test
import java.util.Scanner
import org.classdump.luna.compiler.{CompilerChunkLoader, CompilerSettings}
import org.classdump.luna.compiler.CompilerSettings.CPUAccountingMode
import org.classdump.luna.env.RuntimeEnvironments
import org.classdump.luna.exec.DirectCallExecutor
import org.classdump.luna.impl.StateContexts
import org.classdump.luna.lib.{TableLib, _}
import org.classdump.luna.load.{ChunkClassLoader, ChunkLoader}
import org.classdump.luna.runtime.LuaFunction
import org.classdump.luna.{StateContext, Table, Variable}
import scala.util.Try
/**
  * Command-line harness that compiles and executes Lua benchmark programs,
  * printing wall-clock timings for each run. Behaviour is configured through
  * JVM system properties (see the *PropertyName constants below).
  */
object BenchmarkRunner {

  val dirPrefix = "/benchmarksgame/"

  // System-property names and their defaults.
  val NumOfRunsPropertyName = "numRuns"
  val DefaultNumOfRuns = 3

  val StepSizePropertyName = "stepSize"
  val DefaultStepSize = 1000000

  val NoCPUAccountingPropertyName = "noCPUAccounting"
  val DefaultNoCPUAccounting = false

  val ConstFoldingPropertyName = "constFolding"
  val ConstCachingPropertyName = "constCaching"

  /**
    * Compiles and executes the benchmark in `filename` once, printing the
    * elapsed execution time. `stepSize` is currently only used by the
    * commented-out CPU-accounting statistics below.
    */
  def doFile(prefix: String, stepSize: Int, settings: CompilerSettings, filename: String, args: String*): Unit = {
    def initCall() = timed(prefix + "init") {
      init(settings, filename, args: _*)
    }

    val c = initCall()

    val executor = DirectCallExecutor.newExecutor()

    val before = System.nanoTime()
    executor.call(c.state, c.fn)
    val after = System.nanoTime()

    val totalTimeMillis = (after - before) / 1000000.0
    // val totalCPUUnitsSpent = pc.totalCost
    // val avgTimePerCPUUnitNanos = (after - before).toDouble / totalCPUUnitsSpent.toDouble
    // val avgCPUUnitsPerSecond = (1000000000.0 * totalCPUUnitsSpent) / (after - before)

    println(prefix + "Execution took %.1f ms".format(totalTimeMillis))
    if (settings.cpuAccountingMode() != CPUAccountingMode.NO_CPU_ACCOUNTING) {
      // println(prefix + "Total CPU cost: " + pc.totalCost + " LI")
      // println(prefix)
      // println(prefix + "Step size: " + stepSize + " LI")
      // println(prefix + "Num of steps: " + steps)
      // println(prefix + "Avg time per step: %.3f ms".format(totalTimeMillis / steps))
      // println(prefix)
      // println(prefix + "Avg time per unit: %.2f ns".format(avgTimePerCPUUnitNanos))
      // println(prefix + "Avg units per second: %.1f LI/s".format(avgCPUUnitsPerSecond))
    }
    println()
  }

  /**
    * Loads the Lua source `filename` from the classpath and compiles it into a
    * callable main chunk together with a fresh state context.
    */
  def init(settings: CompilerSettings, filename: String, args: String*) = {
    val resourceStream = getClass.getResourceAsStream(filename)
    require(resourceStream != null, "resource must exist, is null")
    val sourceContents = new Scanner(resourceStream, "UTF-8").useDelimiter("\\\\A").next()
    require(sourceContents != null, "source contents must not be null")

    val ldr = CompilerChunkLoader.of(new ChunkClassLoader(), settings, "benchmark_")

    val state = StateContexts.newDefaultInstance()
    val env = initEnv(state, ldr, args)

    val func = ldr.loadTextChunk(new Variable(env), "benchmarkMain", sourceContents)

    EnvWithMainChunk(state, func)
  }

  /**
    * Builds the global environment: installs the standard Lua libraries and
    * the 1-indexed `arg` table holding the command-line arguments.
    */
  def initEnv(context: StateContext, loader: ChunkLoader, args: Seq[String]): Table = {
    val runtimeEnv = RuntimeEnvironments.system()
    val env = context.newTable()

    BasicLib.installInto(context, env, runtimeEnv, loader)
    ModuleLib.installInto(context, env, runtimeEnv, loader, getClass.getClassLoader)
    CoroutineLib.installInto(context, env)
    MathLib.installInto(context, env)
    StringLib.installInto(context, env)
    IoLib.installInto(context, env, runtimeEnv)
    OsLib.installInto(context, env, runtimeEnv)
    Utf8Lib.installInto(context, env)
    TableLib.installInto(context, env)
    DebugLib.installInto(context, env)

    // command-line arguments (Lua's `arg` table is 1-indexed)
    val argTable = context.newTable()
    for ((a, i) <- args.zipWithIndex) {
      argTable.rawset(i + 1, a)
    }
    env.rawset("arg", argTable)

    env
  }

  /** Evaluates `body`, printing how long it took, and returns its result. */
  def timed[A](name: String)(body: => A): A = {
    val before = System.nanoTime()
    val result = body
    val after = System.nanoTime()

    val totalTimeMillis = (after - before) / 1000000.0
    println("%s took %.1f ms".format(name, totalTimeMillis))
    result
  }

  def main(args: Array[String]): Unit = {
    getSetup(args) match {
      case Some(setup) =>
        val numRuns = intProperty(NumOfRunsPropertyName, DefaultNumOfRuns)
        val stepSize = intProperty(StepSizePropertyName, DefaultStepSize)
        val noCPUAccounting = booleanProperty(NoCPUAccountingPropertyName, DefaultNoCPUAccounting)
        val constFolding = optBooleanProperty(ConstFoldingPropertyName)
        val constCaching = optBooleanProperty(ConstCachingPropertyName)

        val requestedSettings = RequestedCompilerSettings(noCPUAccounting, constFolding, constCaching)
        val actualSettings = requestedSettings.toCompilerSettings

        val bm = Benchmark(dirPrefix + setup.benchmarkFile)

        // Echo the effective configuration before running.
        println("file = \\"" + bm.fileName + "\\"")
        println("arguments = {")
        for (a <- setup.args) {
          println("\\t\\"" + a + "\\"")
        }
        println("}")
        println(NumOfRunsPropertyName + " = " + numRuns)
        println(NoCPUAccountingPropertyName + " = " + requestedSettings.noCPUAccounting + " (" + actualSettings.cpuAccountingMode() + ")")
        println(ConstFoldingPropertyName + " = " + requestedSettings.constFolding + " (" + actualSettings.constFolding() + ")")
        println(ConstCachingPropertyName + " = " + requestedSettings.constCaching + " (" + actualSettings.constCaching() + ")")
        if (!noCPUAccounting) {
          println(StepSizePropertyName + " = " + stepSize)
        }
        println()

        for (i <- 1 to numRuns) {
          val prefix = s"#$i\\t"
          bm.go(prefix, stepSize, actualSettings, setup.args: _*)
        }

      case None =>
        println("Usage: java " + getClass.getName + " BENCHMARK-FILE [ARG[S...]]")
        println("Use the \\"" + NumOfRunsPropertyName + "\\" VM property to set the number of runs (default is " + DefaultNumOfRuns + ").")
        println("        \\"" + StepSizePropertyName + "\\" VM property to set the step size (default is " + DefaultStepSize + ").")
        println("        \\"" + NoCPUAccountingPropertyName + "\\" VM property (true/false) to turn off CPU accounting (default is " + DefaultNoCPUAccounting + ")")
        System.exit(1)
    }
  }

  /** Reads an integer system property, falling back to `default` when absent or unparseable. */
  protected def intProperty(key: String, default: Int): Int = {
    Option(System.getProperty(key)) flatMap { s => Try(s.toInt).toOption } getOrElse default
  }

  /**
    * Reads a boolean system property.
    *
    * Fixed: the `default` argument used to be ignored — an absent or
    * non-"true" property always yielded `false`. It now honours an explicit
    * "true"/"false" value and falls back to `default` otherwise, consistent
    * with [[intProperty]] and [[stringProperty]].
    */
  protected def booleanProperty(key: String, default: Boolean): Boolean = {
    optBooleanProperty(key).getOrElse(default)
  }

  /** Reads an optional boolean system property: Some only for an explicit "true"/"false". */
  protected def optBooleanProperty(key: String): Option[Boolean] = {
    Option(System.getProperty(key)) match {
      case Some("true") => Some(true)
      case Some("false") => Some(false)
      case _ => None
    }
  }

  /** Interprets the command line: first argument is the benchmark file, the rest are its arguments. */
  private def getSetup(args: Array[String]): Option[Setup] = {
    args.toList match {
      case fileName :: tail => Some(Setup(fileName, tail))
      case _ => None
    }
  }

  /** Reads a string system property, falling back to `default` when absent. */
  protected def stringProperty(key: String, default: String): String = {
    Option(System.getProperty(key)) getOrElse default
  }

  /** Compiler options requested via system properties; None means "use the compiler default". */
  case class RequestedCompilerSettings(
      noCPUAccounting: Boolean,
      constFolding: Option[Boolean],
      constCaching: Option[Boolean]
  ) {
    def toCompilerSettings: CompilerSettings = {
      val s0 = CompilerSettings.defaultSettings()
      val s1 = if (noCPUAccounting) s0.withCPUAccountingMode(CompilerSettings.CPUAccountingMode.NO_CPU_ACCOUNTING) else s0
      val s2 = constFolding match {
        case Some(v) => s1.withConstFolding(v)
        case _ => s1
      }
      val s3 = constCaching match {
        case Some(v) => s2.withConstCaching(v)
        case _ => s2
      }
      s3
    }
  }

  /** A benchmark source file on the classpath. */
  case class Benchmark(fileName: String) {
    def go(prefix: String, stepSize: Int, settings: CompilerSettings, args: String*): Unit = {
      doFile(prefix, stepSize, settings, fileName, args: _*)
    }
  }

  /** A compiled main chunk together with the state context it runs in. */
  case class EnvWithMainChunk(state: StateContext, fn: LuaFunction[_, _, _, _, _])

  private case class Setup(benchmarkFile: String, args: Seq[String]) {
  }

}
| kroepke/luna | luna-tests/src/test/scala/org/classdump/luna/test/BenchmarkRunner.scala | Scala | apache-2.0 | 8,965 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.executionplan.builders
import org.neo4j.cypher.internal.compiler.v2_3.commands.expressions.{Identifier, Literal, Property, RangeFunction}
import org.neo4j.cypher.internal.compiler.v2_3.commands.values.TokenType.PropertyKey
import org.neo4j.cypher.internal.compiler.v2_3.commands.{ReturnItem, Slice, SortItem}
import org.neo4j.cypher.internal.compiler.v2_3.executionplan.{ExecutionPlanInProgress, PartiallySolvedQuery}
import org.neo4j.cypher.internal.compiler.v2_3.mutation.{CreateNode, ForeachAction}
import org.neo4j.cypher.internal.compiler.v2_3.pipes.{EmptyResultPipe, SingleRowPipe}
class EmptyResultBuilderTest extends BuilderTest {

  def builder = new EmptyResultBuilder

  // A query whose only outstanding work is a FOREACH update — the one shape
  // the EmptyResultBuilder is expected to accept.
  private def queryWithForeachUpdate: PartiallySolvedQuery =
    PartiallySolvedQuery().copy(
      updates = Seq(Unsolved(ForeachAction(RangeFunction(Literal(0), Literal(1), Literal(1)), "n", List(CreateNode("p", Map(), List())))))
    )

  test("should add empty result pipe") {
    val plan = assertAccepts(queryWithForeachUpdate)

    plan.pipe shouldBe an[EmptyResultPipe]
  }

  test("should reject when empty result pipe already planned") {
    // The pipe is already an EmptyResultPipe, so the builder must not add another.
    val inProgress = ExecutionPlanInProgress(queryWithForeachUpdate, EmptyResultPipe(SingleRowPipe()), isUpdating = true)

    assertRejects(inProgress)
  }

  test("should reject when sorting should be done") {
    assertRejects(PartiallySolvedQuery().copy(
      sort = Seq(Unsolved(SortItem(Property(Identifier("x"), PropertyKey("y")), ascending = true))),
      extracted = true
    ))
  }

  test("should reject when skip should be done") {
    assertRejects(PartiallySolvedQuery().copy(
      slice = Some(Unsolved(Slice(Some(Literal(10)), None)))
    ))
  }

  test("should reject when limit should be done") {
    assertRejects(PartiallySolvedQuery().copy(
      slice = Some(Unsolved(Slice(None, Some(Literal(10)))))
    ))
  }

  test("should reject when both skip and limit should be done") {
    assertRejects(PartiallySolvedQuery().copy(
      slice = Some(Unsolved(Slice(Some(Literal(42)), Some(Literal(42)))))
    ))
  }

  test("should reject when query has something to return") {
    assertRejects(PartiallySolvedQuery().copy(
      returns = Seq(Unsolved(ReturnItem(Literal("foo"), "foo")))
    ))
  }

  test("should reject when query has tail") {
    assertRejects(PartiallySolvedQuery().copy(
      tail = Some(PartiallySolvedQuery())
    ))
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/executionplan/builders/EmptyResultBuilderTest.scala | Scala | apache-2.0 | 3,778 |
package edu.gemini.spModel.core
import scalaz._, Scalaz._
/** Newtype for `Angle`, tagging it as a right ascension. */
/** Newtype for `Angle`, tagging it as a right ascension. */
sealed trait RightAscension extends java.io.Serializable {

  /**
   * This `RightAscension` as an untagged `Angle`.
   * @group Conversions
   */
  def toAngle: Angle

  /**
   * Offset this `RightAscension` by the given angle.
   * @group Operations
   */
  def offset(a: Angle): RightAscension = {
    val shifted = toAngle + a
    RightAscension.fromAngle(shifted)
  }

  /** @group Overrides */
  override final def toString = s"RA($toAngle)"

  /** @group Overrides */
  override final def equals(a: Any) = a match {
    case that: RightAscension => that.toAngle == this.toAngle
    case _                    => false
  }

  /** @group Overrides */
  override final def hashCode = toAngle.hashCode

}
object RightAscension {

  /**
   * Construct a `RightAscension` from an `Angle`.
   * @group Constructors
   */
  def fromAngle(a: Angle): RightAscension =
    new RightAscension { val toAngle = a }

  /**
   * Construct a `RightAscension` from the given value in degrees, which will be normalized to [0, 360).
   * @group Constructors
   */
  def fromDegrees(d: Double): RightAscension = fromAngle(Angle.fromDegrees(d))

  /**
   * Construct a `RightAscension` from the given value in hours, which will be normalized to [0, 24).
   * @group Constructors
   */
  def fromHours(h: Double): RightAscension = fromAngle(Angle.fromHours(h))

  /**
   * The `RightAscension` at zero degrees.
   * @group Constructors
   */
  val zero: RightAscension = fromAngle(Angle.zero)

  /** @group Typeclass Instances */
  implicit val RightAscensionOrder: Order[RightAscension] = Order.orderBy(_.toAngle)

  /** @group Typeclass Instances */
  implicit val RightAscensionOrdering: scala.Ordering[RightAscension] = scala.Ordering.by(_.toAngle)

}
| arturog8m/ocs | bundle/edu.gemini.spModel.core/src/main/scala/edu/gemini/spModel/core/RightAscension.scala | Scala | bsd-3-clause | 1,866 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.ReplicationTable
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: eduardo.moreno@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by eduardo.moreno@e-evolution.com , www.e-evolution.com
*/
/**
* Replication Table Repository
* @param session
* @param executionContext
*/
class ReplicationTableRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.ReplicationTableRepository[ReplicationTable , Int]
    with ReplicationTableMapping {

  /**
    * Gets a Replication Table by its integer id.
    * The returned future fails with NoSuchElementException when no row matches.
    */
  def getById(id: Int): Future[ReplicationTable] = {
    Future(run(queryReplicationTable.filter(_.replicationTableId == lift(id))).headOption.get)
  }

  /**
    * Gets a Replication Table by its UUID.
    * The returned future fails with NoSuchElementException when no row matches.
    */
  def getByUUID(uuid: UUID): Future[ReplicationTable] = {
    Future(run(queryReplicationTable.filter(_.uuid == lift(uuid.toString))).headOption.get)
  }

  /**
    * Gets the Replication Tables for the given id.
    * Fixed: the previous implementation ignored `id` and returned every row.
    */
  def getByReplicationTableId(id : Int) : Future[List[ReplicationTable]] = {
    Future(run(queryReplicationTable.filter(_.replicationTableId == lift(id))))
  }

  /** Gets all Replication Tables. */
  def getAll() : Future[List[ReplicationTable]] = {
    Future(run(queryReplicationTable))
  }

  /**
    * Gets one page of Replication Tables.
    *
    * @param page zero-based page index
    * @param pageSize number of elements per page
    */
  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[ReplicationTable]] = {
    val offset = page * pageSize
    val limit = (page + 1) * pageSize
    for {
      count <- countReplicationTable()
      elements <- if (offset > count) Future.successful(Nil)
      else selectReplicationTable(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }

  // Total number of rows, used to short-circuit out-of-range pages.
  private def countReplicationTable() = {
    Future(run(queryReplicationTable.size).toInt)
  }

  /**
    * Selects the rows in the half-open index range [offset, limit).
    * Fixed: `drop(offset).take(limit)` treated `limit` (an end index) as a
    * count and could return up to a whole extra page of rows; `slice` applies
    * the intended range.
    */
  private def selectReplicationTable(offset: Int, limit: Int): Future[Seq[ReplicationTable]] = {
    Future(run(queryReplicationTable).slice(offset, limit).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/ReplicationTableRepository.scala | Scala | gpl-3.0 | 2,870 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with BeforeAndAfterAll {

  // Create the target table once for the whole suite.
  override def beforeAll {
    sql("CREATE table TestLoadTableOptions (ID int, date String, country String, name String," +
      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'")
  }

  override def afterAll {
    sql("drop table TestLoadTableOptions")
  }

  // Helpers issuing CREATE TABLE statements that the DDL validator must reject.

  def buildTableWithNoExistDictExclude() = {
    sql(
      """
           CREATE TABLE IF NOT EXISTS t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           STORED BY 'org.apache.carbondata.format'
           TBLPROPERTIES('DICTIONARY_EXCLUDE'='country,phonetype,CCC')
      """)
  }

  def buildTableWithNoExistDictInclude() = {
    sql(
      """
           CREATE TABLE IF NOT EXISTS t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           STORED BY 'org.apache.carbondata.format'
           TBLPROPERTIES('DICTIONARY_INCLUDE'='AAA,country')
      """)
  }

  def buildTableWithSameDictExcludeAndInclude() = {
    sql(
      """
           CREATE TABLE IF NOT EXISTS t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           STORED BY 'org.apache.carbondata.format'
           TBLPROPERTIES('DICTIONARY_INCLUDE'='country','DICTIONARY_EXCLUDE'='country')
      """)
  }

  def buildTableWithSameDictExcludeAndIncludeWithSpaces() = {
    sql(
      """
           CREATE TABLE IF NOT EXISTS t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           STORED BY 'org.apache.carbondata.format'
           TBLPROPERTIES('DICTIONARY_INCLUDE'='country','DICTIONARY_EXCLUDE'='country ')
      """)
  }

  // All negative tests use intercept: previously several of them passed
  // silently when NO exception was thrown, and `case _: Throwable =>
  // assert(false)` swallowed the real failure details. intercept fails the
  // test if nothing is thrown and lets unexpected exception types propagate.

  test("test load data with dictionary exclude columns which no exist in table.") {
    val e = intercept[MalformedCarbonCommandException] {
      buildTableWithNoExistDictExclude()
    }
    assert(e.getMessage.equals("DICTIONARY_EXCLUDE column: ccc does not exist in table. " +
      "Please check create table statement."))
  }

  test("test load data with dictionary include columns which no exist in table.") {
    val e = intercept[MalformedCarbonCommandException] {
      buildTableWithNoExistDictInclude()
    }
    assert(e.getMessage.equals("DICTIONARY_INCLUDE column: aaa does not exist in table. " +
      "Please check create table statement."))
  }

  test("test load data with dictionary include is same with dictionary exclude") {
    val e = intercept[MalformedCarbonCommandException] {
      buildTableWithSameDictExcludeAndInclude()
    }
    assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same column: country " +
      "with DICTIONARY_INCLUDE. Please check create table statement."))
  }

  test("test load data with invalid option") {
    val e = intercept[MalformedCarbonCommandException] {
      sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE " +
        "TestLoadTableOptions OPTIONS('QUOTECHAR'='\"', 'DELIMITERRR' = ',')")
    }
    assert(e.getMessage.equals("Error: Invalid option(s): delimiterrr"))
  }

  test("test load data with duplicate options") {
    val e = intercept[MalformedCarbonCommandException] {
      sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE " +
        "TestLoadTableOptions OPTIONS('DELIMITER' = ',', 'quotechar'='\"', 'DELIMITER' = '$')")
    }
    assert(e.getMessage.equals("Error: Duplicate option(s): delimiter"))
  }

  test("test load data with case sensitive options") {
    // Option keys are case-insensitive, so this load must succeed;
    // any exception propagates and fails the test directly.
    sql(
      s"LOAD DATA local inpath '$resourcesPath/dataretention1.csv' INTO table " +
      "TestLoadTableOptions options('DeLIMITEr'=',', 'qUOtECHAR'='\"')"
    )
  }

  test("test load data with dictionary include is same with dictionary exclude with spaces") {
    val e = intercept[MalformedCarbonCommandException] {
      buildTableWithSameDictExcludeAndIncludeWithSpaces()
    }
    assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same column: country " +
      "with DICTIONARY_INCLUDE. Please check create table statement."))
  }
}
| Sephiroth-Lin/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala | Scala | apache-2.0 | 5,855 |
package monocle.law
import monocle.Optional
import monocle.internal.IsEq
import scalaz.Id._
import scalaz.Tags.First
import scalaz.std.option._
import scalaz.syntax.std.option._
import scalaz.syntax.tag._
import scalaz.{@@, Const}
/**
 * Laws that a well-behaved `Optional` must satisfy.
 * Each method returns an `IsEq` pair whose two sides must evaluate equal.
 */
case class OptionalLaws[S, A](optional: Optional[S, A]) {

  import IsEq.syntax

  /** Setting back what was read out is a no-op. */
  def getOptionSet(s: S): IsEq[S] =
    optional.getOrModify(s).fold(identity, optional.set(_)(s)) <==> s

  /** What you set is what you subsequently get (when the target exists). */
  def setGetOption(s: S, a: A): IsEq[Option[A]] =
    optional.getOption(optional.set(a)(s)) <==> optional.getOption(s).map(_ => a)

  /** Setting twice is the same as setting once. */
  def setIdempotent(s: S, a: A): IsEq[S] =
    optional.set(a)(optional.set(a)(s)) <==> optional.set(a)(s)

  /** Modifying with identity changes nothing. */
  def modifyIdentity(s: S): IsEq[S] =
    optional.modify(identity)(s) <==> s

  /** Consecutive modifications fuse into a single composed modification. */
  def composeModify(s: S, f: A => A, g: A => A): IsEq[S] =
    optional.modify(g)(optional.modify(f)(s)) <==> optional.modify(g compose f)(s)

  /** `set` coincides with a constant `modify`. */
  def consistentSetModify(s: S, a: A): IsEq[S] =
    optional.set(a)(s) <==> optional.modify(_ => a)(s)

  /** `modify` coincides with `modifyF` instantiated at the Id functor. */
  def consistentModifyModifyId(s: S, f: A => A): IsEq[S] =
    optional.modify(f)(s) <==> optional.modifyF(a => id.point(f(a)))(s)
def consistentGetOptionModifyId(s: S): IsEq[Option[A]] =
optional.getOption(s) <==> optional.modifyF[Const[Option[A] @@ First, ?]](a => Const(Some(a).first))(s).getConst.unwrap
} | rperry/Monocle | core/shared/src/main/scala/monocle/law/OptionalLaws.scala | Scala | mit | 1,305 |
package org.phenopackets.pxftools.util
import java.io.InputStream
import java.util.UUID
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.phenopackets.api.PhenoPacket
import org.phenopackets.api.io.RdfReader
import org.phenopackets.api.util.ContextUtil
import org.phenopackets.pxftools.util.PhenoPacketVocabulary._
import org.phenoscape.scowl._
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.model.AxiomType
import org.semanticweb.owlapi.model.IRI
import com.github.jsonldjava.core.Context
import com.github.tototoshi.csv.CSVReader
import com.github.tototoshi.csv.TSVFormat
import com.hp.hpl.jena.rdf.model.ModelFactory
import com.hp.hpl.jena.rdf.model.Resource
import com.hp.hpl.jena.rdf.model.ResourceFactory
import com.hp.hpl.jena.rdf.model.Statement
import com.hp.hpl.jena.vocabulary.RDF
import com.hp.hpl.jena.vocabulary.RDFS
import com.typesafe.scalalogging.LazyLogging
object HPOAnnotations extends LazyLogging {
  /** Reads a PhenoPacket from a TSV-formatted HPO annotation stream (decoded as UTF-8). */
  def read(stream: InputStream): PhenoPacket = importFromTable(CSVReader.open(scala.io.Source.fromInputStream(stream, "utf-8"))(new TSVFormat {}))
def importFromTable(table: CSVReader): PhenoPacket = {
val packetURI = s"http://model.geneontology.org/${UUID.randomUUID.toString}"
val packet = ResourceFactory.createResource(packetURI)
val triples = table.iteratorWithHeaders.flatMap(rowToTriples(_, packet)).toSeq
val model = ModelFactory.createDefaultModel()
model.add(triples.asJava)
RdfReader.readModel(model, packetURI)
}
  /**
    * Translates one annotation row into RDF statements attached to `packet`.
    * A row without a "Disease ID" produces no statements at all; every other
    * column is optional and silently skipped when absent.
    * NOTE(review): `getOpt` looks like a project extension on Map — confirm.
    */
  private def rowToTriples(row: Map[String, String], packet: Resource): Set[Statement] = {
    val statements = mutable.Set.empty[Statement]
    row.getOpt("Disease ID").foreach { diseaseID =>
      //FIXME this next line causes a pause the first time through
      val disease = ResourceFactory.createResource(ContextUtil.expandIdentifierAsValue(tweakID(diseaseID.trim), HPOContext))
      statements += ResourceFactory.createStatement(packet, Diseases, disease)
      row.getOpt("Disease Name").foreach { diseaseLabel =>
        statements += ResourceFactory.createStatement(disease, RDFS.label, ResourceFactory.createTypedLiteral(diseaseLabel.trim))
      }
      // Anonymous association node linking the disease to its phenotype.
      val association = ResourceFactory.createResource()
      statements += ResourceFactory.createStatement(packet, PhenotypeProfile, association)
      statements += ResourceFactory.createStatement(association, Entity, disease)
      val phenotype = ResourceFactory.createResource()
      statements += ResourceFactory.createStatement(association, Phenotype, phenotype)
      row.getOpt("Phenotype ID").foreach { phenotypeID =>
        val phenotypeType = ResourceFactory.createResource(ContextUtil.expandIdentifierAsValue(phenotypeID.trim, HPOContext))
        // A "NOT" negation marks an absent phenotype, modelled via the
        // complement of the phenotype class instead of a plain rdf:type.
        val phenoRelation = if (row.getOpt("Negation ID").exists(_.trim.toUpperCase == "NOT")) {
          OWLComplementOf
        } else RDF.`type`
        statements += ResourceFactory.createStatement(phenotype, phenoRelation, phenotypeType)
        row.getOpt("Phenotype Name").foreach { phenotypeLabel =>
          statements += ResourceFactory.createStatement(phenotypeType, RDFS.label, ResourceFactory.createTypedLiteral(phenotypeLabel.trim))
        }
      }
      row.getOpt("Age of Onset ID").foreach { onsetID =>
        val onsetType = ResourceFactory.createResource(ContextUtil.expandIdentifierAsValue(onsetID.trim, HPOContext))
        val onset = ResourceFactory.createResource()
        statements += ResourceFactory.createStatement(phenotype, Onset, onset)
        statements += ResourceFactory.createStatement(onset, RDF.`type`, onsetType)
        row.getOpt("Age of Onset Name").foreach { onsetLabel =>
          statements += ResourceFactory.createStatement(onsetType, RDFS.label, ResourceFactory.createTypedLiteral(onsetLabel.trim))
        }
      }
      // Frequency is kept as a free-text description node.
      row.getOpt("Frequency").foreach { frequencyDesc =>
        val frequency = ResourceFactory.createResource()
        statements += ResourceFactory.createStatement(phenotype, Frequency, frequency)
        statements += ResourceFactory.createStatement(frequency, Description, ResourceFactory.createTypedLiteral(frequencyDesc.trim))
      }
      row.getOpt("Description").foreach { description =>
        statements += ResourceFactory.createStatement(phenotype, Description, ResourceFactory.createTypedLiteral(description.trim))
      }
      // One evidence node per row if either an evidence code or a publication is given.
      if (row.getOpt("Evidence ID").nonEmpty || row.getOpt("Pub").nonEmpty) { //FIXME handle semicolon?
        val evidence = ResourceFactory.createResource()
        statements += ResourceFactory.createStatement(association, Evidence, evidence)
        row.getOpt("Evidence ID").foreach { evidenceID =>
          val evidenceTypeOpt = evidenceCodesToURI(evidenceID.trim)
          // Unknown codes are kept verbatim as a resource URI, with a warning.
          val evidenceType = evidenceTypeOpt.getOrElse {
            logger.warn(s"No IRI found for evidence code $evidenceID")
            ResourceFactory.createResource(evidenceID.trim)
          }
          statements += ResourceFactory.createStatement(evidence, RDF.`type`, evidenceType)
          row.getOpt("Evidence Name").foreach { evidenceName =>
            statements += ResourceFactory.createStatement(evidenceType, RDFS.label, ResourceFactory.createTypedLiteral(evidenceName.trim))
          }
        }
        row.getOpt("Pub").foreach { pubID =>
          val pub = ResourceFactory.createResource(ContextUtil.expandIdentifierAsValue(tweakID(pubID.trim), HPOContext))
          statements += ResourceFactory.createStatement(evidence, Source, pub)
        }
      }
      row.getOpt("Assigned by").foreach { contributorID =>
        // Unknown contributors are likewise kept verbatim, with a warning.
        val contributor = knownContributors.get(contributorID.trim).getOrElse {
          logger.warn(s"No IRI found for contributor $contributorID")
          ResourceFactory.createResource(contributorID.trim)
        }
        statements += ResourceFactory.createStatement(association, Contributor, contributor)
      }
      row.getOpt("Date Created").flatMap(processDate).foreach { date =>
        statements += ResourceFactory.createStatement(association, Date, ResourceFactory.createTypedLiteral(date))
      }
    }
    statements.toSet
  }
  // JSON-LD context used to expand CURIE prefixes (HP:, OMIM:, DOID:, ...)
  // into full OBO PURLs; MIM is an alias for OMIM.
  private val HPOContext: Context = new Context().parse(Map[String, Object](
    "obo" -> "http://purl.obolibrary.org/obo/",
    "HP" -> "obo:HP_",
    "OMIM" -> "obo:OMIM_",
    "MIM" -> "obo:OMIM_",
    "DOID" -> "obo:DOID_",
    "DECIPHER" -> "obo:DECIPHER_").asJava)
  // FIXME add in appropriate IRIs once these are available
  // Hand-maintained map from HPO shorthand evidence codes to ECO class IRIs.
  // Codes without a known mapping stay commented out until IRIs exist.
  private val knownEvidenceCodes: Map[String, Resource] = Map(
    // "ICE" -> ResourceFactory.createResource(""),
    "IEA" -> ResourceFactory.createResource("http://purl.obolibrary.org/obo/ECO_0000501"),
    // "ITM" -> ResourceFactory.createResource(""),
    // "PCS" -> ResourceFactory.createResource(""),
    // "TAE" -> ResourceFactory.createResource(""),
    // "TEA" -> ResourceFactory.createResource(""),
    "TAS" -> ResourceFactory.createResource("http://purl.obolibrary.org/obo/ECO_0000304"))
  /**
   * HPO annotations use shorthand labels as evidence IDs; translate such a
   * shorthand code (e.g. "IEA", "TAS") into an ECO class resource if known.
   *
   * @param code shorthand evidence code from the annotation file
   * @return the mapped ECO `Resource`, or `None` for unknown codes
   */
  private def evidenceCodesToURI(code: String): Option[Resource] = {
    // Avoiding loading of ECO for now since most of the evidence codes in the data are not there
    // knownEvidenceCodes.get(code).orElse(evidenceCodesToURIFromECO.get(code))
    knownEvidenceCodes.get(code)
  }
  // Lazily downloads the ECO ontology and indexes its classes by exact-synonym
  // text, so shorthand evidence labels could be resolved to ECO IRIs.
  // NOTE(review): currently unused — lookup is disabled in evidenceCodesToURI.
  private lazy val evidenceCodesToURIFromECO: Map[String, Resource] = {
    logger.info("Downloading ECO to look for evidence code mappings.")
    val manager = OWLManager.createOWLOntologyManager()
    val eco = manager.loadOntology(IRI.create("http://purl.obolibrary.org/obo/eco.owl"))
    val HasExactSynonym = AnnotationProperty("http://www.geneontology.org/formats/oboInOwl#hasExactSynonym")
    (for {
      // DSL extractor: keep only hasExactSynonym annotation assertions whose
      // subject is an IRI and whose value is a typed literal.
      AnnotationAssertion(_, HasExactSynonym, term: IRI, synonym ^^ dt) <- eco.getAxioms(AxiomType.ANNOTATION_ASSERTION).asScala
    } yield {
      synonym -> ResourceFactory.createResource(term.toString)
    }).toMap
  }
private def tweakID(id: String): String = {
val doid = "(DO:DOID:)(.+)".r
id match {
case doid(prefix, local) => s"DOID:$local"
case _ => id
}
}
private def processDate(text: String): Option[String] = {
val dateOpt = text.trim match {
case dotDate(d, m, y) => Option(y.toInt, m.toInt, d.toInt)
case dashDate(d, m, y) => Option(y.toInt, months.indexOf(m), d.toInt)
case commaDate(m, d, y) => Option(y.toInt, months.indexOf(m), d.toInt)
case _ => {
logger.warn(s"Unrecognized date format, dropping: $text")
None
}
}
for {
(year, month, day) <- dateOpt
} yield {
val yearPad = f"${year.toInt}%04d"
val monthPad = f"${month.toInt}%02d"
val dayPad = f"${day.toInt}%02d"
s"$yearPad-$monthPad-$dayPad"
}
}
  // Three-letter month abbreviations in calendar order (indexOf is 0-based: Jan -> 0).
  private val months: Seq[String] = Seq("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
  // Hand-maintained map from HPO contributor handles to ORCID IRIs; unknown
  // contributors fall back to a resource built from the raw handle (see caller).
  private val knownContributors: Map[String, Resource] = Map(
    "HPO:probinson" -> ResourceFactory.createResource("http://orcid.org/0000-0002-0736-9199"),
    "HPO:skoehler" -> ResourceFactory.createResource("http://orcid.org/0000-0002-5316-1399"))
  // Date layouts seen in the annotation files:
  //   dotDate:   "dd.mm.yyyy"   e.g. 12.03.2014 (numeric month)
  //   dashDate:  "d-Mon-yyyy"   e.g. 3-Mar-2014 (abbreviated month)
  //   commaDate: "Mon d, yyyy"  e.g. Mar 3, 2014 (abbreviated month)
  private val dotDate = raw"(\d\d)\.(\d\d)\.(\d\d\d\d)".r
  private val dashDate = raw"(\d+)-(\w\w\w)-(\d\d\d\d)".r
  private val commaDate = raw"(\w\w\w) (\d+), (\d\d\d\d)".r
private implicit class NullEmptyStringMap(val self: Map[String, String]) extends AnyVal {
//scala-csv puts empty strings in the result map; convert to None instead
def getOpt(key: String): Option[String] = self.get(key).filter(_.nonEmpty)
}
}
| phenopackets/pxftools | src/main/scala/org/phenopackets/pxftools/util/HPOAnnotations.scala | Scala | bsd-3-clause | 9,643 |
package test
import java.net.InetSocketAddress
import java.nio.channels.SelectionKey
import java.nio.channels.Selector
import java.nio.channels.ServerSocketChannel
import java.nio.channels.SocketChannel
import java.nio.ByteBuffer
import scala.collection.JavaConversions.collectionAsScalaIterable
import scala.util.continuations.reset
import scala.util.continuations.shift
import scala.util.continuations.shiftUnit
object Main extends App {
  // Single-threaded NIO echo server written with delimited continuations:
  // reset/shift turn the selector-callback style back into straight-line
  // accept/read/write code. Whenever an operation would block, the current
  // continuation is captured and stored as the selection key's attachment;
  // the selector loop at the bottom resumes it once the channel is ready.
  val selector = Selector.open()
  val server = ServerSocketChannel.open()
  server.socket().bind(new InetSocketAddress(12345))
  server.configureBlocking(false)
  reset {
    while (true) {
      server.accept() match {
        case c: SocketChannel =>
          // Each accepted connection gets its own continuation scope.
          reset {
            println("Accept: " + c)
            c.configureBlocking(false)
            while (c.isOpen && c.isConnected) {
              val bb = ByteBuffer.allocateDirect(1024)
              c.read(bb) match {
                case count if count > 0 =>
                  println("Read: " + c + " count: " + count)
                  bb.flip
                  // Echo everything that was just read back to the client.
                  while (bb.hasRemaining) {
                    c.write(bb) match {
                      case count if count > 0 =>
                        println("Write: " + c + " count: " + count)
                        shiftUnit[Unit, Unit, Unit]()
                      case count if count == 0 =>
                        // Socket buffer full: park until the channel is writable.
                        println("WriteBlock: " + c)
                        shift[Unit, Unit, Unit] { cont =>
                          c.register(selector, SelectionKey.OP_WRITE, cont)
                        }
                      case _ =>
                        println("WriteError: " + c)
                        bb.clear()
                        c.close()
                        shiftUnit[Unit, Unit, Unit]()
                    }
                  }
                case count if count == 0 =>
                  // No data available: park until the channel is readable.
                  println("ReadBlock: " + c)
                  shift[Unit, Unit, Unit] { cont =>
                    c.register(selector, SelectionKey.OP_READ, cont)
                  }
                case _ =>
                  println("ReadError: " + c)
                  c.close()
                  shiftUnit[Unit, Unit, Unit]()
              }
            }
          }
          shiftUnit[Unit, Unit, Unit]()
        case null =>
          // No pending connection: park until the server channel is acceptable.
          println("AcceptBlock")
          shift[Unit, Unit, Unit] { cont =>
            server.register(selector, SelectionKey.OP_ACCEPT, cont)
          }
      }
      shiftUnit[Unit, Unit, Unit]()
    }
  }
  val keys = selector.selectedKeys
  while (true) {
    selector.select
    keys foreach { k =>
      k.interestOps(0)
      // Resume the continuation parked on this key.
      // FIX: previously `apply(Unit)`, which passed the Unit *companion object*
      // (silently adapted via value discarding); pass the unit value () instead.
      k.attachment.asInstanceOf[Function1[Unit, Unit]].apply(())
    }
    keys.clear
  }
}
| kghost/scala-continuation-sample | src/main/scala/test/Main.scala | Scala | bsd-2-clause | 2,727 |
package net.sansa_stack.datalake.spark
import java.io.FileNotFoundException
import com.typesafe.scalalogging.Logger
import net.sansa_stack.datalake.spark.utils.Helpers
import net.sansa_stack.datalake.spark.utils.Helpers._
import org.apache.spark.sql.DataFrame
import scala.collection.JavaConverters._
import scala.collection.mutable
class Run[A](executor: QueryExecutor[A]) {

  private var finalDataSet: A = _

  /**
   * Executes a SPARQL query over the configured heterogeneous data sources:
   * analyses the query, consults the mappings, loads one ParSet per star,
   * then joins/aggregates/orders them and projects the selected columns.
   *
   * @return the result as a DataFrame, or null if a recognized error occurred
   */
  def application(queryFile: String, mappingsFile: String, configFile: String): DataFrame = {

    val logger = Logger("SANSA-DataLake")

    // 1. Read SPARQL query
    logger.info("QUERY ANALYSIS starting...")

    try {
      var query = Helpers.readFileFromPath(queryFile)

      logger.info(s"Going to execute the query:\n$query")

      // Transformations
      var transformExist = false
      var transformationsInLine = ""
      if (query.contains("TRANSFORM")) {
        transformationsInLine = query.substring(query.indexOf("TRANSFORM") + 9, query.lastIndexOf(")")) // E.g. ?k?a.toInt && ?a?l.r.toInt.scl(_+61)
        query = query.replace("TRANSFORM" + transformationsInLine + ")", "") // TRANSFORM is not defined in Jena, so remove
        transformExist = true
      }

      // 2. Extract star-shaped BGPs
      val qa = new QueryAnalyser(query)

      val stars = qa.getStars
      val starsNbr = stars._1.size

      // Create a map between the variable and its star and predicate URL [variable -> (star,predicate)]
      // Need e.g. to create the column to 'SQL ORDER BY' from 'SPARQL ORDER BY'
      var variablePredicateStar: Map[String, (String, String)] = Map()
      for (v <- stars._1) {
        val star = v._1
        val predicate_variable_set = v._2
        for (pv <- predicate_variable_set) {
          val predicate = pv._1
          val variable = pv._2
          variablePredicateStar += (variable -> (star, predicate))
        }
      }
      logger.info(s"Predicate Star: $variablePredicateStar")

      val prefixes = qa.getPrefixes
      val (select, distinct) = qa.getProject
      val filters = qa.getFilters
      val orderBys = qa.getOrderBy
      val groupBys = qa.getGroupBy(variablePredicateStar, prefixes)

      var limit: Int = 0
      if (qa.hasLimit) limit = qa.getLimit

      logger.info("- Predicates per star:")

      val star_predicate_var = stars._2 // TODO: assuming no (star,predicate) with two vars?
      logger.info("star_predicate_var: " + star_predicate_var)

      // 3. Generate plan of joins
      logger.info("PLAN GENERATION & MAPPINGS")
      val pl = new Planner(stars._1)
      val pln = pl.generateJoinPlan
      val joins = pln._1
      val joinedToFlag = pln._2
      val joinedFromFlag = pln._3
      val joinPairs = pln._4

      // 4. Check mapping file
      logger.info("---> MAPPING CONSULTATION")

      val mappers = new Mapper(mappingsFile)
      val results = mappers.findDataSources(stars._1, configFile)

      val neededPredicates = pl.getNeededPredicates(star_predicate_var, joins, select, groupBys, prefixes)
      val neededPredicatesAll = neededPredicates._1 // all predicates used
      val neededPredicatesSelect = neededPredicates._2 // only projected out predicates

      logger.info("--> Needed predicates all: " + neededPredicatesAll)

      var star_df: Map[String, A] = Map.empty
      var starNbrFilters: Map[String, Integer] = Map()

      var starDataTypesMap: Map[String, mutable.Set[String]] = Map()

      // BUG FIX: this used to be an (immutable, empty) `val` that was shadowed
      // by a fresh `var parsetIDs` inside the loop below, so the "Add subjects"
      // loop further down always saw an empty map and subject columns were
      // never projected. Accumulate into this single outer var instead.
      var parsetIDs: Map[String, String] = Map() // Used when subject variables are projected out

      logger.info("---> GOING NOW TO COLLECT DATA")

      for (s <- results) {
        val star = s._1
        logger.info("star: " + star)
        val dataSources = s._2
        val options = s._3
        val dataTypes = dataSources.map(d => d._3)

        // 'Mappings' transformations
        for (ds <- dataSources) {
          val transformations = ds._4
          if (transformations.nonEmpty) {
            transformExist = true
          }
          for (t <- transformations) {
            logger.info("Visiting transformation related to predicate: " + t._1 + " = " + t._2)
            val fncParamBits = t._2._1.split(" ")
            val fncName = fncParamBits(0)
            var fncParam = ""
            if (fncParamBits.size > 2) { // E.g., skip 2 producerID
              fncParam = fncParamBits(1)
            } // otherwise, it's 1 parameter, e.g., toInt producerID
            val IDorNot = t._2._2
            var lOrR = ""
            lOrR = if (IDorNot) "l" else "r"

            // Construct the in-line transformation declarations (like 'SPARQL' transformations)
            joinPairs.keys.foreach(
              x => if (omitQuestionMark(star) == x._1 && joinPairs(x) == t._1) { // Case of predicate transformations
                if (transformationsInLine != "") {
                  transformationsInLine += " && "
                }
                transformationsInLine += s"?${x._1}?${x._2}.$lOrR.${getFunctionFromURI(fncName)}"
                if (fncParam != "") {
                  transformationsInLine += s"($fncParam)"
                }
              } else if (omitQuestionMark(star) == x._2) { // Case of ID transformations
                if (transformationsInLine != "") {
                  transformationsInLine += " && "
                }
                transformationsInLine += s"?${x._1}?${x._2}.$lOrR.${getFunctionFromURI(fncName)}"
                if (fncParam != "") {
                  transformationsInLine += s"($fncParam)"
                }
              }
            )
          }
        }
        if (transformationsInLine != "") {
          logger.info(s"Transformations found (inline): $transformationsInLine")
        }

        starDataTypesMap += (star -> dataTypes)

        logger.info("Getting DF relevant to the star: " + star)

        // Transformations
        var leftJoinTransformations: (String, Array[String]) = null
        var rightJoinTransformations: Array[String] = null
        if (transformExist) {
          val (transmap_left, transmap_right) = qa.getTransformations(transformationsInLine)

          val str = omitQuestionMark(star)
          if (transmap_left.keySet.contains(str)) {
            // Get with whom there is a join
            val rightOperand = transmap_left(str)._1
            val ops = transmap_left(str)._2

            // Get the predicate of the join
            val joinLeftPredicate = joinPairs((str, rightOperand))

            leftJoinTransformations = (joinLeftPredicate, ops)
            logger.info("Transform (left) on predicate " + joinLeftPredicate + " using " + ops.mkString("_"))
          }
          if (transmap_right.keySet.contains(str)) {
            rightJoinTransformations = transmap_right(str)
            logger.info("Transform (right) ID using " + rightJoinTransformations.mkString("..."))
          }
        }

        if (joinedToFlag.contains(star) || joinedFromFlag.contains(star)) {
          val (ds, numberOfFiltersOfThisStar, parsetID) = executor.query(dataSources, options, toJoinWith = true, star, prefixes,
            select, star_predicate_var, neededPredicatesAll, filters, leftJoinTransformations, rightJoinTransformations,
            joinPairs)

          if (parsetID != "") {
            parsetIDs += (star -> parsetID)
          }

          star_df += (star -> ds) // DataFrame representing a star
          starNbrFilters += star -> numberOfFiltersOfThisStar

          logger.info("join...with ParSet schema: " + ds)
        } else if (!joinedToFlag.contains(star) && !joinedFromFlag.contains(star)) {
          val (ds, numberOfFiltersOfThisStar, parsetID) = executor.query(dataSources, options, toJoinWith = false, star, prefixes,
            select, star_predicate_var, neededPredicatesAll, filters, leftJoinTransformations, rightJoinTransformations,
            joinPairs)

          // ds.printSchema() // SEE WHAT TO DO HERE TO SHOW BACK THE SCHEMA - MOVE IN SPARKEXECUTOR

          parsetIDs += (star -> parsetID)

          star_df += (star -> ds) // DataFrame representing a star
          starNbrFilters += star -> numberOfFiltersOfThisStar

          logger.info("single...with ParSet schema: " + ds)
        }
      }

      logger.info("QUERY EXECUTION starting...")

      logger.info(s"DataFrames: $star_df")

      if (starsNbr > 1) {
        logger.info(s"- Here are the (Star, ParSet) pairs:")
        logger.info("Join Pairs: " + joinPairs)

        if (starsNbr > 1) logger.info(s"- Here are join pairs: $joins") else logger.info("No join detected.")
        logger.info(s"- Number of predicates per star: $starNbrFilters ")

        val starWeights = pl.sortStarsByWeight(starDataTypesMap, starNbrFilters, configFile)
        logger.info(s"- Stars weighted (performance + nbr of filters): $starWeights")

        val sortedScoredJoins = pl.reorder(joins, starDataTypesMap, starNbrFilters, starWeights, configFile)
        logger.info(s"- Sorted scored joins: $sortedScoredJoins")
        val startingJoin = sortedScoredJoins.head

        // Convert starting join to: (leftStar, (rightStar, joinVar)) so we can remove it from $joins
        var firstJoin: (String, (String, String)) = null
        for (j <- joins.entries.asScala) {
          if (j.getKey == startingJoin._1._1 && j.getValue._1 == startingJoin._1._2) {
            firstJoin = startingJoin._1._1 -> (startingJoin._1._2, j.getValue._2)
          }
        }
        logger.info(s"- Starting join: $firstJoin")

        finalDataSet = executor.join(joins, prefixes, star_df)
        // finalDataSet.asInstanceOf[DataFrame].printSchema()
        // finalDataSet = executor.joinReordered(joins, prefixes, star_df, firstJoin, starWeights)
      } else {
        logger.info(s" Single star query")
        finalDataSet = star_df.head._2
      }

      // Project out columns from the final global join results
      var columnNames = Seq[String]()
      logger.info(s"--> Needed predicates select: $neededPredicatesSelect")
      for (i <- neededPredicatesSelect) {
        val star = i._1
        val ns_predicate = i._2
        val bits = get_NS_predicate(ns_predicate)

        val selected_predicate = omitQuestionMark(star) + "_" + bits._2 + "_" + prefixes(bits._1)
        columnNames = columnNames :+ selected_predicate
      }

      // Add subjects (now sees the IDs collected in the star loop above)
      for (i <- parsetIDs) {
        val star = i._1
        val parsetID = i._2
        columnNames = columnNames :+ s"${omitQuestionMark(star)}"
      }

      if (groupBys != null) {
        logger.info(s"groupBys: $groupBys")
        finalDataSet = executor.groupBy(finalDataSet, groupBys)

        // Add aggregation columns to the final project ones
        for (gb <- groupBys._2) {
          logger.info("-> Add to Project list:" + gb._2)
          columnNames = columnNames :+ gb._2 + "(" + gb._1 + ")"
        }
      }

      // TODO: check the order of PROJECT and ORDER-BY
      logger.info(s"SELECTED column names: $columnNames")

      if (orderBys != null) {
        logger.info(s"orderBys: $orderBys")

        var orderByList: Set[(String, String)] = Set()
        for (o <- orderBys) {
          val orderDirection = o._1
          val str = variablePredicateStar(o._2)._1
          val vr = variablePredicateStar(o._2)._2
          val ns_p = get_NS_predicate(vr)
          val column = omitQuestionMark(str) + "_" + ns_p._2 + "_" + prefixes(ns_p._1)
          orderByList += ((column, orderDirection))
        }

        // TODO: (-1 ASC, -2 DESC) confirm with multiple order-by's
        logger.info(s"ORDER BY list: $orderByList (-1 ASC, -2 DESC)")

        for (o <- orderByList) {
          val variable = o._1
          val direction = o._2
          finalDataSet = executor.orderBy(finalDataSet, direction, variable)
        }
      }

      logger.info("|__ Has distinct? " + distinct)
      finalDataSet = executor.project(finalDataSet, columnNames, distinct)

      if (limit > 0) {
        finalDataSet = executor.limit(finalDataSet, limit)
      }

      // executor.run(finalDataSet)

      finalDataSet.asInstanceOf[DataFrame]
    } catch {
      // Stack traces are joined with mkString: Array.toString would only print
      // the array's identity hash, not the frames.
      case ex: FileNotFoundException =>
        println("ERROR: One of input files isn't found (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: org.apache.jena.riot.RiotException =>
        println("ERROR: invalid Mappings. Check syntax. (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: org.apache.spark.SparkException =>
        println("ERROR: invalid Spark Master. (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: com.fasterxml.jackson.core.JsonParseException =>
        println("ERROR: invalid JSON content in config file. (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: java.lang.IllegalArgumentException =>
        println("ERROR: invalid mappings. (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: org.apache.jena.query.QueryParseException =>
        println("ERROR: invalid query. (Report it: " + ex + ")")
        logger.debug(ex.getStackTrace.mkString("\n"))
        null
      case ex: com.amazonaws.services.s3.model.AmazonS3Exception =>
        println(ex.getStackTrace.mkString("\n"))
        println("ERROR: Access to Amazon S3 denied. Check bucket name and key. Check you have ~/.aws/credentials file " +
          "with the correct content: \n[default]\naws_access_key_id=...\naws_secret_access_key=...")
        null
    }
  }
}
| SANSA-Stack/SANSA-RDF | sansa-datalake/sansa-datalake-spark/src/main/scala/net/sansa_stack/datalake/spark/Run.scala | Scala | apache-2.0 | 13,883 |
package amphip.stoch
import org.junit.Assert._
import org.junit.Test
import amphip.dsl._
/**
 * Tests for StochModel.assignIndices (assignment of stage/scenario dummy
 * indices over a variable's domain) and nonanticipativity.uniqueDummy
 * (generation of fresh, non-clashing synthetic dummy names).
 */
class TestNA {

  // Indices t/s are used as-is when free; extra sets get synthetic dummies
  // (i_, i1_, i2_, ...) generated positionally.
  @Test
  def testAssignIndicesBasic(): Unit = {
    val t = dummy
    val s = dummy
    val S = set
    val T = set
    val I = set

    val x1 = xvar(T,S)
    val (entries1, tA1, sA1) = StochModel.assignIndices(x1.domain.get.entries, T, S, t, s)
    assertEquals(List(t,s), entries1.flatMap(_.indices))
    assertEquals(tA1, t)
    assertEquals(sA1, s)

    val i_ = dummy("i_", synthetic = true)

    val x2a = xvar(T,S,I)
    val (entries2a, tA2a, sA2a) = StochModel.assignIndices(x2a.domain.get.entries, T, S, t, s)
    assertEquals(List(t,s,i_), entries2a.flatMap(_.indices))
    assertEquals(tA2a, t)
    assertEquals(sA2a, s)

    val i = dummy

    val x2b = xvar(T, S, i in I)
    val (entries2b, tA2b, sA2b) = StochModel.assignIndices(x2b.domain.get.entries, T, S, t, s)
    assertEquals(List(t,s,i), entries2b.flatMap(_.indices))
    assertEquals(tA2b, t)
    assertEquals(sA2b, s)

    val i1_ = dummy("i1_", synthetic = true)

    val x3 = xvar(T, S, I * I)
    val (entries3, tA3, sA3) = StochModel.assignIndices(x3.domain.get.entries, T, S, t, s)
    assertEquals(List(t, s, i_, i1_), entries3.flatMap(_.indices))
    assertEquals(tA3, t)
    assertEquals(sA3, s)

    val i2_ = dummy("i2_", synthetic = true)

    val x4 = xvar(T, S, I * I, I)
    val (entries4, tA4, sA4) = StochModel.assignIndices(x4.domain.get.entries, T, S, t, s)
    assertEquals(List(t, s, i_, i1_, i2_), entries4.flatMap(_.indices))
    assertEquals(tA4, t)
    assertEquals(sA4, s)
  }

  // Indices explicitly declared over T and S are picked up as the
  // stage/scenario indices instead of the defaults t/s.
  @Test
  def testAssignIndicesExplicit(): Unit = {
    val t = dummy
    val s = dummy
    val S = set
    val T = set

    val i1 = dummy
    val i2 = dummy

    val x1 = xvar(i1 in T, i2 in S)
    val (entries1, tA1, sA1) = StochModel.assignIndices(x1.domain.get.entries, T, S, t, s)
    assertEquals(List(i1,i2), entries1.flatMap(_.indices))
    assertEquals(tA1, i1)
    assertEquals(sA1, i2)

    val x2a = xvar(i1 in T, S)
    val (entries2a, tA2a, sA2a) = StochModel.assignIndices(x2a.domain.get.entries, T, S, t, s)
    assertEquals(List(i1,s), entries2a.flatMap(_.indices))
    assertEquals(tA2a, i1)
    assertEquals(sA2a, s)

    val x2b = xvar(T, i2 in S)
    val (entries2b, tA2b, sA2b) = StochModel.assignIndices(x2b.domain.get.entries, T, S, t, s)
    assertEquals(List(t,i2), entries2b.flatMap(_.indices))
    assertEquals(tA2b, t)
    assertEquals(sA2b, i2)
  }

  // When the default names t/s are already taken by indices over other sets,
  // synthetic replacements (t_, s_) are generated for the stage/scenario slots.
  @Test
  def testAssignIndicesOverlapping(): Unit = {
    val t_ = dummy("t_", synthetic = true)
    val s_ = dummy("s_", synthetic = true)
    val t = dummy
    val s = dummy
    val S = set
    val T = set
    val I = set

    val x1 = xvar(T, S, t in I)
    val (entries1, tA1, sA1) = StochModel.assignIndices(x1.domain.get.entries, T, S, t, s)
    assertEquals(List(t_, s, t), entries1.flatMap(_.indices))
    assertEquals(tA1, t_)
    assertEquals(sA1, s)

    val x2 = xvar(T, S, s in I)
    val (entries2, tA2, sA2) = StochModel.assignIndices(x2.domain.get.entries, T, S, t, s)
    assertEquals(List(t, s_, s), entries2.flatMap(_.indices))
    assertEquals(tA2, t)
    assertEquals(sA2, s_)

    val x3 = xvar(T, S, t in I, s in I)
    val (entries3, tA3, sA3) = StochModel.assignIndices(x3.domain.get.entries, T, S, t, s)
    assertEquals(List(t_, s_, t, s), entries3.flatMap(_.indices))
    assertEquals(tA3, t_)
    assertEquals(sA3, s_)
  }

  // uniqueDummy appends/extends a numeric suffix until the candidate name no
  // longer clashes with any index already present in the domain.
  @Test
  def testUniqueDummy(): Unit = {
    val t = dummy
    val s = dummy
    val S = set
    val T = set

    val x1 = xvar(t in T, s in S)
    val s1 = dummy("s1", synthetic = true)
    val s2 = dummy("s2", synthetic = true)
    val x1s1 = nonanticipativity.uniqueDummy(x1.domain.get.entries.flatMap(_.indices), s, 1)
    val x1s2 = nonanticipativity.uniqueDummy(x1.domain.get.entries.flatMap(_.indices), s, 2)
    assertEquals(List(s1, s2), List(x1s1, x1s2))

    val x2 = xvar(s1 in T, s in S)
    val s11 = dummy("s11", synthetic = true)
    val x2s1 = nonanticipativity.uniqueDummy(x2.domain.get.entries.flatMap(_.indices), s, 1)
    val x2s2 = nonanticipativity.uniqueDummy(x2.domain.get.entries.flatMap(_.indices), s, 2)
    assertEquals(List(s11, s2), List(x2s1, x2s2))

    val I = set
    val s1a = dummy
    val x3 = xvar(s1 in T, s in S, s1a in I, s2 in I)
    val s1a1 = dummy("s1a1", synthetic = true)
    val s22 = dummy("s22" , synthetic = true)
    val x3s1 = nonanticipativity.uniqueDummy(x3.domain.get.entries.flatMap(_.indices), s, 1)
    val x3s2 = nonanticipativity.uniqueDummy(x3.domain.get.entries.flatMap(_.indices), s, 2)
    assertEquals(List(s1a1, s22), List(x3s1, x3s2))
  }
} | gerferra/amphip | core/src/test/scala/amphip/stoch/TestNA.scala | Scala | mpl-2.0 | 4,675 |
/**
* Angles
* Copyright (C) 2015 Jakob Hendeß, Niklas Wolber
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.ssc.angles.pipeline
import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException
import io.ssc.angles.pipeline.data.{Storage, TwitterApi}
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
import twitter4j.{ResponseList, User}
import scala.collection.JavaConversions._
/**
 * Pipeline step that resolves not-yet-crawled explorer ids via the Twitter
 * users/lookup API and persists the returned user profiles.
 */
class FetchExplorers extends Step {

  val log = LoggerFactory.getLogger(classOf[FetchExplorers])

  override def execute(since: DateTime): Unit = {
    log.info("Fetching explorers ...")

    val twitterApi = TwitterApi.connect()

    val userIds = Storage.notCrawledExplorers().toArray
    log.info("{} explorers to fetch", userIds.length)

    if (userIds.nonEmpty) {
      userIds
        .grouped(100) // users/lookup accepts at most 100 ids per request
        .foreach { users =>
          val explorers: ResponseList[User] = twitterApi.users().lookupUsers(users)
          for (explorer: User <- explorers) {
            try {
              Storage.saveExplorer(explorer)
              log.info("Fetched {}", explorer.getScreenName)
            } catch {
              // Unique-key violation: the explorer was already stored; skip it.
              case _: MySQLIntegrityConstraintViolationException => log.info("Error while saving explorer, duplicate?")
            }
          }
        }
    }
  }
}
| nwolber/angles | src/main/scala/io/ssc/angles/pipeline/FetchExplorers.scala | Scala | gpl-3.0 | 1,899 |
package pl.newicom.eventstore.json
import java.nio.charset.Charset
import akka.actor._
import akka.persistence.eventstore.snapshot.EventStoreSnapshotStore.SnapshotEvent
import akka.persistence.eventstore.snapshot.EventStoreSnapshotStore.SnapshotEvent.Snapshot
import akka.persistence.{PersistentRepr, SnapshotMetadata}
import akka.serialization.{Serialization, SerializationExtension, SerializerWithStringManifest}
import org.json4s.Extraction.decompose
import org.json4s.JsonAST._
import org.json4s.ext.EnumNameSerializer
import org.json4s.native.Serialization.{read, write}
import org.json4s.{CustomSerializer, Formats, FullTypeHints, JValue, Serializer, TypeInfo}
import pl.newicom.dddd.aggregate.Command
import pl.newicom.dddd.delivery.protocol.Processed
import pl.newicom.dddd.delivery.protocol.alod.{Processed => AlodProcessed}
import pl.newicom.dddd.messaging.{MetaData, PublisherTypeValue}
import pl.newicom.dddd.process.CommandEnqueued
import pl.newicom.dddd.scheduling.{EventScheduled, ScheduledEventMetadata}
import pl.newicom.dddd.serialization.{JsonExtraSerHints, JsonSerHints}
import pl.newicom.dddd.serialization.JsonSerHints._
/**
 * The reason for using Extension mechanism is that
 * pl.newicom.eventstore.json.JsonSerializerExtensionImpl.ActorRefSerializer
 * requires access to ExtendedActorSystem.
 *
 * @param system ExtendedActorSystem (injected automatically by Akka)
 */
class JsonSerializerExtensionImpl(system: ExtendedActorSystem) extends Extension {

  // json4s hints applied on top of caller-supplied hints for every
  // (de)serialization done through this extension: full class-name type hints
  // for the listed envelope types plus custom serializers for actor
  // refs/paths, scheduling/queueing events, snapshots and the publisher enum.
  val extraHints = JsonExtraSerHints(
    typeHints =
      FullTypeHints(
        List(classOf[MetaData], classOf[Processed], classOf[AlodProcessed], classOf[PersistentRepr], classOf[EventScheduled], classOf[CommandEnqueued])),
    serializers =
      List(ActorRefSerializer, ActorPathSerializer, new ScheduledEventSerializer, new EnqueuedCommandSerializer, SnapshotJsonSerializer(system), new EnumNameSerializer(PublisherTypeValue))
  )

  val UTF8: Charset = Charset.forName("UTF-8")

  /**
   * Deserializes a UTF-8 JSON payload into an instance of `clazz`, combining
   * the caller-provided hints with this extension's extra hints.
   */
  def fromBinary[A](bytes: Array[Byte], clazz: Class[A], hints: JsonSerHints): A = {
    implicit val formats: Formats = hints ++ extraHints
    implicit val manifest: Manifest[A] = Manifest.classType(clazz)
    try {
      read(new String(bytes, UTF8))
    } catch {
      case th: Throwable =>
        // NOTE(review): catches Throwable (including fatal errors) only to dump
        // the stack trace before rethrowing; consider NonFatal + proper logging.
        th.printStackTrace()
        throw th;
    }
  }

  /** Serializes `o` to UTF-8 JSON bytes using the combined hints. */
  def toBinary(o: AnyRef, hints: JsonSerHints): Array[Byte] = {
    implicit val formats: Formats = hints ++ extraHints
    write(o).getBytes(UTF8)
  }

  // ActorRef <-> full serialization-format path; resolving back requires the
  // actor-ref provider, which is why ExtendedActorSystem is needed.
  object ActorRefSerializer extends CustomSerializer[ActorRef](_ => (
    {
      case JString(s) => system.provider.resolveActorRef(s)
      case JNull => null
    },
    {
      case x: ActorRef => JString(Serialization.serializedActorPath(x))
    }
  ))
}
/** Akka ExtensionId providing the per-actor-system [[JsonSerializerExtensionImpl]] singleton. */
object JsonSerializerExtension extends ExtensionId[JsonSerializerExtensionImpl] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem) = new JsonSerializerExtensionImpl(system)
  override def lookup(): ExtensionId[_ <: Extension] = JsonSerializerExtension
  override def get(system: ActorSystem): JsonSerializerExtensionImpl = super.get(system)
}
/** json4s serializer mapping ActorPath <-> its full serialization-format string. */
object ActorPathSerializer extends CustomSerializer[ActorPath](_ => (
  { case JString(s) => ActorPath.fromString(s) },
  { case x: ActorPath => JString(x.toSerializationFormat) }
))
/**
 * json4s serializer for [[EventScheduled]]. The wrapped event's concrete class
 * name is written next to the event JSON so the payload can be rebuilt on read.
 * NOTE(review): deserialization pattern-matches the exact field order
 * (metadata, eventClass, event) — JSON with reordered fields will not match.
 */
class ScheduledEventSerializer extends Serializer[EventScheduled] {
  val Clazz: Class[EventScheduled] = classOf[EventScheduled]

  def deserialize(implicit formats: Formats): PartialFunction[(TypeInfo, JValue), EventScheduled] = {
    case (TypeInfo(Clazz, _), JObject(List(
    JField("metadata", metadata),
    JField("eventClass", JString(eventClassName)),
    JField("event", event)))) =>
      // Recover the concrete event type recorded at write time.
      val eventClass = Class.forName(eventClassName)
      val eventObj = event.extract[AnyRef](formats, Manifest.classType(eventClass))
      val metadataObj = metadata.extract[ScheduledEventMetadata]
      EventScheduled(metadataObj, eventObj)
  }

  def serialize(implicit formats: Formats): PartialFunction[Any, JObject] = {
    case EventScheduled(metadata, event) =>
      JObject(
        "jsonClass" -> JString(classOf[EventScheduled].getName),
        "metadata" -> decompose(metadata),
        "eventClass" -> JString(event.getClass.getName),
        "event" -> decompose(event)
      )
  }
}
/**
 * json4s serializer for [[CommandEnqueued]]. The command's concrete class name
 * is written next to the command JSON so it can be rebuilt on read.
 * NOTE(review): like ScheduledEventSerializer, deserialization requires the
 * exact field order (officeId, department, commandClass, command).
 */
class EnqueuedCommandSerializer extends Serializer[CommandEnqueued] {
  val Clazz: Class[CommandEnqueued] = classOf[CommandEnqueued]

  def deserialize(implicit formats: Formats): PartialFunction[(TypeInfo, JValue), CommandEnqueued] = {
    case (TypeInfo(Clazz, _), JObject(List(
    JField("officeId", JString(officeId)),
    JField("department", JString(department)),
    JField("commandClass", JString(commandClassName)),
    JField("command", command)))) =>
      // Recover the concrete command type recorded at write time.
      val commandClass = Class.forName(commandClassName)
      val commandObj = command.extract[Command](formats, Manifest.classType(commandClass))
      CommandEnqueued(commandObj, officeId, department)
  }

  def serialize(implicit formats: Formats): PartialFunction[Any, JObject] = {
    case CommandEnqueued(command, officeId, department) =>
      JObject(
        "jsonClass" -> JString(classOf[CommandEnqueued].getName),
        "officeId" -> JString(officeId),
        "department" -> JString(department),
        "commandClass" -> JString(command.getClass.getName),
        "command" -> decompose(command)
      )
  }
}
/** Result of serializing a snapshot payload: encoded data, optional Akka serializer id, and manifest (class name). */
case class SnapshotDataSerializationResult(data: String, serializerId: Option[Int], manifest: String)
/**
 * json4s serializer for snapshot events. The snapshot payload is serialized by
 * whatever Akka serializer is bound to its class, Base64-encoded, and stored
 * with the serializer id and manifest so it can be restored symmetrically.
 */
case class SnapshotJsonSerializer(sys: ActorSystem) extends Serializer[SnapshotEvent] {
  val Clazz: Class[SnapshotEvent] = classOf[SnapshotEvent]
  // Sentinel for "no serializer id recorded": on read, the class name alone
  // selects the deserializer.
  val EmptySerializerId: Int = 0

  import akka.serialization.{Serialization => SysSerialization}
  lazy val serialization: SysSerialization = SerializationExtension(sys)

  def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), SnapshotEvent] = {
    case (TypeInfo(Clazz, _), JObject(List(
    JField("dataClass", JString(dataClass)),
    JField("dataSerializerId", JInt(serializerId)),
    JField("data", JString(x)),
    JField("metadata", metadata)))) =>
      import Base64._
      // Prefer lookup by serializer id when one was recorded at write time.
      val data = if (serializerId.intValue == EmptySerializerId) {
        serialization.deserialize(x.toByteArray, Class.forName(dataClass)).get
      } else {
        serialization.deserialize(x.toByteArray, serializerId.intValue, dataClass).get
      }
      val metaData = metadata.extract[SnapshotMetadata]
      Snapshot(data, metaData)
  }

  def serialize(implicit format: Formats): PartialFunction[Any, JObject] = {
    case Snapshot(data, metadata) =>
      val serResult = data match {
        case data: AnyRef =>
          val serializer = serialization.serializerFor(data.getClass)
          // SerializerWithStringManifest carries its own manifest; otherwise
          // fall back to the payload's class name.
          val manifest:String = serializer match {
            case s:SerializerWithStringManifest => s.manifest(data.asInstanceOf[AnyRef])
            case _ => data.getClass.getName
          }
          import Base64._
          SnapshotDataSerializationResult(serializer.toBinary(data).toBase64, Some(serializer.identifier), manifest)
        case _ =>
          // Non-AnyRef (primitive) payload: store its string form directly.
          SnapshotDataSerializationResult(data.toString, None, classOf[String].getName)
      }
      JObject(
        "jsonClass" -> JString(Clazz.getName),
        "dataClass" -> JString(serResult.manifest),
        "dataSerializerId" -> JInt(serResult.serializerId.getOrElse[Int](EmptySerializerId)),
        "data" -> JString(serResult.data),
        "metadata" -> decompose(metadata)
      )
  }
}
| pawelkaczor/akka-ddd | eventstore-akka-persistence/src/main/scala/pl/newicom/eventstore/json/JsonSerializerExtension.scala | Scala | mit | 7,731 |
package spark.executor
import java.io.{File, FileOutputStream}
import java.net.{URL, URLClassLoader}
import java.util.concurrent._
import scala.collection.mutable.ArrayBuffer
import spark.broadcast._
import spark.scheduler._
import spark._
import java.nio.ByteBuffer
/**
* The Mesos executor for Spark.
*/
class Executor extends Logging {
var classLoader: ClassLoader = null
var threadPool: ExecutorService = null
var env: SparkEnv = null
val EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new Array[Byte](0))
initLogging()
  /**
   * One-time setup for this executor: propagates the scheduler-visible
   * hostname, applies the spark.* properties passed by the driver, creates
   * the SparkEnv, installs the task class loader and starts the worker pool.
   */
  def initialize(slaveHostname: String, properties: Seq[(String, String)]) {
    // Make sure the local hostname we report matches the cluster scheduler's name for this host
    Utils.setCustomHostname(slaveHostname)

    // Set spark.* system properties from executor arg
    for ((key, value) <- properties) {
      System.setProperty(key, value)
    }

    // Initialize Spark environment (using system properties read above)
    env = SparkEnv.createFromSystemProperties(slaveHostname, 0, false, false)
    SparkEnv.set(env)

    // Create our ClassLoader (using spark properties) and set it on this thread
    classLoader = createClassLoader()
    Thread.currentThread.setContextClassLoader(classLoader)

    // Start worker thread pool
    threadPool = new ThreadPoolExecutor(
      1, 128, 600, TimeUnit.SECONDS, new SynchronousQueue[Runnable])
  }
  /** Schedules asynchronous execution of one serialized task on the worker pool. */
  def launchTask(context: ExecutorBackend, taskId: Long, serializedTask: ByteBuffer) {
    threadPool.execute(new TaskRunner(context, taskId, serializedTask))
  }
  // Runs a single task: deserializes it with the closure serializer, executes
  // it, and reports RUNNING/FINISHED/FAILED status back through the backend.
  class TaskRunner(context: ExecutorBackend, taskId: Long, serializedTask: ByteBuffer)
    extends Runnable {
    override def run() {
      SparkEnv.set(env)
      Thread.currentThread.setContextClassLoader(classLoader)
      val ser = SparkEnv.get.closureSerializer.newInstance()
      logInfo("Running task ID " + taskId)
      context.statusUpdate(taskId, TaskState.RUNNING, EMPTY_BYTE_BUFFER)
      try {
        SparkEnv.set(env)
        Thread.currentThread.setContextClassLoader(classLoader)
        Accumulators.clear()
        val task = ser.deserialize[Task[Any]](serializedTask, classLoader)
        logInfo("Its generation is " + task.generation)
        env.mapOutputTracker.updateGeneration(task.generation)
        val value = task.run(taskId.toInt)
        // Ship the task's value together with any accumulator updates.
        val accumUpdates = Accumulators.values
        val result = new TaskResult(value, accumUpdates)
        val serializedResult = ser.serialize(result)
        logInfo("Serialized size of result for " + taskId + " is " + serializedResult.limit)
        context.statusUpdate(taskId, TaskState.FINISHED, serializedResult)
        env.eventReporter.reportTaskChecksum(task, accumUpdates, serializedResult.array)
        logInfo("Finished task ID " + taskId)
      } catch {
        case ffe: FetchFailedException => {
          // Shuffle fetch failure has a dedicated TaskEndReason for the scheduler.
          val reason = ffe.toTaskEndReason
          context.statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason))
        }
        case t: Throwable => {
          val reason = ExceptionFailure(t)
          context.statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason))

          // TODO: Should we exit the whole executor here? On the one hand, the failed task may
          // have left some weird state around depending on when the exception was thrown, but on
          // the other hand, maybe we could detect that when future tasks fail and exit then.
          logError("Exception in task ID " + taskId, t)
          env.eventReporter.reportException(t, taskId)
          //System.exit(1)
        }
      }
    }
  }
/**
* Create a ClassLoader for use in tasks, adding any JARs specified by the user or any classes
* created by the interpreter to the search path
*/
/**
 * Build the ClassLoader used for running tasks. JARs listed in the
 * spark.jar.uris system property are downloaded to the working directory
 * and appended to the classpath; when the REPL is active, its class-serving
 * loader (spark.repl.ExecutorClassLoader) is layered on top.
 */
private def createClassLoader(): ClassLoader = {
  val baseLoader = this.getClass.getClassLoader
  // Fetch every JAR named in spark.jar.uris into the current directory.
  // We assume each URL has a unique file name so no local filenames clash
  // in this process; this is guaranteed by ClusterScheduler.
  val jarNames = for (uri <- System.getProperty("spark.jar.uris", "").split(",") if uri.size > 0) yield {
    val url = new URL(uri)
    val name = url.getPath.split("/").last
    downloadFile(url, name)
    name
  }
  val jarLoader =
    if (jarNames.size > 0) {
      new URLClassLoader(jarNames.map(f => new File(f).toURI.toURL), baseLoader)
    } else {
      baseLoader
    }
  // If the REPL is in use, add another ClassLoader that reads new classes
  // defined by the REPL as the user types code.
  val classUri = System.getProperty("spark.repl.class.uri")
  if (classUri == null) {
    jarLoader
  } else {
    logInfo("Using REPL class URI: " + classUri)
    try {
      val klass = Class.forName("spark.repl.ExecutorClassLoader")
        .asInstanceOf[Class[_ <: ClassLoader]]
      klass.getConstructor(classOf[String], classOf[ClassLoader]).newInstance(classUri, jarLoader)
    } catch {
      // REPL module not on the classpath — fall back to the plain loader.
      case _: ClassNotFoundException => jarLoader
    }
  }
}
// Download a file from a given URL to the local filesystem
/**
 * Download the contents of `url` into the local file `localPath`.
 * Utils.copyStream is called with closeStreams = true, so on the normal
 * path both streams are closed after the copy.
 */
private def downloadFile(url: URL, localPath: String) {
  val in = url.openStream()
  val out =
    try {
      new FileOutputStream(localPath)
    } catch {
      // Don't leak the open input stream if the local file can't be created
      // (e.g. permission denied); rethrow the original failure.
      case e: Throwable =>
        in.close()
        throw e
    }
  Utils.copyStream(in, out, true)
}
}
| ankurdave/arthur | core/src/main/scala/spark/executor/Executor.scala | Scala | bsd-3-clause | 5,508 |
package models
/*
import java.util.UUID
import javax.inject.Inject
import models.daos.UserDAOImpl
import scala.concurrent.Await
import scala.concurrent.duration._
import play.api.db.Databases
import play.api.inject.guice.GuiceApplicationBuilder
import com.google.inject.AbstractModule
import com.mohiva.play.silhouette.api.{ Environment, LoginInfo }
import com.mohiva.play.silhouette.test._
import net.codingwell.scalaguice.ScalaModule
import org.specs2.mock.Mockito
import org.specs2.specification.Scope
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.concurrent.Execution.Implicits._
import play.api.test.{ FakeRequest, PlaySpecification, WithApplication }
import utils.auth.DefaultEnv
/**
* Test case for the [[models.daos.UserDAOImpl]] class.
*/
class UserDAOImplSpec @Inject() (users: UserDAOImpl) extends PlaySpecification with Mockito {
import models.User
val application = new GuiceApplicationBuilder()
.overrides(bind[DatabaseConfigProvider].to[MockComponent])
.build
val testUser = User(
userID = UUID.randomUUID(),
loginInfo = LoginInfo("", ""),
email = Some("a@a.com"),
classYear = 0,
activated = false)
val database = Databases.withInMemory(
name = "mydatabase",
urlOptions = Map("MODE" -> "MYSQL"),
config = Map("logStatements" -> true)) { database =>
sequential
"The `User` DAO" should {
"Be able to save a new User" in {
Await.result(users.save(testUser), 10000 nanos) shouldEqual testUser
}
}
}
}
*/
| yoo-haemin/hufs-planner | project/test/models/UserDAOImplSpec.scala | Scala | agpl-3.0 | 1,552 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.shellbase.commands
import com.sumologic.shellbase.ShellCommand
import jline.console.ConsoleReader
import org.apache.commons.cli.CommandLine
/**
 * Shell command registered as `clear` that wipes the terminal display.
 */
class ClearCommand extends ShellCommand("clear", "Clear the screen.") {
  /** Clears the console via JLine and reports success. */
  def execute(cmdLine: CommandLine) = {
    val console = new ConsoleReader()
    console.clearScreen
    true
  }
}
| SumoLogic/shellbase | shellbase-core/src/main/scala/com/sumologic/shellbase/commands/ClearCommand.scala | Scala | apache-2.0 | 1,134 |
package mesosphere.marathon
import mesosphere.UnitTest
import mesosphere.marathon.state.ResourceRole
import mesosphere.marathon.test.MarathonTestHelper
import mesosphere.marathon.ZookeeperConf.ZkUrl
import scala.util.{Failure, Try}
/**
 * Unit tests for command-line parsing in MarathonConf: Mesos
 * authentication flags, the accepted forms of --master, HA and
 * checkpoint toggles, and --default_accepted_resource_roles validation.
 */
class MarathonConfTest extends UnitTest {
private[this] val principal = "foo"
private[this] val secretFile = "/bar/baz"
"MarathonConf" should {
// Authentication flags are optional; checkpointing defaults to on.
"MesosAuthenticationIsOptional" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050"
)
assert(conf.mesosAuthenticationPrincipal.isEmpty)
assert(conf.mesosAuthenticationSecretFile.isEmpty)
assert(conf.checkpoint.toOption == Some(true))
}
"MesosAuthenticationPrincipal" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal
)
assert(conf.mesosAuthenticationPrincipal.isDefined)
assert(conf.mesosAuthenticationPrincipal.toOption == Some(principal))
assert(conf.mesosAuthenticationSecretFile.isEmpty)
}
"MesosAuthenticationSecretFile" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal,
"--mesos_authentication_secret_file", secretFile
)
assert(conf.mesosAuthenticationPrincipal.isDefined)
assert(conf.mesosAuthenticationPrincipal.toOption == Some(principal))
assert(conf.mesosAuthenticationSecretFile.isDefined)
assert(conf.mesosAuthenticationSecretFile.toOption == Some(secretFile))
}
// --master accepts three syntaxes: zk:// URL, http:// URL, or a bare host:port.
"--master" should {
"allow a valid zookeeper URL" in {
val conf = MarathonTestHelper.makeConfig("--master", "zk://127.0.0.1:2181/mesos")
conf.mesosMaster() shouldBe MarathonConf.MesosMasterConnection.Zk(ZkUrl.parse("zk://127.0.0.1:2181/mesos").right.get)
}
"reject an invalid zookeeper URL" in {
Try(MarathonTestHelper.makeConfig("--master", "zk://127.0.0.1:lol/mesos")).isFailure shouldBe true
}
"allows an HTTP URL" in {
val conf = MarathonTestHelper.makeConfig("--master", "http://127.0.0.1:5050")
conf.mesosMaster() shouldBe MarathonConf.MesosMasterConnection.Http(new java.net.URL("http://127.0.0.1:5050"))
}
"allows an unspecified protocol" in {
val conf = MarathonTestHelper.makeConfig("--master", "127.0.0.1:5050")
conf.mesosMaster() shouldBe MarathonConf.MesosMasterConnection.Unspecified("127.0.0.1:5050")
}
}
"Secret can be specified directly" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal,
"--mesos_authentication_secret", "top secret"
)
assert(conf.mesosAuthenticationSecretFile.isEmpty)
assert(conf.mesosAuthenticationPrincipal.toOption.contains(principal))
assert(conf.mesosAuthenticationSecret.toOption.contains("top secret"))
}
// The inline secret and the secret file are mutually exclusive options.
"Secret and SecretFile can not be specified at the same time" in {
Try(MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal,
"--mesos_authentication_secret", "top secret",
"--mesos_authentication_secret_file", secretFile
)) match {
case Failure(ex) => ex.getMessage should include("There should be only one or zero of the following options: mesos_authentication_secret, mesos_authentication_secret_file")
case _ => fail("Should give an error")
}
}
"HA mode is enabled by default" in {
val conf = MarathonTestHelper.defaultConfig()
assert(conf.highlyAvailable())
}
"Disable HA mode" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--disable_ha"
)
assert(!conf.highlyAvailable())
}
"Checkpointing is enabled by default" in {
val conf = MarathonTestHelper.defaultConfig()
assert(conf.checkpoint())
}
"Disable checkpoint" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--disable_checkpoint"
)
assert(!conf.checkpoint())
}
// Accepted resource roles must be a subset of {*, <mesos_role>}.
"--default_accepted_resource_roles *,marathon will fail without --mesos_role marathon" in {
val triedConfig = Try(MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--default_accepted_resource_roles", "*,marathon"
)
)
assert(triedConfig.isFailure)
triedConfig match {
case Failure(e) if e.getMessage ==
"requirement failed: " +
"--default_accepted_resource_roles contains roles for which we will not receive offers: marathon" =>
case other =>
fail(s"unexpected triedConfig: $other")
}
}
"--default_accepted_resource_roles *,marathon with --mesos_role marathon" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_role", "marathon",
"--default_accepted_resource_roles", "*,marathon"
)
assert(conf.defaultAcceptedResourceRolesSet == Set(ResourceRole.Unreserved, "marathon"))
}
"--default_accepted_resource_roles *" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--default_accepted_resource_roles", "*"
)
assert(conf.defaultAcceptedResourceRolesSet == Set(ResourceRole.Unreserved))
}
"--default_accepted_resource_roles default without --mesos_role" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050"
)
assert(conf.defaultAcceptedResourceRolesSet == Set(ResourceRole.Unreserved))
}
"--default_accepted_resource_roles default with --mesos_role" in {
val conf = MarathonTestHelper.makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_role", "marathon"
)
assert(conf.defaultAcceptedResourceRolesSet == Set(ResourceRole.Unreserved, "marathon"))
}
}
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/MarathonConfTest.scala | Scala | apache-2.0 | 6,067 |
package im.actor.server.push.google
import akka.actor.{ ActorRef, ActorSystem }
import akka.event.Logging
import akka.stream.actor.ActorPublisher
import akka.stream.scaladsl.{ Flow, Source }
import akka.{ Done, NotUsed }
import cats.data.Xor
import im.actor.server.push.google.GooglePushDelivery.Delivery
import io.circe.parser
import spray.client.pipelining._
import spray.http.{ HttpCharsets, StatusCodes }
import scala.concurrent.Future
import scala.util.{ Failure, Success }
// Consumes push-notification deliveries from `publisher`, sends each HTTP
// request (see `flow`), and inspects the GCM-style JSON response; tokens
// reported as invalid/unregistered are removed via the `remove` callback.
private[google] final class DeliveryStream(publisher: ActorRef, serviceName: String, remove: String ⇒ Future[_])(implicit system: ActorSystem) {
import system.dispatcher
private val log = Logging(system, getClass)
private implicit val mat = tolerantMaterializer
log.debug("Starting {} stream", serviceName)
// The stream starts running as soon as this class is instantiated.
val stream: Future[Done] = Source
.fromPublisher(ActorPublisher[NotificationDelivery](publisher))
.via(flow)
.runForeach {
// TODO: flatten
case Xor.Right((body, delivery)) ⇒
// Request succeeded with HTTP 200; parse the response body as JSON.
parser.parse(body) match {
case Xor.Right(json) ⇒
json.asObject match {
case Some(obj) ⇒
// An "error" field signals per-token failure even on HTTP 200.
// NOTE(review): the Future returned by `remove` is discarded, so
// removal failures are not observed here — confirm this is intended.
obj("error") flatMap (_.asString) match {
case Some("InvalidRegistration") ⇒
log.warning("{}: Invalid registration, deleting", serviceName)
remove(delivery.m.to)
case Some("NotRegistered") ⇒
log.warning("{}: Token is not registered, deleting", serviceName)
remove(delivery.m.to)
case Some(other) ⇒
log.warning("{}: Error in response: {}", serviceName, other)
case None ⇒
log.debug("{}: Successfully delivered: {}", serviceName, delivery)
}
case None ⇒
log.error("{}: Expected JSON Object but got: {}", serviceName, json)
}
case Xor.Left(failure) ⇒ log.error(failure.underlying, "{}: Failed to parse response", serviceName)
}
case Xor.Left(e) ⇒
log.error(e, "{}: Failed to make request", serviceName)
}
// Log stream termination; a Failure here means the whole stream died.
stream onComplete {
case Failure(e) ⇒
log.error(e, "{}: Failure in stream", serviceName)
case Success(_) ⇒
log.debug("{}: Stream completed", serviceName)
}
// Sends each HTTP request (at most 2 in flight) and classifies the reply:
// Right(body -> delivery) on HTTP 200, Left(exception) otherwise.
private def flow(implicit system: ActorSystem): Flow[NotificationDelivery, Xor[RuntimeException, (String, Delivery)], NotUsed] = {
import system.dispatcher
val pipeline = sendReceive
Flow[NotificationDelivery].mapAsync(2) {
case (req, del) ⇒
pipeline(req) map { resp ⇒
if (resp.status == StatusCodes.OK)
Xor.Right(resp.entity.data.asString(HttpCharsets.`UTF-8`) → del)
else
Xor.Left(new RuntimeException(s"Failed to deliver message, StatusCode was not OK: ${resp.status}"))
}
}
}
}
| ufosky-server/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/push/google/DeliveryStream.scala | Scala | agpl-3.0 | 2,921 |
package org.jetbrains.plugins.scala.lang.psi.impl.expr
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.types.api.Nothing
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.psi.types.{ScType, ScTypeExt}
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
// PSI implementation of a Scala `match` expression.
class ScMatchStmtImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScMatchStmt {
override def accept(visitor: PsiElementVisitor): Unit = {
visitor match {
// NOTE(review): both branches call super.accept, but the first binds the
// visitor with the more specific static type ScalaElementVisitor, which
// may select a different overload in the superclass. Confirm before
// simplifying this match away.
case visitor: ScalaElementVisitor => super.accept(visitor)
case _ => super.accept(visitor)
}
}
override def toString: String = "MatchStatement"
// The type of a match expression is the least upper bound of its branch
// types; branches whose type can't be computed contribute Nothing.
protected override def innerType: TypeResult = {
val branchesTypes = getBranches.map(_.`type`().getOrNothing)
val branchesLub = branchesTypes.foldLeft(Nothing: ScType)(_.lub(_))
Right(branchesLub)
}
}
} | gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScMatchStmtImpl.scala | Scala | apache-2.0 | 1,125 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.v2.avro
import java.net.URI
import scala.util.control.NonFatal
import org.apache.avro.file.DataFileReader
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.mapred.FsInput
import org.apache.hadoop.fs.Path
import org.apache.spark.TaskContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.sql.avro.{AvroDeserializer, AvroOptions, AvroUtils}
import org.apache.spark.sql.catalyst.{InternalRow, NoopFilters, OrderedFilters}
import org.apache.spark.sql.connector.read.PartitionReader
import org.apache.spark.sql.execution.datasources.{DataSourceUtils, PartitionedFile}
import org.apache.spark.sql.execution.datasources.v2.{EmptyPartitionReader, FilePartitionReaderFactory, PartitionReaderWithPartitionValues}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.SerializableConfiguration
/**
* A factory used to create AVRO readers.
*
* @param sqlConf SQL configuration.
* @param broadcastedConf Broadcast serializable Hadoop Configuration.
* @param dataSchema Schema of AVRO files.
* @param readDataSchema Required data schema of AVRO files.
* @param partitionSchema Schema of partitions.
* @param parsedOptions Options for parsing AVRO files.
*/
case class AvroPartitionReaderFactory(
sqlConf: SQLConf,
broadcastedConf: Broadcast[SerializableConfiguration],
dataSchema: StructType,
readDataSchema: StructType,
partitionSchema: StructType,
parsedOptions: AvroOptions,
filters: Seq[Filter]) extends FilePartitionReaderFactory with Logging {
private val datetimeRebaseModeInRead = parsedOptions.datetimeRebaseModeInRead
// Builds a row reader for one file split. Files not ending in ".avro" are
// skipped (empty reader) unless ignoreExtension is set.
override def buildReader(partitionedFile: PartitionedFile): PartitionReader[InternalRow] = {
val conf = broadcastedConf.value.value
val userProvidedSchema = parsedOptions.schema
if (parsedOptions.ignoreExtension || partitionedFile.filePath.endsWith(".avro")) {
val reader = {
val in = new FsInput(new Path(new URI(partitionedFile.filePath)), conf)
try {
// Honor a user-supplied reader schema if one was given in the options.
val datumReader = userProvidedSchema match {
case Some(userSchema) => new GenericDatumReader[GenericRecord](userSchema)
case _ => new GenericDatumReader[GenericRecord]()
}
DataFileReader.openReader(in, datumReader)
} catch {
// Close the input on failure so the underlying FS stream doesn't leak.
case NonFatal(e) =>
logError("Exception while opening DataFileReader", e)
in.close()
throw e
}
}
// Ensure that the reader is closed even if the task fails or doesn't consume the entire
// iterator of records.
Option(TaskContext.get()).foreach { taskContext =>
taskContext.addTaskCompletionListener[Unit] { _ =>
reader.close()
}
}
// Position the reader at the first sync marker at/after this split's start.
reader.sync(partitionedFile.start)
val datetimeRebaseMode = DataSourceUtils.datetimeRebaseSpec(
reader.asInstanceOf[DataFileReader[_]].getMetaString,
datetimeRebaseModeInRead)
// Only push filters into the Avro deserializer when the flag is enabled.
val avroFilters = if (SQLConf.get.avroFilterPushDown) {
new OrderedFilters(filters, readDataSchema)
} else {
new NoopFilters
}
val fileReader = new PartitionReader[InternalRow] with AvroUtils.RowReader {
override val fileReader = reader
override val deserializer = new AvroDeserializer(
userProvidedSchema.getOrElse(reader.getSchema),
readDataSchema,
parsedOptions.positionalFieldMatching,
datetimeRebaseMode,
avroFilters)
// Stop once the reader passes the end of this split.
override val stopPosition = partitionedFile.start + partitionedFile.length
override def next(): Boolean = hasNextRow
override def get(): InternalRow = nextRow
override def close(): Unit = reader.close()
}
// Append partition-column values to each data row.
new PartitionReaderWithPartitionValues(fileReader, readDataSchema,
partitionSchema, partitionedFile.partitionValues)
} else {
new EmptyPartitionReader[InternalRow]
}
}
}
| mahak/spark | external/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroPartitionReaderFactory.scala | Scala | apache-2.0 | 4,900 |
package co.ledger.wallet.web.ripple.core.database
import co.ledger.wallet.web.ripple.core.idb.IndexedDb
import com.sun.org.apache.xpath.internal.functions.FuncTranslate
import org.scalajs.dom.idb
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}
/**
*
* DatabaseDeclaration
* ledger-wallet-ripple-chrome
*
* Created by Pierre Pollastri on 07/06/2016.
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
/**
 * Describes an IndexedDB database: its name, schema version, and the
 * models (object stores) it contains. Caches the open connection so
 * repeated calls to obtainConnection() reuse it.
 */
trait DatabaseDeclaration {
  def name: String
  def version: Int
  def models: Seq[QueryHelper[_]]

  /** Opens the database, creating every model's store, and caches the connection. */
  def open(): Future[idb.Database] = {
    val opening = IndexedDb.open(name, Some(version)) { (connection, transaction) =>
      // Create all stores declared by the models.
      models.foreach(_.creator.create(connection, transaction))
    }
    opening andThen {
      case Success(c) => _connection = Option(c)
      case Failure(error) => error.printStackTrace()
    }
  }

  /** Returns the cached connection, opening the database if necessary. */
  def obtainConnection(): Future[idb.Database] =
    connection.map(Future.successful).getOrElse(open())

  def close(): Unit = _connection.foreach(_.close())
  def connection: Option[idb.Database] = _connection
  def delete() = IndexedDb.delete(name)
  private var _connection: Option[idb.Database] = None
}
| LedgerHQ/ledger-wallet-ripple | src/main/scala/co/ledger/wallet/web/ripple/core/database/DatabaseDeclaration.scala | Scala | mit | 2,432 |
package test
// Fixture for the regression test below: two overloads of `foo` that are
// disambiguated by argument type.
trait ImportMe {
def foo(i: Int) = 1
def foo(s: String) = 2
}
// Regression test: importing the same stable identifier twice must NOT make
// references ambiguous. The duplicated import below is deliberate — do not
// "clean it up". The commented error shows the historical (wrong) behavior.
class Test(val importMe: ImportMe) {
import importMe._
import importMe._
// A.scala:12: error: reference to foo is ambiguous;
// it is imported twice in the same scope by
// import importMe._
// and import importMe._
// println(foo(1))
// ^
println(foo(1))
}
| som-snytt/dotty | tests/pos/t6117.scala | Scala | apache-2.0 | 371 |
package scifn.func
/**
 * Holds two lists of [[Basis]] values collected while evaluating a function
 * over an input of type `A`: `missing` and `errors` (presumably bases whose
 * input values were absent vs. bases that raised errors — confirm against
 * the Basis/Fn implementation).
 */
final case class FnDiagnostics[-A](missing: List[Basis[A, Any]], errors: List[Basis[A, Any]])
| scifn/scifn | scifn-gen/src/main/scala/scifn/func/FnDiagnostics.scala | Scala | mit | 114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.hibench.sparkbench.ml
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.clustering.{LDA, DistributedLDAModel, LocalLDAModel}
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.rdd.RDD
import scopt.OptionParser
/**
 * HiBench LDA workload: parses CLI options, reads a pre-vectorized corpus
 * of (docId, termCountVector) pairs, fits an LDA topic model, saves it,
 * and reloads it to exercise model persistence.
 */
object LDAExample {

  /**
   * Command-line parameters.
   *
   * @param inputPath     path of the object file holding the corpus
   * @param outputPath    path where the fitted model is saved
   * @param numTopics     number of topics (k)
   * @param maxIterations maximum number of EM/online iterations
   * @param optimizer     LDA optimizer, "online" or "em"
   * @param maxResultSize value for spark.driver.maxResultSize
   */
  case class Params(
    inputPath: String = null,
    outputPath: String = null,
    numTopics: Int = 10,
    maxIterations: Int = 10,
    optimizer: String = "online",
    maxResultSize: String = "1g")

  def main(args: Array[String]): Unit = {
    val defaultParams = Params()

    val parser = new OptionParser[Params]("LDA") {
      head("LDA: an example app for LDA.")
      opt[String]("optimizer")
        .text(s"optimizer, default: ${defaultParams.optimizer}")
        .action((x, c) => c.copy(optimizer = x))
      opt[String]("maxResultSize")
        // FIX: this help string was missing the `s` interpolator, so the
        // literal text "${defaultParams.maxResultSize}" was shown to users.
        .text(s"max resultSize, default: ${defaultParams.maxResultSize}")
        .action((x, c) => c.copy(maxResultSize = x))
      opt[Int]("numTopics")
        .text(s"number of Topics, default: ${defaultParams.numTopics}")
        .action((x, c) => c.copy(numTopics = x))
      opt[Int]("maxIterations")
        .text(s"number of max iterations, default: ${defaultParams.maxIterations}")
        .action((x, c) => c.copy(maxIterations = x))
      arg[String]("<inputPath>")
        .required()
        .text("Input paths")
        .action((x, c) => c.copy(inputPath = x))
      arg[String]("<outputPath>")
        .required()
        .text("outputPath paths")
        .action((x, c) => c.copy(outputPath = x))
    }

    // Exit non-zero when option parsing fails (scopt already printed usage).
    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }
  }

  /** Fits, saves, and reloads the LDA model described by `params`. */
  def run(params: Params): Unit = {
    val conf = new SparkConf()
      .setAppName(s"LDA Example with $params")
      .set("spark.driver.maxResultSize", params.maxResultSize)
    val sc = new SparkContext(conf)

    val corpus: RDD[(Long, Vector)] = sc.objectFile(params.inputPath)

    // Cluster the documents into numTopics topics using LDA
    val ldaModel = new LDA().setK(params.numTopics).setMaxIterations(params.maxIterations).setOptimizer(params.optimizer).run(corpus)

    // Save the model, then load it back to verify persistence round-trips.
    ldaModel.save(sc, params.outputPath)
    val savedModel = LocalLDAModel.load(sc, params.outputPath)

    sc.stop()
  }
}
| maismail/HiBench | sparkbench/ml/src/main/scala/com/intel/sparkbench/ml/LDAExample.scala | Scala | apache-2.0 | 3,162 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.io.File
import scala.util.Random
import org.apache.hadoop.fs.Path
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.sql._
import org.apache.spark.sql.execution.DataSourceScanExec
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types._
abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with TestHiveSingleton {
import spark.implicits._
val dataSourceName: String
protected val parquetDataSourceName: String =
classOf[org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat].getCanonicalName
private def isParquetDataSource: Boolean = dataSourceName == parquetDataSourceName
protected def supportsDataType(dataType: DataType): Boolean = true
val dataSchema =
StructType(
Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = false)))
lazy val testDF = (1 to 3).map(i => (i, s"val_$i")).toDF("a", "b")
lazy val partitionedTestDF1 = (for {
i <- 1 to 3
p2 <- Seq("foo", "bar")
} yield (i, s"val_$i", 1, p2)).toDF("a", "b", "p1", "p2")
lazy val partitionedTestDF2 = (for {
i <- 1 to 3
p2 <- Seq("foo", "bar")
} yield (i, s"val_$i", 2, p2)).toDF("a", "b", "p1", "p2")
lazy val partitionedTestDF = partitionedTestDF1.union(partitionedTestDF2)
// Runs a fixed battery of queries against `df` — which must contain the
// partitionedTestDF data (columns a, b and partitions p1, p2) — and checks
// each answer: full scan, filter + partition pruning, projections, column
// duplication (SPARK-7858), and a self-join through a temp view.
def checkQueries(df: DataFrame): Unit = {
// Selects everything
checkAnswer(
df,
for (i <- 1 to 3; p1 <- 1 to 2; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", p1, p2))
// Simple filtering and partition pruning
checkAnswer(
df.filter($"a" > 1 && $"p1" === 2),
for (i <- 2 to 3; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", 2, p2))
// Simple projection and filtering
checkAnswer(
df.filter($"a" > 1).select($"b", $"a" + 1),
for (i <- 2 to 3; _ <- 1 to 2; _ <- Seq("foo", "bar")) yield Row(s"val_$i", i + 1))
// Simple projection and partition pruning
checkAnswer(
df.filter($"a" > 1 && $"p1" < 2).select($"b", $"p1"),
for (i <- 2 to 3; _ <- Seq("foo", "bar")) yield Row(s"val_$i", 1))
// Project many copies of columns with different types (reproduction for SPARK-7858)
checkAnswer(
df.filter($"a" > 1 && $"p1" < 2).select($"b", $"b", $"b", $"b", $"p1", $"p1", $"p1", $"p1"),
for (i <- 2 to 3; _ <- Seq("foo", "bar"))
yield Row(s"val_$i", s"val_$i", s"val_$i", s"val_$i", 1, 1, 1, 1))
// Self-join
df.createOrReplaceTempView("t")
withTempView("t") {
checkAnswer(
sql(
"""SELECT l.a, r.b, l.p1, r.p2
|FROM t l JOIN t r
|ON l.a = r.a AND l.p1 = r.p1 AND l.p2 = r.p2
""".stripMargin),
for (i <- 1 to 3; p1 <- 1 to 2; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", p1, p2))
}
}
private val supportedDataTypes = Seq(
StringType, BinaryType,
NullType, BooleanType,
ByteType, ShortType, IntegerType, LongType,
FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
DateType, TimestampType,
ArrayType(IntegerType),
MapType(StringType, LongType),
new StructType()
.add("f1", FloatType, nullable = true)
.add("f2", ArrayType(BooleanType, containsNull = true), nullable = true),
new TestUDT.MyDenseVectorUDT()
).filter(supportsDataType)
// Round-trips random data of every supported type through write + read and
// compares the results. For Parquet the test additionally toggles dictionary
// encoding, and for date/time types it toggles the Java 8 time API.
test(s"test all data types") {
  val parquetDictionaryEncodingEnabledConfs = if (isParquetDataSource) {
    // Run with/without Parquet dictionary encoding enabled for Parquet data source.
    Seq(true, false)
  } else {
    Seq(false)
  }
  for (dataType <- supportedDataTypes) {
    for (parquetDictionaryEncodingEnabled <- parquetDictionaryEncodingEnabledConfs) {
      val extraMessage = if (isParquetDataSource) {
        s" with parquet.enable.dictionary = $parquetDictionaryEncodingEnabled"
      } else {
        ""
      }
      logInfo(s"Testing $dataType data type$extraMessage")

      val extraOptions = Map[String, String](
        "parquet.enable.dictionary" -> parquetDictionaryEncodingEnabled.toString,
        "timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX"
      )

      withTempPath { file =>
        val path = file.getCanonicalPath

        // Log the seed so a failing run can be reproduced deterministically.
        val seed = System.nanoTime()
        withClue(s"Random data generated with the seed: ${seed}") {
          val java8ApiConfValues = if (dataType == DateType || dataType == TimestampType) {
            Seq(false, true)
          } else {
            Seq(false)
          }
          java8ApiConfValues.foreach { java8Api =>
            withSQLConf(
              SQLConf.DATETIME_JAVA8API_ENABLED.key -> java8Api.toString,
              SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key -> CORRECTED.toString,
              SQLConf.LEGACY_AVRO_REBASE_MODE_IN_WRITE.key -> CORRECTED.toString) {
              val dataGenerator = RandomDataGenerator.forType(
                dataType = dataType,
                nullable = true,
                new Random(seed)
              ).getOrElse {
                fail(s"Failed to create data generator for schema $dataType")
              }

              // Create a DF for the schema with random data. The index field is used to sort the
              // DataFrame. This is a workaround for SPARK-10591.
              val schema = new StructType()
                .add("index", IntegerType, nullable = false)
                .add("col", dataType, nullable = true)
              val rdd =
                spark.sparkContext.parallelize((1 to 20).map(i => Row(i, dataGenerator())))
              val df = spark.createDataFrame(rdd, schema).orderBy("index").coalesce(1)

              df.write
                .mode("overwrite")
                .format(dataSourceName)
                .option("dataSchema", df.schema.json)
                .options(extraOptions)
                .save(path)

              val loadedDF = spark
                .read
                .format(dataSourceName)
                .option("dataSchema", df.schema.json)
                .schema(df.schema)
                .options(extraOptions)
                .load(path)
                .orderBy("index")

              checkAnswer(loadedDF, df)
            }
          }
        }
      }
    }
  }
}
// Writing twice with Overwrite must leave exactly one copy of the data.
test("save()/load() - non-partitioned table - Overwrite") {
  withTempPath { file =>
    testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)
    testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)

    checkAnswer(
      spark.read.format(dataSourceName)
        .option("path", file.getCanonicalPath)
        .option("dataSchema", dataSchema.json)
        .load(),
      testDF.collect())
  }
}

// Overwrite then Append must yield the data twice.
test("save()/load() - non-partitioned table - Append") {
  withTempPath { file =>
    testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)
    testDF.write.mode(SaveMode.Append).format(dataSourceName).save(file.getCanonicalPath)

    checkAnswer(
      spark.read.format(dataSourceName)
        .option("dataSchema", dataSchema.json)
        .load(file.getCanonicalPath).orderBy("a"),
      testDF.union(testDF).orderBy("a").collect())
  }
}

// withTempDir creates the directory up front, so ErrorIfExists must refuse to write.
test("save()/load() - non-partitioned table - ErrorIfExists") {
  withTempDir { file =>
    intercept[AnalysisException] {
      testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).save(file.getCanonicalPath)
    }
  }
}

// Ignore on a pre-existing (empty) directory must be a no-op: nothing written.
test("save()/load() - non-partitioned table - Ignore") {
  withTempDir { file =>
    testDF.write.mode(SaveMode.Ignore).format(dataSourceName).save(file.getCanonicalPath)

    val path = new Path(file.getCanonicalPath)
    val fs = path.getFileSystem(spark.sessionState.newHadoopConf())
    assert(fs.listStatus(path).isEmpty)
  }
}
// Sanity queries (projection/filter on data and partition columns) against a
// freshly written partitioned table.
test("save()/load() - partitioned table - simple queries") {
  withTempPath { file =>
    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.ErrorIfExists)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    checkQueries(
      spark.read.format(dataSourceName)
        .option("dataSchema", dataSchema.json)
        .load(file.getCanonicalPath))
  }
}

// Two Overwrite writes of a partitioned table leave a single copy of the data.
test("save()/load() - partitioned table - Overwrite") {
  withTempPath { file =>
    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    checkAnswer(
      spark.read.format(dataSourceName)
        .option("dataSchema", dataSchema.json)
        .load(file.getCanonicalPath),
      partitionedTestDF.collect())
  }
}

// Appending the same partitioned data doubles every row.
test("save()/load() - partitioned table - Append") {
  withTempPath { file =>
    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Append)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    checkAnswer(
      spark.read.format(dataSourceName)
        .option("dataSchema", dataSchema.json)
        .load(file.getCanonicalPath),
      partitionedTestDF.union(partitionedTestDF).collect())
  }
}

// Appending disjoint partition values: the union of both writes must be visible.
test("save()/load() - partitioned table - Append - new partition values") {
  withTempPath { file =>
    partitionedTestDF1.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    partitionedTestDF2.write
      .format(dataSourceName)
      .mode(SaveMode.Append)
      .partitionBy("p1", "p2")
      .save(file.getCanonicalPath)

    checkAnswer(
      spark.read.format(dataSourceName)
        .option("dataSchema", dataSchema.json)
        .load(file.getCanonicalPath),
      partitionedTestDF.collect())
  }
}

// withTempDir creates the directory up front, so ErrorIfExists must refuse to write.
test("save()/load() - partitioned table - ErrorIfExists") {
  withTempDir { file =>
    intercept[AnalysisException] {
      partitionedTestDF.write
        .format(dataSourceName)
        .mode(SaveMode.ErrorIfExists)
        .partitionBy("p1", "p2")
        .save(file.getCanonicalPath)
    }
  }
}
// Ignore mode on an existing (empty) directory must be a no-op: nothing written.
test("save()/load() - partitioned table - Ignore") {
  withTempDir { file =>
    partitionedTestDF.write
      .format(dataSourceName).mode(SaveMode.Ignore).save(file.getCanonicalPath)

    val path = new Path(file.getCanonicalPath)
    // Use the session-scoped Hadoop configuration, consistent with the
    // non-partitioned variant of this test above. The previous
    // `SparkHadoopUtil.get.conf` bypassed session-level Hadoop conf overrides
    // and relies on an internal API.
    val fs = path.getFileSystem(spark.sessionState.newHadoopConf())
    assert(fs.listStatus(path).isEmpty)
  }
}
// saveAsTable with Overwrite creates/replaces metastore table "t".
test("saveAsTable()/load() - non-partitioned table - Overwrite") {
  testDF.write.format(dataSourceName).mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .saveAsTable("t")

  withTable("t") {
    checkAnswer(spark.table("t"), testDF.collect())
  }
}

// Overwrite then Append into the same metastore table doubles the rows.
test("saveAsTable()/load() - non-partitioned table - Append") {
  testDF.write.format(dataSourceName).mode(SaveMode.Overwrite).saveAsTable("t")
  testDF.write.format(dataSourceName).mode(SaveMode.Append).saveAsTable("t")

  withTable("t") {
    checkAnswer(spark.table("t"), testDF.union(testDF).orderBy("a").collect())
  }
}

// ErrorIfExists against a pre-created table must fail with "already exists".
test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
  withTable("t") {
    sql(s"CREATE TABLE t(i INT) USING $dataSourceName")
    val msg = intercept[AnalysisException] {
      testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
    }.getMessage
    assert(msg.contains("Table `t` already exists"))
  }
}

// Ignore against a pre-created table must leave it untouched (still empty).
test("saveAsTable()/load() - non-partitioned table - Ignore") {
  withTable("t") {
    sql(s"CREATE TABLE t(i INT) USING $dataSourceName")
    testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
    assert(spark.table("t").collect().isEmpty)
  }
}

test("saveAsTable()/load() - partitioned table - simple queries") {
  partitionedTestDF.write.format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .saveAsTable("t")

  withTable("t") {
    checkQueries(spark.table("t"))
  }
}

// Boolean partition column values must round-trip through the metastore.
test("saveAsTable()/load() - partitioned table - boolean type") {
  spark.range(2)
    .select($"id", ($"id" % 2 === 0).as("b"))
    .write.partitionBy("b").saveAsTable("t")

  withTable("t") {
    checkAnswer(
      spark.table("t").sort($"id"),
      Row(0, true) :: Row(1, false) :: Nil
    )
  }
}

test("saveAsTable()/load() - partitioned table - Overwrite") {
  partitionedTestDF.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  partitionedTestDF.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  withTable("t") {
    checkAnswer(spark.table("t"), partitionedTestDF.collect())
  }
}

test("saveAsTable()/load() - partitioned table - Append") {
  partitionedTestDF.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  partitionedTestDF.write
    .format(dataSourceName)
    .mode(SaveMode.Append)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  withTable("t") {
    checkAnswer(spark.table("t"), partitionedTestDF.union(partitionedTestDF).collect())
  }
}

test("saveAsTable()/load() - partitioned table - Append - new partition values") {
  partitionedTestDF1.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  partitionedTestDF2.write
    .format(dataSourceName)
    .mode(SaveMode.Append)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  withTable("t") {
    checkAnswer(spark.table("t"), partitionedTestDF.collect())
  }
}

test("saveAsTable()/load() - partitioned table - Append - mismatched partition columns") {
  partitionedTestDF1.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .option("dataSchema", dataSchema.json)
    .partitionBy("p1", "p2")
    .saveAsTable("t")

  // Using only a subset of all partition columns
  intercept[AnalysisException] {
    partitionedTestDF2.write
      .format(dataSourceName)
      .mode(SaveMode.Append)
      .option("dataSchema", dataSchema.json)
      .partitionBy("p1")
      .saveAsTable("t")
  }
}

// A temp view named "t" already exists, so ErrorIfExists must fail.
test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
  Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")

  withTempView("t") {
    intercept[AnalysisException] {
      partitionedTestDF.write
        .format(dataSourceName)
        .mode(SaveMode.ErrorIfExists)
        .option("dataSchema", dataSchema.json)
        .partitionBy("p1", "p2")
        .saveAsTable("t")
    }
  }
}

// A temp view named "t" already exists, so Ignore must leave it untouched.
test("saveAsTable()/load() - partitioned table - Ignore") {
  Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")

  withTempView("t") {
    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Ignore)
      .option("dataSchema", dataSchema.json)
      .partitionBy("p1", "p2")
      .saveAsTable("t")

    assert(spark.table("t").collect().isEmpty)
  }
}
// Reading a directory must not recursively pick up files from its subdirectories.
test("load() - with directory of unpartitioned data in nested subdirs") {
  withTempPath { dir =>
    val subdir = new File(dir, "subdir")

    val dataInDir = Seq(1, 2, 3).toDF("value")
    val dataInSubdir = Seq(4, 5, 6).toDF("value")

    /*
      Directory structure to be generated

      dir
        |
        |___ [ files of dataInDir ]
        |
        |___ subdir
               |
               |___ [ files of dataInSubdir ]
     */

    // Generated dataInSubdir, not data in dir
    dataInSubdir.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .save(subdir.getCanonicalPath)

    // Inferring schema should throw error as it should not find any file to infer
    val e = intercept[Exception] {
      spark.read.format(dataSourceName).load(dir.getCanonicalPath)
    }

    e match {
      case _: AnalysisException =>
        assert(e.getMessage.contains("infer"))

      case _: java.util.NoSuchElementException if e.getMessage.contains("dataSchema") =>
        // Ignore error, the source format requires schema to be provided by user
        // This is needed for SimpleTextHadoopFsRelationSuite as SimpleTextSource needs schema

      case _ =>
        fail("Unexpected error trying to infer schema from empty dir", e)
    }

    /** Test whether data is read with the given path matches the expected answer */
    def testWithPath(path: File, expectedAnswer: Seq[Row]): Unit = {
      val df = spark.read
        .format(dataSourceName)
        .schema(dataInDir.schema) // avoid schema inference for any format
        .load(path.getCanonicalPath)

      checkAnswer(df, expectedAnswer)
    }

    // Verify that reading by path 'dir/' gives empty results as there are no files in 'dir'
    // and it should not pick up files in 'dir/subdir'
    require(subdir.exists)
    require(subdir.listFiles().exists(!_.isDirectory))
    testWithPath(dir, Seq.empty)

    // Verify that if there is data in dir, then reading by path 'dir/' reads only dataInDir
    dataInDir.write
      .format(dataSourceName)
      .mode(SaveMode.Append) // append to prevent subdir from being deleted
      .save(dir.getCanonicalPath)
    require(dir.listFiles().exists(!_.isDirectory))
    require(subdir.exists())
    require(subdir.listFiles().exists(!_.isDirectory))
    testWithPath(dir, dataInDir.collect())
  }
}
// Verifies that Hadoop-style glob patterns in load paths resolve to the
// expected subsets of files written under a nested directory tree.
test("Hadoop style globbing - unpartitioned data") {
  withTempPath { file =>
    val dir = file.getCanonicalPath
    val subdir = new File(dir, "subdir")
    val subsubdir = new File(subdir, "subsubdir")
    val anotherSubsubdir =
      new File(new File(dir, "another-subdir"), "another-subsubdir")

    val dataInSubdir = Seq(1, 2, 3).toDF("value")
    val dataInSubsubdir = Seq(4, 5, 6).toDF("value")
    val dataInAnotherSubsubdir = Seq(7, 8, 9).toDF("value")

    // Writes a data set into its target directory and verifies files landed there.
    def writeTo(df: DataFrame, target: File): Unit = {
      df.write
        .format(dataSourceName)
        .mode(SaveMode.Overwrite)
        .save(target.getCanonicalPath)
      require(target.exists)
      require(target.listFiles().exists(!_.isDirectory))
    }

    writeTo(dataInSubdir, subdir)
    writeTo(dataInSubsubdir, subsubdir)
    writeTo(dataInAnotherSubsubdir, anotherSubsubdir)

    /*
      Directory structure generated

      dir
        |
        |___ subdir
        |     |
        |     |___ [ files of dataInSubdir ]
        |     |
        |     |___ subsubdir
        |               |
        |               |___ [ files of dataInSubsubdir ]
        |
        |___ another-subdir
              |
              |___ another-subsubdir
                        |
                        |___ [ files of dataInAnotherSubsubdir ]
     */

    val schema = dataInSubdir.schema

    /** Check whether data read via the given glob pattern matches the expected answer. */
    def check(path: String, expectedDf: DataFrame): Unit = {
      val df = spark.read
        .format(dataSourceName)
        .schema(schema) // avoid schema inference for any format, expected to be same format
        .load(path)
      checkAnswer(df, expectedDf)
    }

    check(s"$dir/*/", dataInSubdir)
    check(s"$dir/sub*/*", dataInSubdir.union(dataInSubsubdir))
    check(s"$dir/another*/*", dataInAnotherSubsubdir)
    check(s"$dir/*/another*", dataInAnotherSubsubdir)
    check(s"$dir/*/*", dataInSubdir.union(dataInSubsubdir).union(dataInAnotherSubsubdir))
  }
}
test("Hadoop style globbing - partitioned data with schema inference") {
  // Tests the following on partition data
  // - partitions are not discovered with globbing and without base path set.
  // - partitions are discovered with globbing and base path set, though more detailed
  //   tests for this is in ParquetPartitionDiscoverySuite
  withTempPath { path =>
    val dir = path.getCanonicalPath
    partitionedTestDF.write
      .format(dataSourceName)
      .mode(SaveMode.Overwrite)
      .partitionBy("p1", "p2")
      .save(dir)

    // Reads `path` and checks either the expected answer (Left) or that the
    // thrown error contains the expected message fragment (Right).
    def check(
        path: String,
        expectedResult: Either[DataFrame, String],
        basePath: Option[String] = None
      ): Unit = {
      try {
        val reader = spark.read
        basePath.foreach(reader.option("basePath", _))
        val testDf = reader
          .format(dataSourceName)
          .load(path)
        assert(expectedResult.isLeft, s"Error was expected with $path but result found")
        checkAnswer(testDf, expectedResult.left.get)
      } catch {
        case e: java.util.NoSuchElementException if e.getMessage.contains("dataSchema") =>
          // Ignore error, the source format requires schema to be provided by user
          // This is needed for SimpleTextHadoopFsRelationSuite as SimpleTextSource needs schema
        case e: Throwable =>
          assert(expectedResult.isRight, s"Was not expecting error with $path: " + e)
          assert(
            e.getMessage.contains(expectedResult.right.get),
            s"Did not find expected error message with $path")
      }
    }

    // Small aliases to make the expectations below read declaratively.
    object Error {
      def apply(msg: String): Either[DataFrame, String] = Right(msg)
    }

    object Result {
      def apply(df: DataFrame): Either[DataFrame, String] = Left(df)
    }

    // ---- Without base path set ----
    // Should find all the data with partitioning columns
    check(s"$dir", Result(partitionedTestDF))

    // Should fail as globbing finds dirs without files, only subdirs in them.
    check(s"$dir/*/", Error("please set \"basePath\""))
    check(s"$dir/p1=*/", Error("please set \"basePath\""))

    // Should not find partition columns as the globs resolve to p2 dirs
    // with files in them
    check(s"$dir/*/*", Result(partitionedTestDF.drop("p1", "p2")))
    check(s"$dir/p1=*/p2=foo", Result(partitionedTestDF.filter("p2 = 'foo'").drop("p1", "p2")))
    check(s"$dir/p1=1/p2=???", Result(partitionedTestDF.filter("p1 = 1").drop("p1", "p2")))

    // Should find all data without the partitioning columns as the globs resolve to the files
    check(s"$dir/*/*/*", Result(partitionedTestDF.drop("p1", "p2")))

    // ---- With base path set ----
    val resultDf = partitionedTestDF.select("a", "b", "p1", "p2")
    check(path = s"$dir/*", Result(resultDf), basePath = Some(dir))
    check(path = s"$dir/*/*", Result(resultDf), basePath = Some(dir))
    check(path = s"$dir/*/*/*", Result(resultDf), basePath = Some(dir))
  }
}
// Partition column values are stored as path strings; verify they are cast
// back to the declared column types (string, float, ...) when read.
test("SPARK-9735 Partition column type casting") {
  withTempPath { file =>
    val df = (for {
      i <- 1 to 3
      p2 <- Seq("foo", "bar")
    } yield (i, s"val_$i", 1.0d, p2, 123, 123.123f)).toDF("a", "b", "p1", "p2", "p3", "f")

    val input = df.select(
      $"a",
      $"b",
      $"p1".cast(StringType).as("ps1"),
      $"p2",
      $"p3".cast(FloatType).as("pf1"),
      $"f")

    withTempView("t") {
      input
        .write
        .format(dataSourceName)
        .mode(SaveMode.Overwrite)
        .partitionBy("ps1", "p2", "pf1", "f")
        .saveAsTable("t")

      input
        .write
        .format(dataSourceName)
        .mode(SaveMode.Append)
        .partitionBy("ps1", "p2", "pf1", "f")
        .saveAsTable("t")

      val realData = input.collect()

      checkAnswer(spark.table("t"), realData ++ realData)
    }
  }
}
test("SPARK-7616: adjust column name order accordingly when saving partitioned table") {
  // Partition columns ("c", "a") get moved in the stored schema; selecting an
  // explicit column order must still return the original data.
  val source = (1 to 3).map { i => (i, s"val_$i", i * 2) }.toDF("a", "b", "c")

  source.write
    .format(dataSourceName)
    .mode(SaveMode.Overwrite)
    .partitionBy("c", "a")
    .saveAsTable("t")

  withTable("t") {
    val reordered = spark.table("t").select("b", "c", "a")
    checkAnswer(reordered, source.select("b", "c", "a").collect())
  }
}
// Complex types (array, map, struct) are not allowed as dynamic partition
// columns; both save() and saveAsTable() must reject them at analysis time.
test("SPARK-8887: Explicitly define which data types can be used as dynamic partition columns") {
  val df = Seq(
    (1, "v1", Array(1, 2, 3), Map("k1" -> "v1"), Tuple2(1, "4")),
    (2, "v2", Array(4, 5, 6), Map("k2" -> "v2"), Tuple2(2, "5")),
    (3, "v3", Array(7, 8, 9), Map("k3" -> "v3"), Tuple2(3, "6"))).toDF("a", "b", "c", "d", "e")
  withTempDir { file =>
    intercept[AnalysisException] {
      df.write.format(dataSourceName).partitionBy("c", "d", "e").save(file.getCanonicalPath)
    }
  }
  intercept[AnalysisException] {
    df.write.format(dataSourceName).partitionBy("c", "d", "e").saveAsTable("t")
  }
}
// Uses LocalityTestFileSystem (which fabricates block locations) to verify
// that FileScanRDD reports preferred locations for its partitions.
test("Locality support for FileScanRDD") {
  val options = Map[String, String](
    "fs.file.impl" -> classOf[LocalityTestFileSystem].getName,
    // Disable the FileSystem cache so the custom implementation is actually used.
    "fs.file.impl.disable.cache" -> "true"
  )
  withTempPath { dir =>
    val path = dir.toURI.toString
    val df1 = spark.range(4)
    df1.coalesce(1).write.mode("overwrite").options(options).format(dataSourceName).save(path)
    df1.coalesce(1).write.mode("append").options(options).format(dataSourceName).save(path)

    def checkLocality(): Unit = {
      val df2 = spark.read
        .format(dataSourceName)
        .option("dataSchema", df1.schema.json)
        .options(options)
        .load(path)

      // Dig the FileScanRDD out of the physical plan.
      val Some(fileScanRDD) = df2.queryExecution.executedPlan.collectFirst {
        case scan: DataSourceScanExec if scan.inputRDDs().head.isInstanceOf[FileScanRDD] =>
          scan.inputRDDs().head.asInstanceOf[FileScanRDD]
      }

      val partitions = fileScanRDD.partitions
      val preferredLocations = partitions.flatMap(fileScanRDD.preferredLocations)

      assert(preferredLocations.distinct.length == 2)
    }

    withSQLConf(SQLConf.USE_V1_SOURCE_LIST.key -> dataSourceName) {
      checkLocality()

      // Also exercise the parallel (driver-side distributed) partition discovery path.
      withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "0") {
        checkLocality()
      }
    }
  }
}
// Column names starting with '_' must survive a partitioned write/read round trip.
test("SPARK-16975: Partitioned table with the column having '_' should be read correctly") {
  withTempDir { dir =>
    val childDir = new File(dir, dataSourceName).getCanonicalPath
    val dataDf = spark.range(10).toDF()
    val df = dataDf.withColumn("_col", $"id")
    df.write.format(dataSourceName).partitionBy("_col").save(childDir)

    val reader = spark.read.format(dataSourceName)
    // This is needed for SimpleTextHadoopFsRelationSuite as SimpleTextSource needs schema.
    if (dataSourceName == classOf[SimpleTextSource].getCanonicalName) {
      reader.option("dataSchema", dataDf.schema.json)
    }
    val readBack = reader.load(childDir)
    checkAnswer(df, readBack)
  }
}
}
| ConeyLiu/spark | sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala | Scala | apache-2.0 | 29,067 |
package controllers
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.scalatest.BeforeAndAfter
import org.scalatestplus.play._
import play.api.test.Helpers._
import play.api.test._
/**
 * End-to-end route specs for the Play application.
 *
 * Uses one shared application per suite (`OneAppPerSuite`) and the
 * `FilePreparator` mixin to lay down the fixture files that the
 * `/timeline` endpoint streams back.
 */
class ApplicationSpec extends PlaySpec with OneAppPerSuite with BeforeAndAfter with FilePreparator {

  "Application" should {

    "send 404 on a bad request" in {
      val Some(wrongRoute) = route(app, FakeRequest(GET, "/boum"))
      status(wrongRoute) mustBe NOT_FOUND
    }

    "render the test page" in {
      val api = route(app, FakeRequest(GET, "/test")).get
      status(api) mustBe OK
      contentType(api).get mustBe ("application/json")
      contentAsString(api) must include("host")
      contentAsString(api) must include("uri")
      contentAsString(api) must include("session")
    }

    "render the index page" in {
      val index = route(app, FakeRequest(GET, "/")).get
      status(index) mustBe OK
      contentType(index).get mustBe ("text/html")
      contentAsString(index) must include("Uploaded files")
    }
  }

  "Get files list from akka stream" in {
    prepareFiles()
    // A materializer is needed to consume the streamed /timeline response body.
    implicit val system = ActorSystem()
    implicit val materializer = ActorMaterializer()
    try {
      val api = route(app, FakeRequest(GET, "/timeline")).get
      status(api) mustBe OK
      contentType(api).get mustBe ("text/plain")
      contentAsString(api) must include("Test 1")
      contentAsString(api) must include("Test 2")
      contentAsString(api) must include("failures: 99")
    } finally {
      // Shut down the ad-hoc ActorSystem so repeated test runs don't leak
      // threads (the original never terminated it).
      system.terminate()
    }
  }
}
} | olka/test-reports-agregator | test/controllers/ApplicationSpec.scala | Scala | mit | 1,498 |
// Sketch signatures from the original file, kept for reference; they were
// floating at top level and not attached to any type, so they could not compile:
// def map(i: Int)(f: Int => String): String
// def map2(a: Int, b: Int)(f: (Int, Int) => String): String

/**
 * A tiny property-based-testing sketch (in the spirit of "FP in Scala", ch. 8).
 *
 * `Gen[A]` produces a stream of `count` test cases by repeatedly invoking the
 * generator function `f`.
 *
 * Fixes over the original (which did not compile): `int` -> `Int`, the
 * `Gen[A)` bracket typo, the missing return type on `forAll`, and the
 * undeclared type parameter `A` on `Prop` (now a trait type parameter).
 */
trait Gen[A] {
  val count: Int
  val f: () => A

  // The index is currently unused; every case is drawn from `f()` alone.
  private def transform(i: Int): A = f()

  val cases: Stream[A] = Stream.from(1).take(count).map(transform)
}

object Gen {
}

/**
 * A property over values of type `A`: `apply` decides whether a single value
 * satisfies the property; `check` runs it against every case of a generator.
 */
trait Prop[A] {
  type SuccessCount = Int
  type Message = String

  def apply(a: A): Boolean

  /** Right(number of cases) when all cases pass, Left(message) on the first failure. */
  def check(g: Gen[A]): Either[Message, SuccessCount] = {
    val failures = g.cases.filterNot(apply)
    if (failures.isEmpty) Right(g.count) else Left(s"Falsified by: ${failures.head}")
  }
}

object Prop {
  /** Conjunction: holds only when both properties hold (short-circuits on `l`). */
  def &&[A](l: Prop[A], r: Prop[A]): Prop[A] = new Prop[A] {
    def apply(a: A): Boolean = if (l(a)) r(a) else false
  }

  /** Pairs every generated case with the result of evaluating the property on it. */
  def forAll[A](p: Prop[A])(g: Gen[A]): Stream[(Boolean, A)] =
    g.cases.map(a => (p(a), a))
}
| ChrisCoffey/sandbox | src/ops.scala | Scala | unlicense | 629 |
package com.thoughtworks.microbuilder.play
import java.io.ByteArrayOutputStream
import com.thoughtworks.microbuilder.core.{IRouteConfiguration, MatchResult}
import com.thoughtworks.microbuilder.play.exception.MicrobuilderException.{WrongResponseFormatException, NativeException}
import haxe.io.Output
import haxe.lang.HaxeException
import jsonStream.io.PrettyTextPrinter
import jsonStream.rpc.{IJsonResponseHandler, IJsonService}
import jsonStream.{JsonStream, JsonStreamPair}
import org.slf4j.LoggerFactory
import play.api.http.Writeable
import play.api.mvc._
import play.api.http.HeaderNames
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, Promise}
import scala.util.Try
// Holder for the shared SLF4J logger used by RpcController instances.
private object RpcController {
  private val logger = LoggerFactory.getLogger(classOf[RpcController])
}
/**
 * Play controller that bridges incoming HTTP requests to Microbuilder
 * JSON-stream RPC services.
 *
 * Each [[RpcEntry]] pairs a route configuration (URI matching) with an
 * incoming service proxy; the first entry whose route matches the request
 * handles it, and the (asynchronous) RPC outcome is surfaced through a
 * Promise[Result].
 */
class RpcController(rpcEntries: Seq[RpcEntry]) extends Controller {

  import RpcController._

  // Entry point bound in the routes file: dispatches `uri` to the first matching RpcEntry.
  def rpc(uri: String) = Action.async { request =>
    // Re-attach the query string, which Play strips from the `uri` path parameter.
    val uriWithQuery =
      raw"""$uri${
        request.uri.indexOf('?') match {
          case -1 => ""
          case i => request.uri.substring(i)
        }
      }"""
    logger.debug(raw"""Parsing URI $uriWithQuery...""")
    val bodyJsonStream: Option[JsonStream] = request.body.asText match {
      case None => None
      case Some(jsonStream) => Some(JsonStream.STRING(jsonStream))
    }
    // Lazily try each configured route; matchUri returns null when the route
    // does not match, so the guard filters those out.
    val matchedEntries = for {
      rpcEntry <- rpcEntries.iterator
      matchResult = rpcEntry.routeConfiguration.matchUri(
        new com.thoughtworks.microbuilder.core.Request(
          request.method,
          uriWithQuery,
          (for {
            header <- request.headers.headers
          } yield new com.thoughtworks.microbuilder.core.Header(header._1, header._2)) (collection.breakOut(Array.canBuildFrom)),
          bodyJsonStream.getOrElse(null),
          request.contentType.getOrElse(null),
          request.headers.get(HeaderNames.ACCEPT).getOrElse(null)
        )
      );
      if (matchResult != null)
    } yield (rpcEntry, matchResult)
    if (matchedEntries.hasNext) {
      val (rpcEntry, matchResult) = matchedEntries.next()
      try {
        val promise = Promise[Result]
        // Adapts the Haxe-generated async callback API to the Play Promise.
        val resp: IJsonResponseHandler = new IJsonResponseHandler {
          override def onFailure(jsonStream: JsonStream): Unit = {
            // Example failure payload (structural application failure):
            /*
              {
                com.thoughtworks.microbuilder.core.Failure : {
                  STRUCTURAL_APPLICATION_FAILURE : {
                    failure : {
                      String : app structural exception
                    },
                    status : 507
                  }
                }
              }
            */
            // Example failure payload (text application failure):
            /*{
              com.thoughtworks.microbuilder.core.Failure : {
                TEXT_APPLICATION_FAILURE : {
                  message : app exception,
                  status : 507
                }
              }
            }*/
            // NOTE(review): the outer match has no default case, so a
            // non-object failure stream would raise a MatchError here —
            // presumably never produced by the generated code; confirm.
            jsonStream match {
              case JsonStreamExtractor.Object(pairs) =>
                if (pairs.hasNext) {
                  val pair = pairs.next()
                  pair.value match {
                    case JsonStreamExtractor.Object(failurePairs) =>
                      if (failurePairs.hasNext) {
                        val failurePair = failurePairs.next()
                        // Routes declare an expected failure class; null means
                        // plain-text failures are expected instead.
                        val expectedFailure = rpcEntry.routeConfiguration.get_failureClassName
                        failurePair.key match {
                          case "TEXT_APPLICATION_FAILURE" =>
                            if (expectedFailure == null) {
                              promise.success(textFailureResult(failurePair.value))
                            } else {
                              promise.failure(new WrongResponseFormatException(s"Expect a $expectedFailure, actually TEXT_APPLICATION_FAILURE(${PrettyTextPrinter.toString(failurePair.value)})"))
                            }
                          case "STRUCTURAL_APPLICATION_FAILURE" =>
                            if (expectedFailure != null) {
                              promise.complete(Try(structuralFailureResult(Option(matchResult.routeEntry.get_responseContentType), rpcEntry.routeConfiguration.get_failureClassName, failurePair.value)))
                            } else {
                              promise.failure(new WrongResponseFormatException(s"Expect text, actually STRUCTURAL_APPLICATION_FAILURE(${PrettyTextPrinter.toString(failurePair.value)})"))
                            }
                          case "NATIVE_FAILURE" =>
                            promise.failure(nativeFailureException(failurePair.value))
                          case _ =>
                            promise.failure(new IllegalStateException("failure must be a Failure."))
                        }
                      } else {
                        promise.failure(new IllegalStateException("failure must contain one key/value pair."))
                      }
                    case _ =>
                      promise.failure(new IllegalStateException("failure must be a JSON object."))
                  }
                } else {
                  promise.failure(new IllegalStateException("failure must contain one key/value pair."))
                }
                // NOTE(review): if the promise was already completed above,
                // this second completion would throw IllegalStateException in
                // the callback — assumes well-formed single-pair payloads.
                if (pairs.hasNext) {
                  promise.failure(new IllegalStateException("failure must contain only one key/value pair."))
                }
            }
          }

          override def onSuccess(jsonStream: JsonStream): Unit = {
            // Serialize the JSON stream into the HTTP response body using the
            // route's declared response content type.
            promise.success(Ok(jsonStream)(new Writeable[JsonStream]({ jsonStream =>
              val javaStream = new ByteArrayOutputStream()
              PrettyTextPrinter.print(new Output {
                override def writeByte(b: Int) = {
                  javaStream.write(b)
                }
              }, jsonStream, 0)
              javaStream.toByteArray
            }, Option(matchResult.routeEntry.get_responseContentType))))
          }
        }
        rpcEntry.incomingServiceProxy.apply(matchResult.rpcData, resp)
        promise.future
      } catch {
        // FIXME: Should handle different exceptions.
        case e: HaxeException => {
          play.api.Logger.warn(e.getObject.toString, e)
          Future.successful(BadRequest(s"Cannot parse request for $uri"))
        }
      }
    } else {
      Future.successful(NotFound(s"URL $uri does not match."))
    }
  }

  // Converts a NATIVE_FAILURE payload (an object with a "message" field) into
  // a NativeException that fails the pending promise.
  private def nativeFailureException(nativeFailureStream: JsonStream): NativeException = {
    nativeFailureStream match {
      case JsonStreamExtractor.Object(subPairs) =>
        var messageOption: Option[String] = None
        for (subPair <- subPairs) {
          subPair.key match {
            case "message" =>
              subPair.value match {
                case JsonStreamExtractor.String(messageValue) =>
                  messageOption = Some(messageValue)
              }
          }
        }
        new NativeException(messageOption.getOrElse(null))
    }
  }

  // Converts a TEXT_APPLICATION_FAILURE payload ("message" + "status") into an
  // HTTP result with that status and the message as plain-text body.
  // NOTE(review): the `val Some(...)` extractions throw MatchError when either
  // field is absent — assumes the generated payload always carries both.
  private def textFailureResult(textApplicationFailureStream: JsonStream): Result = {
    textApplicationFailureStream match {
      case JsonStreamExtractor.Object(subPairs) =>
        var messageOption: Option[String] = None
        var statusOption: Option[Int] = None
        for (subPair <- subPairs) {
          subPair.key match {
            case "message" =>
              subPair.value match {
                case JsonStreamExtractor.String(messageValue) =>
                  messageOption = Some(messageValue)
              }
            case "status" =>
              subPair.value match {
                case JsonStreamExtractor.Int32(statusValue) =>
                  statusOption = Some(statusValue)
                case JsonStreamExtractor.Number(statusValue) =>
                  statusOption = Some(statusValue.toInt)
              }
          }
        }
        val Some(status) = statusOption
        val Some(message) = messageOption
        new Status(status)(message)
    }
  }

  // Converts a STRUCTURAL_APPLICATION_FAILURE payload ("status" + single-key
  // "failure" object whose key must equal the route's declared failure class)
  // into an HTTP result whose body is the failure value serialized as JSON.
  private def structuralFailureResult(contentType: Option[String], failureName: String, structuralApplicationFailureStream: JsonStream): Result = {
    structuralApplicationFailureStream match {
      case JsonStreamExtractor.Object(subPairs) =>
        var failureStreamOption: Option[Array[Byte]] = None
        var statusOption: Option[Int] = None
        for (subPair <- subPairs) {
          subPair.key match {
            case "status" =>
              subPair.value match {
                case JsonStreamExtractor.Int32(statusValue) =>
                  statusOption = Some(statusValue)
                case JsonStreamExtractor.Number(statusValue) =>
                  statusOption = Some(statusValue.toInt)
              }
            case "failure" =>
              subPair.value match {
                case JsonStreamExtractor.Object(failurePairs) => {
                  if (failurePairs.hasNext) {
                    val failurePair = failurePairs.next()
                    if (failurePair.key == failureName) {
                      failureStreamOption = Some(PrettyTextPrinter.toString(failurePair.value).getBytes)
                    } else {
                      throw new IllegalArgumentException(s"Failure type name does not match. Expect $failureName, actually ${failurePair.key}")
                    }
                    if (failurePairs.hasNext) {
                      throw new IllegalArgumentException("Failure JSON must contain one key/value pair.")
                    }
                  } else {
                    throw new IllegalArgumentException("Failure JSON must contain one key/value pair.")
                  }
                }
              }
          }
        }
        val Some(status) = statusOption
        val Some(failureStream) = failureStreamOption
        new Status(status)(failureStream)(Writeable[Array[Byte]](locally[Array[Byte]](_), contentType))
    }
  }
}
| ThoughtWorksInc/microbuilder-play | src/main/scala/com/thoughtworks/microbuilder/play/RpcController.scala | Scala | apache-2.0 | 9,917 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.events
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableDataTypeChangeModel, AlterTableDropColumnModel, AlterTableRenameModel}
import org.apache.carbondata.core.metadata.CarbonTableIdentifier
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.processing.loading.model.CarbonLoadModel
/**
 * Event payload for database-level operations.
 */
trait DatabaseEventInfo {
  val databaseName: String
}

/**
 * Event payload for table related operations.
 */
trait TableEventInfo {
  val carbonTableIdentifier: CarbonTableIdentifier
}

/**
 * Event payload for data-load operations.
 */
trait LoadEventInfo {
  val carbonLoadModel: CarbonLoadModel
}

/**
 * Event payload for relation lookup.
 */
trait LookupRelationEventInfo {
  val carbonTable: CarbonTable
}

/**
 * Event payload for drop table.
 * `carbonTable` is optional: it may be absent when the table does not exist
 * and `ifExistsSet` is true.
 */
trait DropTableEventInfo {
  val carbonTable: Option[CarbonTable]
  val ifExistsSet: Boolean
}

/**
 * Event payload for ALTER TABLE ... DROP COLUMN.
 */
trait AlterTableDropColumnEventInfo {
  val carbonTable: CarbonTable
  val alterTableDropColumnModel: AlterTableDropColumnModel
}
/**
 * Event payload for ALTER TABLE ... CHANGE (column data type change).
 * (Scaladoc was missing on this trait in the original.)
 */
trait AlterTableDataTypeChangeEventInfo {
  val carbonTable: CarbonTable
  val alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel
}

/**
 * Event payload for ALTER TABLE ... RENAME.
 */
trait AlterTableRenameEventInfo {
  val carbonTable: CarbonTable
  val alterTableRenameModel: AlterTableRenameModel
}

/**
 * Event payload for ALTER TABLE ... ADD COLUMNS.
 */
trait AlterTableAddColumnEventInfo {
  val carbonTable: CarbonTable
  val alterTableAddColumnsModel: AlterTableAddColumnsModel
}

/**
 * Event payload for table compaction.
 * (The original comment said "alter_table_rename" — a copy/paste slip; the
 * members clearly describe a compaction: load model plus merged load name.)
 */
trait AlterTableCompactionEventInfo {
  val carbonTable: CarbonTable
  val carbonLoadModel: CarbonLoadModel
  val mergedLoadName: String
}
/**
 * Event payload for DELETE SEGMENT ... BY ID.
 */
trait DeleteSegmentbyIdEventInfo {
  val carbonTable: CarbonTable
  val loadIds: Seq[String]
}

/**
 * Event payload for DELETE SEGMENT ... BY DATE.
 */
trait DeleteSegmentbyDateEventInfo {
  val carbonTable: CarbonTable
  val loadDates: String
}

/**
 * Event payload for CLEAN FILES.
 */
trait CleanFilesEventInfo {
  val carbonTable: CarbonTable
}

/**
 * Event payload for UPDATE table.
 */
trait UpdateTableEventInfo {
  val carbonTable: CarbonTable
}

/**
 * Event payload for DELETE FROM table.
 */
trait DeleteFromTableEventInfo {
  val carbonTable: CarbonTable
}

/**
 * Event payload carrying the SparkSession, used to initiate CarbonEnv.
 */
trait SessionEventInfo {
  val sparkSession: SparkSession
}
| HuaweiBigData/carbondata | integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala | Scala | apache-2.0 | 3,279 |
package drt.client.components.charts
import drt.client.components.ChartJSComponent.ChartJsData
import drt.client.components.charts.DataFormat.jsonString
import drt.client.services.charts.ChartData
import drt.shared.PaxTypes._
import drt.shared.Queues.{EGate, EeaDesk, NonEeaDesk}
import drt.shared.{ApiPaxTypeAndQueueCount, Nationality, PaxTypes, Queues}
import utest.{TestSuite, _}
// utest suite: verifies that API passenger splits are aggregated into
// Chart.js data (labels + per-type totals) correctly.
object PaxSplitsDataForPaxTypeChartTests extends TestSuite {

  def tests = Tests {
    "When extracting PaxType data to display in a chart" - {
      "Given Splits containing an ApiSplit with 1 passenger split of type EEA Machine Readable " +
        "Then I should get back chart data the same" - {

        val apiSplit = Set(ApiPaxTypeAndQueueCount(
          PaxTypes.EeaMachineReadable,
          Queues.EGate, 1,
          Option(Map(Nationality("GBR") -> 1.0)), None
        ))

        val labels = Seq("EEA Machine Readable")
        val data = Seq(1.0)
        val expected = ChartJsData(labels, data, "Passenger Types").toJs

        val result = ChartData.splitToPaxTypeData(apiSplit).toJs

        // Compared via JSON serialisation — presumably because the Scala.js
        // chart objects lack structural equality; confirm if refactoring.
        assert(jsonString(result) == jsonString(expected))
      }
    }

    "When extracting passenger type breakdown to display in a chart" - {
      "Given Splits containing an ApiSplit with multiple passenger types in multiple queues " +
        "Then I should get the total of each passenger type across all queues" - {

        // Same pax types appear in several queues; totals must sum across queues
        // (e.g. EeaMachineReadable: 7 at EGate + 3 at EeaDesk = 10).
        val apiSplit = Set(
          ApiPaxTypeAndQueueCount(VisaNational, NonEeaDesk, 7, None, None),
          ApiPaxTypeAndQueueCount(NonVisaNational, NonEeaDesk, 2, None, None),
          ApiPaxTypeAndQueueCount(B5JPlusNational, EGate, 2, None, None),
          ApiPaxTypeAndQueueCount(EeaBelowEGateAge, EeaDesk, 1, None, None),
          ApiPaxTypeAndQueueCount(EeaMachineReadable, EGate, 7, None, None),
          ApiPaxTypeAndQueueCount(EeaMachineReadable, EeaDesk, 3, None, None),
          ApiPaxTypeAndQueueCount(B5JPlusNational, EeaDesk, 2, None, None)
        )

        val labels = Seq("B5J+ National", "EEA Child", "EEA Machine Readable", "Non-Visa National", "Visa National")
        val data = Seq(4.0, 1.0, 10.0, 2.0, 7.0)
        val expected = ChartJsData(labels, data, "Passenger Types").toJs

        val result = ChartData.splitToPaxTypeData(apiSplit).toJs

        assert(jsonString(result) == jsonString(expected))
      }
    }
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | client/src/test/scala/drt/client/components/charts/PaxSplitsDataForPaxTypeChartTests.scala | Scala | apache-2.0 | 2,379 |
/*
* Copyright 2016 Well-Factored Software Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wellfactored.playbindings
import org.scalatest.{EitherValues, FlatSpec, Matchers, OptionValues}
import play.api.mvc.QueryStringBindable
class ValueClassQueryStringBindableTest
  extends FlatSpec
    with Matchers
    with OptionValues
    with EitherValues
    with ValueClassQueryStringBindable
    with TestWrappers {

  // Query-string fixtures: a parseable value and a negative one.
  val goodLongValue = Map("a" -> Seq("1"))
  val badLongValue = Map("a" -> Seq("-1"))

  "bind" should "implicitly summon a binder for Test" in {
    val binder = implicitly[QueryStringBindable[LongWrapper]]
    binder.bind("a", goodLongValue).value.right.value shouldBe LongWrapper(1)
  }

  "unbind" should "extract the wrapped value and convert it to a String" in {
    val binder = implicitly[QueryStringBindable[LongWrapper]]
    val wrapper = LongWrapper(1337)
    binder.unbind("key", wrapper) shouldBe "key=1337"
  }
}
| WellFactored/play-extras | src/test/scala/com/wellfactored/playbindings/ValueClassQueryStringBindableTest.scala | Scala | apache-2.0 | 1,482 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib
import slamdata.Predef._
import _root_.monocle.{Getter, Iso}
import _root_.matryoshka._
import _root_.matryoshka.data.free._
import _root_.matryoshka.implicits._
import _root_.matryoshka.patterns._
import _root_.scalaz._, Scalaz._
package object matryoshka {

  /** Chains multiple transformations together, each of which can fail to change
    * anything. The composite yields `None` only when none of the transforms
    * applied; later transforms are applied `orOriginal`-style on top of earlier
    * results.
    */
  def applyTransforms[A](first: A => Option[A], rest: (A => Option[A])*)
    : A => Option[A] =
    rest.foldLeft(
      first)(
      (prev, next) => x => prev(x).fold(next(x))(orOriginal(next)(_).some))

  /** Unfolds a recursive structure `T` into `Free[F, A]`; every layer is wrapped
    * via `CoEnv(… .right)`, so no `A` leaves are produced.
    */
  object convertToFree {
    def apply[F[_], A] = new PartiallyApplied[F, A]

    // Two-step application so `F`/`A` are supplied explicitly while `T` is inferred.
    final class PartiallyApplied[F[_], A] {
      def apply[T](t: T)(implicit T: Recursive.Aux[T, F], F: Functor[F]): Free[F, A] =
        t.ana[Free[F, A]](x => CoEnv(x.project.right[A]))
    }
  }

  /** Convenience constructor for an annotated functor layer. */
  def envT[E, W[_], A](e: E, wa: W[A]): EnvT[E, W, A] =
    EnvT((e, wa))

  /** `EnvT` is isomorphic to a pair of annotation and functor value. */
  def envTIso[E, W[_], A]: Iso[EnvT[E, W, A], (E, W[A])] =
    Iso((_: EnvT[E, W, A]).runEnvT)(EnvT(_))

  /** Specialization of `ginterpretM` to `Id`: handles the `A` holes of `CoEnv`
    * with `f` and the functor layer with `φ`.
    */
  def ginterpret[W[_], F[_], A, B](f: A => B, φ: GAlgebra[W, F, B])
    : GAlgebra[W, CoEnv[A, F, ?], B] =
    ginterpretM[W, Id, F, A, B](f, φ)

  /** Elgot-algebra analogue of `ginterpret`. */
  def einterpret[W[_]: Traverse, F[_], A, B](f: A => B, φ: ElgotAlgebra[W, F, B])
    : ElgotAlgebra[W, CoEnv[A, F, ?], B] =
    _.traverse(_.run).fold(f, φ)

  /** Views a recursive structure's outermost layer as a monocle `Getter`. */
  def project[T, F[_]: Functor](implicit T: Recursive.Aux[T, F]): Getter[T, F[T]] =
    Getter(T.project(_))

  /** Make a partial endomorphism total by returning the argument when undefined. */
  def totally[A](pf: PartialFunction[A, A]): A => A =
    orOriginal(pf.lift)

  /** Derive a recursive instance over the functor transformed by EnvT by forgetting the annotation. */
  def forgetRecursive[T, E, F[_]](implicit T: Recursive.Aux[T, EnvT[E, F, ?]]): Recursive.Aux[T, F] =
    new Recursive[T] {
      type Base[B] = F[B]
      def project(t: T)(implicit BF: Functor[Base]) =
        T.project(t).lower
    }

  /** Derive a corecursive instance over the functor transformed by EnvT using the zero of the annotation monoid. */
  def rememberCorecursive[T, E: Monoid, F[_]](implicit T: Corecursive.Aux[T, EnvT[E, F, ?]]): Corecursive.Aux[T, F] =
    new Corecursive[T] {
      type Base[B] = F[B]
      def embed(ft: Base[T])(implicit BF: Functor[Base]) =
        T.embed(envT(∅[E], ft))
    }

  /** Pairs the algebra's result with the list of its direct children's results. */
  def selfAndChildren[F[_]: Functor: Foldable, A](alg: Algebra[F, A])
    : Algebra[F, (A, List[A])] =
    faas => {
      val fa = faas map (_._1)
      (alg(fa), fa.toList)
    }

  /** Materializes an `Order[F[A]]` from a `Delay`ed `Order` instance. */
  implicit def delayOrder[F[_], A](implicit F: Delay[Order, F], A: Order[A]): Order[F[A]] =
    F(A)

  /** `Order` for coproducts, delegating to the order of the underlying disjunction. */
  implicit def coproductOrder[F[_], G[_]](implicit F: Delay[Order, F], G: Delay[Order, G]): Delay[Order, Coproduct[F, G, ?]] =
    new Delay[Order, Coproduct[F, G, ?]] {
      def apply[A](ord: Order[A]): Order[Coproduct[F, G, A]] = {
        implicit val ordA: Order[A] = ord
        Order.orderBy((_: Coproduct[F, G, A]).run)
      }
    }

  /** Zips two monadic algebras into a single algebra producing a pair. */
  implicit def AlgebraMZip[M[_]: Applicative, F[_]: Functor]
    : Zip[AlgebraM[M, F, ?]] =
    new Zip[AlgebraM[M, F, ?]] {
      def zip[A, B](a: ⇒ AlgebraM[M, F, A], b: ⇒ AlgebraM[M, F, B]) =
        w => Bitraverse[(?, ?)].bisequence((a(w ∘ (_._1)), b(w ∘ (_._2))))
    }
}
| drostron/quasar | foundation/src/main/scala/quasar/contrib/matryoshka/package.scala | Scala | apache-2.0 | 3,894 |
package org.coursera.naptime.ari.graphql
import com.linkedin.data.DataMap
import org.coursera.courier.templates.DataTemplates.DataConversion
import org.coursera.naptime.ari.graphql.models.AnyData
import org.coursera.naptime.ari.graphql.models.Coordinates
import org.coursera.naptime.ari.graphql.models.CoursePlatform
import org.coursera.naptime.ari.graphql.models.MergedCourse
import org.coursera.naptime.ari.graphql.models.MergedCourse.OriginalId.StringMember
import org.coursera.naptime.ari.graphql.models.PlatformSpecificData.OldPlatformDataMember
import org.coursera.naptime.ari.graphql.models.MergedCourses
import org.coursera.naptime.ari.graphql.models.MergedInstructor
import org.coursera.naptime.ari.graphql.models.MergedPartner
import org.coursera.naptime.ari.graphql.models.OldPlatformData
import org.coursera.naptime.schema.Attribute
import org.coursera.naptime.schema.Handler
import org.coursera.naptime.schema.HandlerKind
import org.coursera.naptime.schema.JsValue
import org.coursera.naptime.schema.Parameter
import org.coursera.naptime.schema.Resource
import org.coursera.naptime.schema.ResourceKind
import scala.collection.JavaConverters._
// Test fixtures: Naptime resource schemas and merged models used by the
// ARI/GraphQL test suites. All resources are COLLECTION kind, version 1.
object Models {

  // "courses" resource: get / multiGet / getAll; the only fixture carrying a
  // "doc" attribute (service metadata).
  val courseResource = Resource(
    kind = ResourceKind.COLLECTION,
    name = "courses",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.MergedCourse",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.MULTI_GET,
        name = "multiGet",
        parameters = List(Parameter(name = "ids", `type` = "List[String]", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty)),
    className = "",
    attributes = List(Attribute("doc", Some(JsValue.build(
      new DataMap(Map("service" -> "myService").asJava),
      DataConversion.SetReadOnly)))))

  // "instructors" resource: same handler shape as courses, String-keyed.
  val instructorResource = Resource(
    kind = ResourceKind.COLLECTION,
    name = "instructors",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.MergedInstructor",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.MULTI_GET,
        name = "multiGet",
        parameters = List(Parameter(name = "ids", `type` = "List[String]", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty)),
    className = "",
    attributes = List.empty)

  // "partners" resource: Int-keyed (id: Int, ids: List[Int]).
  val partnersResource = Resource(
    kind = ResourceKind.COLLECTION,
    name = "partners",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.MergedPartner",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "Int", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.MULTI_GET,
        name = "multiGet",
        parameters = List(Parameter(name = "ids", `type` = "List[Int]", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty)),
    className = "",
    attributes = List.empty)

  // "fakeModels" resource: String-keyed, standard get / multiGet / getAll.
  val fakeModelResource = Resource(
    kind = ResourceKind.COLLECTION,
    name = "fakeModels",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.FakeModel",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.MULTI_GET,
        name = "multiGet",
        parameters = List(Parameter(name = "ids", `type` = "List[String]", attributes = List
          .empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty)),
    className = "",
    attributes = List.empty)

  // Resource that deliberately has NO multiGet handler (get / getAll / finder only).
  val multigetFreeEntity = Resource(
    kind = ResourceKind.COLLECTION,
    name = "multigetFreeEntity",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.MergedMultigetFreeEntity",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty),
      Handler(
        kind = HandlerKind.FINDER,
        name = "finder",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty)),
    className = "",
    attributes = List.empty)

  // NOTE(review): multiGet ids are typed "List[Int]" while get takes a String id —
  // confirm this asymmetry is intentional in the fixture.
  val pointerEntity = Resource(
    kind = ResourceKind.COLLECTION,
    name = "pointerEntity",
    version = Some(1),
    keyType = "",
    valueType = "",
    mergedType = "org.coursera.naptime.ari.graphql.models.MergedPointerEntity",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.MULTI_GET,
        name = "multiGet",
        parameters = List(Parameter(name = "ids", `type` = "List[Int]", attributes = List.empty)),
        attributes = List.empty),
      Handler(
        kind = HandlerKind.GET_ALL,
        name = "getAll",
        parameters = List.empty,
        attributes = List.empty)),
    className = "",
    attributes = List.empty)

  // Shared scalar fixtures for the merged-course values below.
  val oldPlatformNotAvailableMessageA = "Not Available."
  val oldPlatformNotAvailableMessageB = "Still Not Available."
  val oldCourseIdA = "oldCourseIdA"
  val oldCourseIdB = "oldCourseIdB"
  val originalId = StringMember("originalIdValue")

  // Course referencing both instructors and partner 123.
  val COURSE_A = MergedCourse(
    id = "courseAId",
    name = "Machine Learning",
    slug = "machine-learning",
    description = Some("An awesome course on machine learning."),
    instructorIds = List("instructor1Id", "instructor2Id"),
    partnerId = 123,
    originalId = originalId,
    platformSpecificData = OldPlatformDataMember(OldPlatformData(oldPlatformNotAvailableMessageA, oldCourseIdA)),
    coursePlatform = List(CoursePlatform.NewPlatform),
    arbitraryData = AnyData.build(new DataMap(), DataConversion.SetReadOnly))

  // NOTE(review): originalId = "" relies on conversion to the union's string
  // member — confirm against MergedCourse's generated API.
  val COURSE_B = MergedCourse(
    id = "courseBId",
    name = "Probabalistic Graphical Models",
    slug = "pgm",
    description = Some("An awesome course on pgm's."),
    instructorIds = List("instructor2Id"),
    partnerId = 123,
    originalId = "",
    platformSpecificData = OldPlatformDataMember(OldPlatformData(oldPlatformNotAvailableMessageB, oldCourseIdB)),
    coursePlatform = List(CoursePlatform.NewPlatform),
    arbitraryData = AnyData.build(new DataMap(), DataConversion.SetReadOnly))

  val COURSES = MergedCourses(courses = List(COURSE_A, COURSE_B))

  val INSTRUCTOR_1 = MergedInstructor(
    id = "instructor1Id",
    name = "Professor X",
    title = "Chair",
    bio = "Professor X's bio",
    courseIds = List(COURSE_A.id),
    partnerId = 123)

  val INSTRUCTOR_2 = MergedInstructor(
    id = "instructor2Id",
    name = "Professor Y",
    title = "Table",
    bio = "Professor Y's bio",
    courseIds = List(COURSE_B.id),
    partnerId = 123)

  val PARTNER_123 = MergedPartner(
    id = 123,
    name = "University X",
    slug = "x-university",
    geolocation = Coordinates(37.386824, -122.061005))
}
| coursera/naptime | naptime-graphql/src/test/scala/org/coursera/naptime/ari/graphql/models.scala | Scala | apache-2.0 | 8,388 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation._
import leon.lang._
object Shifts {
  // Remember from C: 1 << 31 is undefined but 1u << 31 is well defined.
  def _main(): Int = {
    val one = 0x1
    val bits = 31
    // Shift the bit up to the sign position and logically shift it back down.
    val roundTripped = (one << bits) >>> bits
    if (roundTripped == one) 0 else 1
  }

  @extern
  def main(args: Array[String]) = _main()
}
| epfl-lara/leon | src/test/resources/regression/genc/valid/Shifts.scala | Scala | gpl-3.0 | 346 |
package io.ssc.angles.pipeline.explorers
import java.io.File
import com.google.common.collect.{HashMultimap, SetMultimap}
import de.uni_leipzig.informatik.asv.gephi.chinesewhispers.ChineseWhispersClusterer
import org.apache.commons.lang3.StringUtils
import org.gephi.clustering.api.Cluster
import org.gephi.graph.api.{Graph, GraphController, GraphFactory, Node}
import org.gephi.io.exporter.api.ExportController
import org.gephi.io.exporter.preview.PNGExporter
import org.gephi.layout.plugin.openord.OpenOrdLayoutBuilder
import org.gephi.project.api.{ProjectController, Workspace}
import org.openide.util.Lookup
import org.slf4j.LoggerFactory
import scala.collection.mutable
/**
 * Class for handling gephi graphs: imports weighted edge maps, exports PNG
 * images, and runs layouts and clustering on a dedicated gephi workspace.
 */
class GephiManager {

  val logger = LoggerFactory.getLogger(classOf[GephiManager])

  // Each GephiManager owns a fresh gephi project and works in its current workspace.
  val projectController: ProjectController = Lookup.getDefault.lookup(classOf[ProjectController])
  projectController.newProject()
  val workspace: Workspace = projectController.getCurrentWorkspace

  /**
   * Imports a weighted edge map into the workspace graph.
   *
   * @param mapGraph   edges keyed by (source label, target label), valued by edge weight
   * @param isDirected whether the graph and its edges are directed
   */
  def loadGraphMap(mapGraph: Map[(String, String), Double], isDirected: Boolean): Unit = {
    val graphModel = Lookup.getDefault.lookup(classOf[GraphController]).getModel(workspace)
    val nodeMap = mutable.HashMap.empty[String, Node]
    // val + if-expression instead of a null-initialized var.
    val graph: Graph =
      if (isDirected) graphModel.getDirectedGraph
      else graphModel.getUndirectedGraph

    logger.info("Importing graph to gephi...")
    mapGraph.foreach { case ((leftString, rightString), weight) =>
      // Get or create nodes for left + right
      val leftNode = nodeMap.getOrElseUpdate(leftString, addNewNodeToGraph(graph, graphModel.factory(), leftString))
      val rightNode = nodeMap.getOrElseUpdate(rightString, addNewNodeToGraph(graph, graphModel.factory(), rightString))

      // Add edge to graph
      val edge = graphModel.factory().newEdge(leftNode, rightNode, weight.asInstanceOf[Float], isDirected)
      graph.addEdge(edge)
    }
    logger.info("Imported {} nodes and {} edges", graph.getNodeCount, graph.getEdgeCount)
  }

  /**
   * Create a new node inside the given graph and return it.
   *
   * @param graph    graph the node is added to
   * @param factory  gephi factory used to build the node
   * @param nodeName id of the new node
   * @return the newly created node
   */
  private def addNewNodeToGraph(graph: Graph, factory: GraphFactory, nodeName: String): Node = {
    val newNode = factory.newNode(nodeName)
    graph.addNode(newNode)
    newNode
  }

  /**
   * Exports the current workspace as a PNG image.
   *
   * @param filename target file path
   * @param height   image height in pixels
   * @param width    image width in pixels
   */
  def exportGraphToPNGImage(filename: String, height: Int, width: Int) = {
    val exportController: ExportController = Lookup.getDefault.lookup(classOf[ExportController])
    val pngExporter: PNGExporter = exportController.getExporter("png").asInstanceOf[PNGExporter]
    pngExporter.setWorkspace(workspace)
    pngExporter.setHeight(height)
    pngExporter.setWidth(width)
    logger.info("Begin PNG export from gephi data to file {}...", filename)
    exportController.exportFile(new File(filename), pngExporter)
    logger.info("Finished graph export")
  }

  /** Runs the OpenOrd layout algorithm to completion on the workspace graph. */
  def runOpenOrdLayout() = {
    val graphModel = Lookup.getDefault.lookup(classOf[GraphController]).getModel(workspace)
    val openOrdLayout = new OpenOrdLayoutBuilder().buildLayout
    openOrdLayout.setGraphModel(graphModel)
    logger.info("Preparing OpenOrd layout...")
    openOrdLayout.resetPropertiesValues()
    openOrdLayout.initAlgo()
    logger.info("Running OpenOrd layout...")
    while (openOrdLayout.canAlgo) {
      openOrdLayout.goAlgo()
    }
    logger.info("Finished OpenOrd layout!")
  }

  /**
   * Runs Chinese Whispers clustering on the workspace graph.
   *
   * @return multimap from cluster index to the (quote-stripped) node ids in that cluster
   */
  def runChineseWhispersClusterer(): SetMultimap[Int, String] = {
    val graphModel = Lookup.getDefault.lookup(classOf[GraphController]).getModel(workspace)
    val progressTicket = new GephiProgressTicketImpl
    val cwClusterer = new ChineseWhispersClusterer

    logger.info("Running Chinese Whispers clusterer via gephi...")
    cwClusterer.setProgressTicket(progressTicket)
    cwClusterer.execute(graphModel)
    logger.info("Clustering finished.")

    logger.info("Generating cluster map...")
    val clusters: Array[Cluster] = cwClusterer.getClusters
    // val + zipWithIndex instead of a var reference and a manual counter
    // (the multimap itself is still mutated while being filled).
    val resultMap: SetMultimap[Int, String] = HashMultimap.create()
    for ((cluster, clusterId) <- clusters.zipWithIndex) {
      for (node <- cluster.getNodes) {
        val id: String = node.getNodeData.getId
        resultMap.put(clusterId, StringUtils.strip(id, "\\""))
      }
    }
    logger.info("Cluster map generated.")
    resultMap
  }
}
| jhendess/angles | src/main/scala/io/ssc/angles/pipeline/explorers/GephiManager.scala | Scala | gpl-3.0 | 4,526 |
package nl.malienkolders.htm.battle
package snippet
import net.liftweb._
import http._
import util.Helpers._
import nl.malienkolders.htm.battle.model.Viewer
import net.liftweb.util.ClearClearable
object Viewers {
  // Renders one <li> per stored Viewer, displaying its alias.
  // `& ClearClearable` strips template placeholder elements from the output.
  def render = {
    // NOTE(review): alias.get reads the Record field's value (Lift Record API),
    // not Option.get — confirm against the Viewer model definition.
    "li *" #> Viewer.findAll.map(v => v.alias.get) & ClearClearable
  }
}
package com.themillhousegroup.gatsby
import org.specs2.mutable.Specification
import com.dividezero.stubby.core.model.{ StubRequest, StubExchange }
import org.specs2.mock.Mockito
import com.themillhousegroup.gatsby.stubby.StubbyServer
import com.themillhousegroup.gatsby.test.MockedLogging
class GatsbySimulationSpec extends Specification with Mockito {

  /** Test double: a GatsbySimulation whose StubbyServer is mocked, so no real
    * server is started for port 8888.
    */
  class TestGatsbySimulation(swes: Seq[StubExchange]) extends GatsbySimulation(8888) with MockedLogging {
    lazy val mockStubbyServer = mock[StubbyServer]
    override lazy val simulationWideExchanges = swes

    // Pre-register the mock so lookups for port 8888 resolve to it.
    stubbyServers += (8888 -> mockStubbyServer)

    override def startStubbyOnPort(port: Int): StubbyServer = {
      mockStubbyServer
    }

    override def mainServer = mockStubbyServer
  }

  "GatsbySimulation" should {

    "Allow Simulation-wide exchanges to be defined" in {
      val mockExchange = mock[StubExchange]
      val testGatsbySimulation = new TestGatsbySimulation(Seq(mockExchange))
      testGatsbySimulation.before()
      testGatsbySimulation.simulationWideExchanges must haveLength(1)
      //  there was one(mockStubbyServer).addExchange(mockExchange)
    }

    "Allow single exchanges to be added per-request" in {
      val testGatsbySimulation = new TestGatsbySimulation(Nil)
      val mockStubExchange = givenStubExchange("GET", "/bar")
      // a repeated add of the same exchange returns false (duplicate)
      testGatsbySimulation.addExchange("foo", mockStubExchange) must beTrue
      testGatsbySimulation.addExchange("foo", mockStubExchange) must beFalse
    }

    "Allow multiple exchanges to be added per-request" in {
      val testGatsbySimulation = new TestGatsbySimulation(Nil)
      val mockStubExchange1 = givenStubExchange("GET", "/bar")
      val mockStubExchange2 = givenStubExchange("POST", "/baz")
      testGatsbySimulation.addExchanges("foo", Seq(mockStubExchange1, mockStubExchange2)) must beTrue
      testGatsbySimulation.addExchange("foo", mockStubExchange1) must beFalse // We already know it
      there were two(testGatsbySimulation.mockStubbyServer).addExchange(any[StubExchange])
    }

    "Allow exchanges to be removed per-prefix" in {
      val testGatsbySimulation = new TestGatsbySimulation(Nil)
      val mockStubExchange = givenStubExchange("GET", "/bar")
      testGatsbySimulation.addExchange("foo", mockStubExchange) must beTrue
      // removal is keyed by prefix: "fo" matches "foo", "fox" does not
      testGatsbySimulation.removeExchange("fo") must beTrue
      testGatsbySimulation.removeExchange("fox") must beFalse
      testGatsbySimulation.removeExchange("fo") must beFalse
    }
  }

  /** Builds a mocked StubExchange whose request reports the given method and path. */
  def givenStubExchange(method: String, path: String) = {
    val mockStubExchange = mock[StubExchange]
    val mockStubRequest = mock[StubRequest]
    mockStubExchange.request returns mockStubRequest
    mockStubRequest.method returns Some(method)
    mockStubRequest.path returns Some(path)
    mockStubExchange
  }
}
| themillhousegroup/gatsby | src/test/scala/com/themillhousegroup/gatsby/GatsbySimulationSpec.scala | Scala | mit | 2,830 |
package com.karasiq.bootstrap.tooltip
import scala.language.postfixOps
import com.karasiq.bootstrap.components.BootstrapComponents
import com.karasiq.bootstrap.context.RenderingContext
import com.karasiq.bootstrap.utils.Utils
trait TextTooltips { self: RenderingContext with BootstrapComponents with Tooltips with Utils ⇒
  import scalaTags.all._
  import BootstrapAttrs._

  /** Tooltip implemented purely through data attributes on the target element. */
  class TextTooltip(val options: TooltipOptions) extends AbstractTooltip {
    override def render(md: ModifierT*): ModifierT = {
      // Prepend data-toggle="tooltip" and the option-derived data-* attributes
      // to the caller-supplied modifiers.
      (`data-toggle` := "tooltip") +: Bootstrap.dataProps(options.toStrings:_*) +: md
    }
  }

  /**
   * Inspired by the excellent jQuery.tipsy plugin written by Jason Frame;
   * Tooltips are an updated version, which don't rely on images, use CSS3 for animations, and data-attributes for local title storage.
   * Tooltips with zero-length titles are never displayed.
   * @see [[http://getbootstrap.com/javascript/#tooltips]]
   */
  object Tooltip extends TooltipFactory {
    def apply(content: Frag, placement: TooltipPlacement): AbstractTooltip = {
      // html = true so the provided Frag is rendered as markup, not escaped text.
      new TextTooltip(TooltipOptions(html = true, title = content, placement = placement))
    }
  }
}
| Karasiq/scalajs-bootstrap | library/shared/src/main/scala/com/karasiq/bootstrap/tooltip/TextTooltips.scala | Scala | mit | 1,177 |
package jp.co.cyberagent.aeromock.core
import scala.reflect.ClassTag
import scalaz.Scalaz._
import scalaz.Validation
import scalaz.Validation._
import org.apache.commons.lang3.StringUtils
object Validations {

  /** Attempts to cast `value` to `S`, capturing any thrown
    * `ClassCastException` as a failed `Validation`.
    */
  def cast[S: ClassTag](value: Any): Validation[Throwable, S] = {
    val targetClass = implicitly[ClassTag[S]].runtimeClass.asInstanceOf[Class[S]]
    fromTryCatch(targetClass.cast(value))
  }

  /** Succeeds with `value` unless it is null, empty, or whitespace-only. */
  def blank(value: String): Validation[Throwable, String] = {
    if (StringUtils.isNotBlank(value)) value.success[Throwable]
    else new IllegalArgumentException("must be not blank").failure[String]
  }
}
| CyberAgent/aeromock | aeromock-server/src/main/scala/jp/co/cyberagent/aeromock/core/Validations.scala | Scala | mit | 617 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import java.io.{ByteArrayOutputStream, CharConversionException}
import java.nio.charset.MalformedInputException
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import com.fasterxml.jackson.core._
import org.apache.spark.SparkUpgradeException
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.{InternalRow, NoopFilters, StructFilters}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.LegacyDateFormats.FAST_DATE_FORMAT
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.util.Utils
/**
* Constructs a parser for a given schema that translates a json string to an [[InternalRow]].
*/
class JacksonParser(
schema: DataType,
val options: JSONOptions,
allowArrayAsStructs: Boolean,
filters: Seq[Filter] = Seq.empty) extends Logging {
import JacksonUtils._
import com.fasterxml.jackson.core.JsonToken._
// A `ValueConverter` is responsible for converting a value from `JsonParser`
// to a value in a field for `InternalRow`.
private type ValueConverter = JsonParser => AnyRef
// `ValueConverter`s for the root schema for all fields in the schema
private val rootConverter = makeRootConverter(schema)
private val factory = options.buildJsonFactory()
private lazy val timestampFormatter = TimestampFormatter(
options.timestampFormat,
options.zoneId,
options.locale,
legacyFormat = FAST_DATE_FORMAT,
isParsing = true)
private lazy val dateFormatter = DateFormatter(
options.dateFormat,
options.locale,
legacyFormat = FAST_DATE_FORMAT,
isParsing = true)
/**
* Create a converter which converts the JSON documents held by the `JsonParser`
* to a value according to a desired schema. This is a wrapper for the method
* `makeConverter()` to handle a row wrapped with an array.
*/
private def makeRootConverter(dt: DataType): JsonParser => Iterable[InternalRow] = {
dt match {
case st: StructType => makeStructRootConverter(st)
case mt: MapType => makeMapRootConverter(mt)
case at: ArrayType => makeArrayRootConverter(at)
}
}
private def makeStructRootConverter(st: StructType): JsonParser => Iterable[InternalRow] = {
val elementConverter = makeConverter(st)
val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
val jsonFilters = if (SQLConf.get.jsonFilterPushDown) {
new JsonFilters(filters, st)
} else {
new NoopFilters
}
(parser: JsonParser) => parseJsonToken[Iterable[InternalRow]](parser, st) {
case START_OBJECT => convertObject(parser, st, fieldConverters, jsonFilters, isRoot = true)
// SPARK-3308: support reading top level JSON arrays and take every element
// in such an array as a row
//
// For example, we support, the JSON data as below:
//
// [{"a":"str_a_1"}]
// [{"a":"str_a_2"}, {"b":"str_b_3"}]
//
// resulting in:
//
// List([str_a_1,null])
// List([str_a_2,null], [null,str_b_3])
//
case START_ARRAY if allowArrayAsStructs =>
val array = convertArray(parser, elementConverter)
// Here, as we support reading top level JSON arrays and take every element
// in such an array as a row, this case is possible.
if (array.numElements() == 0) {
Array.empty[InternalRow]
} else {
array.toArray[InternalRow](schema)
}
case START_ARRAY =>
throw QueryExecutionErrors.cannotParseJsonArraysAsStructsError()
}
}
private def makeMapRootConverter(mt: MapType): JsonParser => Iterable[InternalRow] = {
val fieldConverter = makeConverter(mt.valueType)
(parser: JsonParser) => parseJsonToken[Iterable[InternalRow]](parser, mt) {
case START_OBJECT => Some(InternalRow(convertMap(parser, fieldConverter)))
}
}
private def makeArrayRootConverter(at: ArrayType): JsonParser => Iterable[InternalRow] = {
val elemConverter = makeConverter(at.elementType)
(parser: JsonParser) => parseJsonToken[Iterable[InternalRow]](parser, at) {
case START_ARRAY => Some(InternalRow(convertArray(parser, elemConverter)))
case START_OBJECT if at.elementType.isInstanceOf[StructType] =>
// This handles the case when an input JSON object is a structure but
// the specified schema is an array of structures. In that case, the input JSON is
// considered as an array of only one element of struct type.
// This behavior was introduced by changes for SPARK-19595.
//
// For example, if the specified schema is ArrayType(new StructType().add("i", IntegerType))
// and JSON input as below:
//
// [{"i": 1}, {"i": 2}]
// [{"i": 3}]
// {"i": 4}
//
// The last row is considered as an array with one element, and result of conversion:
//
// Seq(Row(1), Row(2))
// Seq(Row(3))
// Seq(Row(4))
//
val st = at.elementType.asInstanceOf[StructType]
val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
Some(InternalRow(new GenericArrayData(convertObject(parser, st, fieldConverters).toArray)))
}
}
private val decimalParser = ExprUtils.getDecimalParser(options.locale)
/**
* Create a converter which converts the JSON documents held by the `JsonParser`
* to a value according to a desired schema.
*/
  // Dispatches on the virtual data type and returns a converter that reads the
  // parser's current token into Spark's internal representation for that type.
  def makeConverter(dataType: DataType): ValueConverter = dataType match {
    case BooleanType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Boolean](parser, dataType) {
        case VALUE_TRUE => true
        case VALUE_FALSE => false
      }

    case ByteType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Byte](parser, dataType) {
        case VALUE_NUMBER_INT => parser.getByteValue
      }

    case ShortType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Short](parser, dataType) {
        case VALUE_NUMBER_INT => parser.getShortValue
      }

    case IntegerType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Integer](parser, dataType) {
        case VALUE_NUMBER_INT => parser.getIntValue
      }

    case LongType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Long](parser, dataType) {
        case VALUE_NUMBER_INT => parser.getLongValue
      }

    case FloatType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Float](parser, dataType) {
        case VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT =>
          parser.getFloatValue

        case VALUE_STRING if parser.getTextLength >= 1 =>
          // Special case handling for NaN and Infinity.
          parser.getText match {
            case "NaN" => Float.NaN
            case "Infinity" => Float.PositiveInfinity
            case "-Infinity" => Float.NegativeInfinity
            case other => throw QueryExecutionErrors.cannotParseStringAsDataTypeError(
              other, FloatType)
          }
      }

    case DoubleType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Double](parser, dataType) {
        case VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT =>
          parser.getDoubleValue

        case VALUE_STRING if parser.getTextLength >= 1 =>
          // Special case handling for NaN and Infinity.
          parser.getText match {
            case "NaN" => Double.NaN
            case "Infinity" => Double.PositiveInfinity
            case "-Infinity" => Double.NegativeInfinity
            case other =>
              throw QueryExecutionErrors.cannotParseStringAsDataTypeError(other, DoubleType)
          }
      }

    case StringType =>
      (parser: JsonParser) => parseJsonToken[UTF8String](parser, dataType) {
        case VALUE_STRING =>
          UTF8String.fromString(parser.getText)

        case _ =>
          // Note that it always tries to convert the data as string without the case of failure.
          // Any non-string token (object, array, number, ...) is re-serialized
          // verbatim to its JSON text and kept as the string value.
          val writer = new ByteArrayOutputStream()
          Utils.tryWithResource(factory.createGenerator(writer, JsonEncoding.UTF8)) {
            generator => generator.copyCurrentStructure(parser)
          }
          UTF8String.fromBytes(writer.toByteArray)
      }

    case TimestampType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Long](parser, dataType) {
        case VALUE_STRING if parser.getTextLength >= 1 =>
          try {
            timestampFormatter.parse(parser.getText)
          } catch {
            case NonFatal(e) =>
              // If fails to parse, then tries the way used in 2.0 and 1.x for backwards
              // compatibility.
              val str = DateTimeUtils.cleanLegacyTimestampStr(UTF8String.fromString(parser.getText))
              DateTimeUtils.stringToTimestamp(str, options.zoneId).getOrElse(throw e)
          }

        case VALUE_NUMBER_INT =>
          // Integer timestamps are scaled by 1e6 (presumably seconds ->
          // microseconds, Spark's internal timestamp resolution).
          parser.getLongValue * 1000000L
      }

    case DateType =>
      (parser: JsonParser) => parseJsonToken[java.lang.Integer](parser, dataType) {
        case VALUE_STRING if parser.getTextLength >= 1 =>
          try {
            dateFormatter.parse(parser.getText)
          } catch {
            case NonFatal(e) =>
              // If fails to parse, then tries the way used in 2.0 and 1.x for backwards
              // compatibility.
              val str = DateTimeUtils.cleanLegacyTimestampStr(UTF8String.fromString(parser.getText))
              DateTimeUtils.stringToDate(str).getOrElse {
                // In Spark 1.5.0, we store the data as number of days since epoch in string.
                // So, we just convert it to Int.
                try {
                  RebaseDateTime.rebaseJulianToGregorianDays(parser.getText.toInt)
                } catch {
                  case _: NumberFormatException => throw e
                }
              }.asInstanceOf[Integer]
          }
      }

    case BinaryType =>
      (parser: JsonParser) => parseJsonToken[Array[Byte]](parser, dataType) {
        case VALUE_STRING => parser.getBinaryValue
      }

    case dt: DecimalType =>
      (parser: JsonParser) => parseJsonToken[Decimal](parser, dataType) {
        case (VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT) =>
          Decimal(parser.getDecimalValue, dt.precision, dt.scale)
        case VALUE_STRING if parser.getTextLength >= 1 =>
          // Decimals may also arrive as quoted strings; parse with the
          // locale-aware decimalParser before applying precision/scale.
          val bigDecimal = decimalParser(parser.getText)
          Decimal(bigDecimal, dt.precision, dt.scale)
      }

    case CalendarIntervalType => (parser: JsonParser) =>
      parseJsonToken[CalendarInterval](parser, dataType) {
        case VALUE_STRING =>
          IntervalUtils.safeStringToInterval(UTF8String.fromString(parser.getText))
      }

    case st: StructType =>
      // Converters for each field are built once, outside the returned closure.
      val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
      (parser: JsonParser) => parseJsonToken[InternalRow](parser, dataType) {
        case START_OBJECT => convertObject(parser, st, fieldConverters).get
      }

    case at: ArrayType =>
      val elementConverter = makeConverter(at.elementType)
      (parser: JsonParser) => parseJsonToken[ArrayData](parser, dataType) {
        case START_ARRAY => convertArray(parser, elementConverter)
      }

    case mt: MapType =>
      // JSON object keys are always strings, so only the value type needs a converter.
      val valueConverter = makeConverter(mt.valueType)
      (parser: JsonParser) => parseJsonToken[MapData](parser, dataType) {
        case START_OBJECT => convertMap(parser, valueConverter)
      }

    case udt: UserDefinedType[_] =>
      // UDTs are converted via their underlying SQL type.
      makeConverter(udt.sqlType)

    case _ =>
      (parser: JsonParser) =>
        // Here, we pass empty `PartialFunction` so that this case can be
        // handled as a failed conversion. It will throw an exception as
        // long as the value is not null.
        parseJsonToken[AnyRef](parser, dataType)(PartialFunction.empty[JsonToken, AnyRef])
  }
/**
* This method skips `FIELD_NAME`s at the beginning, and handles nulls ahead before trying
* to parse the JSON token using given function `f`. If the `f` failed to parse and convert the
* token, call `failedConversion` to handle the token.
*/
private def parseJsonToken[R >: Null](
parser: JsonParser,
dataType: DataType)(f: PartialFunction[JsonToken, R]): R = {
parser.getCurrentToken match {
case FIELD_NAME =>
// There are useless FIELD_NAMEs between START_OBJECT and END_OBJECT tokens
parser.nextToken()
parseJsonToken[R](parser, dataType)(f)
case null | VALUE_NULL => null
case other => f.applyOrElse(other, failedConversion(parser, dataType))
}
}
  // Legacy flag: when enabled, empty strings are tolerated (treated as null)
  // for most data types instead of raising an error.
  private val allowEmptyString = SQLConf.get.getConf(SQLConf.LEGACY_ALLOW_EMPTY_STRING_IN_JSON)

  /**
   * This function throws an exception for failed conversion. For empty string on data types
   * except for string and binary types, this also throws an exception.
   */
  private def failedConversion[R >: Null](
      parser: JsonParser,
      dataType: DataType): PartialFunction[JsonToken, R] = {
    // SPARK-25040: Disallows empty strings for data types except for string and binary types.
    // But treats empty strings as null for certain types if the legacy config is enabled.
    case VALUE_STRING if parser.getTextLength < 1 && allowEmptyString =>
      dataType match {
        // Even in legacy mode these four types still reject empty strings.
        case FloatType | DoubleType | TimestampType | DateType =>
          throw QueryExecutionErrors.failToParseEmptyStringForDataTypeError(dataType)
        case _ => null
      }

    // Without the legacy flag, an empty string is always an error here (string
    // and binary types never reach this fallback for VALUE_STRING).
    case VALUE_STRING if parser.getTextLength < 1 =>
      throw QueryExecutionErrors.failToParseEmptyStringForDataTypeError(dataType)

    case token =>
      // We cannot parse this token based on the given data type. So, we throw a
      // RuntimeException and this exception will be caught by `parse` method.
      throw QueryExecutionErrors.failToParseValueForDataTypeError(dataType, token)
  }
/**
* Parse an object from the token stream into a new Row representing the schema.
* Fields in the json that are not defined in the requested schema will be dropped.
*/
  private def convertObject(
      parser: JsonParser,
      schema: StructType,
      fieldConverters: Array[ValueConverter],
      structFilters: StructFilters = new NoopFilters(),
      isRoot: Boolean = false): Option[InternalRow] = {
    val row = new GenericInternalRow(schema.length)
    // First field-level failure seen while parsing a root record; kept so the
    // partially-filled row can be reported together with the cause.
    var badRecordException: Option[Throwable] = None
    var skipRow = false
    structFilters.reset()
    while (!skipRow && nextUntil(parser, JsonToken.END_OBJECT)) {
      schema.getFieldIndex(parser.getCurrentName) match {
        case Some(index) =>
          try {
            row.update(index, fieldConverters(index).apply(parser))
            // Pushed-down filters may prove the row can never match once this
            // field is set; if so, stop parsing the rest of the object.
            skipRow = structFilters.skipRow(row, index)
          } catch {
            case e: SparkUpgradeException => throw e
            // Only root-level records tolerate field failures: record the first
            // error, skip the offending value, and keep filling other fields.
            case NonFatal(e) if isRoot =>
              badRecordException = badRecordException.orElse(Some(e))
              parser.skipChildren()
          }
        case None =>
          // Field not in the requested schema: drop it.
          parser.skipChildren()
      }
    }

    if (skipRow) {
      None
    } else if (badRecordException.isEmpty) {
      Some(row)
    } else {
      // Surface the partial row so the caller can emit it as a corrupt record.
      throw PartialResultException(row, badRecordException.get)
    }
  }
/**
* Parse an object as a Map, preserving all fields.
*/
private def convertMap(
parser: JsonParser,
fieldConverter: ValueConverter): MapData = {
val keys = ArrayBuffer.empty[UTF8String]
val values = ArrayBuffer.empty[Any]
while (nextUntil(parser, JsonToken.END_OBJECT)) {
keys += UTF8String.fromString(parser.getCurrentName)
values += fieldConverter.apply(parser)
}
// The JSON map will never have null or duplicated map keys, it's safe to create a
// ArrayBasedMapData directly here.
ArrayBasedMapData(keys.toArray, values.toArray)
}
/**
* Parse an object as a Array.
*/
private def convertArray(
parser: JsonParser,
fieldConverter: ValueConverter): ArrayData = {
val values = ArrayBuffer.empty[Any]
while (nextUntil(parser, JsonToken.END_ARRAY)) {
values += fieldConverter.apply(parser)
}
new GenericArrayData(values.toArray)
}
/**
* Parse the JSON input to the set of [[InternalRow]]s.
*
* @param recordLiteral an optional function that will be used to generate
* the corrupt record text instead of record.toString
*/
  def parse[T](
      record: T,
      createParser: (JsonFactory, T) => JsonParser,
      recordLiteral: T => UTF8String): Iterable[InternalRow] = {
    try {
      Utils.tryWithResource(createParser(factory, record)) { parser =>
        // a null first token is equivalent to testing for input.trim.isEmpty
        // but it works on any token stream and not just strings
        parser.nextToken() match {
          case null => None
          case _ => rootConverter.apply(parser) match {
            case null => throw QueryExecutionErrors.rootConverterReturnNullError()
            case rows => rows.toSeq
          }
        }
      }
    } catch {
      // Upgrade errors must propagate unchanged so users see migration advice.
      case e: SparkUpgradeException => throw e
      case e @ (_: RuntimeException | _: JsonProcessingException | _: MalformedInputException) =>
        // JSON parser currently doesn't support partial results for corrupted records.
        // For such records, all fields other than the field configured by
        // `columnNameOfCorruptRecord` are set to `null`.
        throw BadRecordException(() => recordLiteral(record), () => None, e)
      case e: CharConversionException if options.encoding.isEmpty =>
        // Decoding failed and no encoding was specified; hint the user to set one.
        val msg =
          """JSON parser cannot handle a character in its input.
            |Specifying encoding as an input option explicitly might help to resolve the issue.
            |""".stripMargin + e.getMessage
        val wrappedCharException = new CharConversionException(msg)
        wrappedCharException.initCause(e)
        throw BadRecordException(() => recordLiteral(record), () => None, wrappedCharException)
      case PartialResultException(row, cause) =>
        // Some fields parsed successfully: forward the partial row so it can be
        // emitted alongside the corrupt-record column.
        throw BadRecordException(
          record = () => recordLiteral(record),
          partialResult = () => Some(row),
          cause)
    }
  }
}
| wangmiao1981/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala | Scala | apache-2.0 | 19,246 |
package com.karasiq.nanoboard.sources.bitmessage
import scala.concurrent.Future
import scala.language.{dynamics, implicitConversions}
import akka.http.scaladsl.HttpExt
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.{Authorization, BasicHttpCredentials}
import akka.stream.ActorMaterializer
import scalatags.Text.all._
import scalatags.text.Builder
import com.karasiq.nanoboard.sources.bitmessage.XmlRpcProxy.{XmlRpcParameter, XmlRpcTags}
/**
* Simple XML-RPC wrapper, based on `akka-http`
*/
private[bitmessage] final class XmlRpcProxy(http: HttpExt, apiAddress: String, apiPort: Int, apiUsername: String, apiPassword: String)(implicit am: ActorMaterializer) extends Dynamic {
  // Dynamic dispatch: `proxy.someMethod(args: _*)` is rewritten by the compiler
  // into applyDynamic("someMethod")(args), i.e. an XML-RPC call of that name.
  def applyDynamic(method: String)(args: XmlRpcParameter*): Future[HttpResponse] = {
    import XmlRpcTags._
    // Render the <methodCall> document with scalatags, prefixed by the XML prolog.
    val entity = "<?xml version=\\"1.0\\"?>" + methodCall(
      methodName(method),
      params(
        for (arg ← args) yield param(value(arg))
      )
    )
    // The endpoint uses HTTP basic authentication with the configured credentials.
    val authentication = Authorization(BasicHttpCredentials(apiUsername, apiPassword))
    val url = s"http://$apiAddress:$apiPort/"
    http.singleRequest(HttpRequest(method = HttpMethods.POST, uri = url, entity = HttpEntity(ContentTypes.`text/xml(UTF-8)`, entity), headers = List(authentication)))
  }
}
private[bitmessage] object XmlRpcProxy {
  // scalatags has no XML-RPC vocabulary of its own, so the tags are declared here.
  object XmlRpcTags {
    val methodCall = tag("methodCall")
    val methodName = tag("methodName")
    val params = tag("params")
    val param = tag("param")
    val value = tag("value")
    val int = tag("int")
  }

  // Type class mapping a Scala value to its scalatags rendering inside <value>.
  sealed trait XmlDataWrapper[T] {
    def toModifier(value: T): Modifier
  }

  // Strings render as bare text (XML-RPC's default <value> content).
  implicit object StringXmlDataWrapper extends XmlDataWrapper[String] {
    def toModifier(value: String) = value
  }

  // Ints are wrapped in an explicit <int> element.
  implicit object IntXmlDataWrapper extends XmlDataWrapper[Int] {
    def toModifier(value: Int) = XmlRpcTags.int(value)
  }

  // Unit contributes nothing to the output (an empty <value/>).
  implicit object UnitXmlDataWrapper extends XmlDataWrapper[Unit] {
    def toModifier(value: Unit) = ()
  }

  // Marker type accepted by XmlRpcProxy.applyDynamic as a call argument.
  sealed trait XmlRpcParameter extends Modifier

  // Lifts any value that has an XmlDataWrapper instance into an XmlRpcParameter.
  implicit def anyToXmlRpcParameter[T: XmlDataWrapper](value: T): XmlRpcParameter = new XmlRpcParameter {
    def applyTo(t: Builder) = implicitly[XmlDataWrapper[T]].toModifier(value).applyTo(t)
  }
}
| Karasiq/nanoboard | library/src/main/scala/com/karasiq/nanoboard/sources/bitmessage/XmlRpcProxy.scala | Scala | apache-2.0 | 2,234 |
package com.example.kafka010
import java.{util => ju}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010._
import org.apache.spark.{SparkContext, TaskContext}
import scala.collection.JavaConversions._
import com.example._
object KafkaStreamingLatestExample {
  def main(args: Array[String]): Unit = {
    // Entry point: runs the checkpointing variant of the Kafka 0.10 direct stream.
    kafkaStream010Checkpointing()
  }
  /**
   * Kafka 0.10.0 API: launches the direct stream with checkpointing enabled,
   * writing checkpoint data under the "checkpointing" path.
   */
  def kafkaStream010Checkpointing() =
    launchWithCheckpointing(kafkaStreaming010, appName = "Kafka010_DirectStream", checkpointPath = "checkpointing")

  /**
   * Kafka 0.10.0 API: launches the direct stream without checkpointing
   * (offsets are committed back to Kafka by `storingOffsetsItself`).
   */
  def kafkaStream010Itself() =
    launchWithItself(kafkaStreaming010, appName = "Kafka010_DirectStream")
  // Builds the direct (receiver-less) stream over "sample_topic", prints each
  // record's key/value, logs the offset range of every partition per batch, and
  // wires up async offset commits.
  private def kafkaStreaming010(streamingContext: StreamingContext): Unit = {
    val topics = Array("sample_topic")
    val stream = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      PreferConsistent, //It will consistently distribute partitions across all executors.
      Subscribe[String, String](topics, kafkaParams)
    )
    stream.map(record => (record.key, record.value)).print()
    stream.foreachRDD { rdd =>
      // The RDD backing a direct stream exposes the Kafka offset ranges it covers.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd.foreachPartition { _ =>
        // Index by the task's partition id — the direct stream keeps a 1:1
        // mapping between RDD partitions and Kafka partitions.
        val o: OffsetRange = offsetRanges(TaskContext.get.partitionId)
        println(s"${o.topic} ${o.partition} ${o.fromOffset} ${o.untilOffset}")
      }
    }
    storingOffsetsItself(stream)
  }
  // Commits the consumed offset ranges back to Kafka asynchronously after each
  // batch, instead of relying on Spark checkpointing for offset storage.
  private def storingOffsetsItself(stream: InputDStream[ConsumerRecord[String, String]]) = {
    stream.foreachRDD { rdd =>
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }
  }
  // Batch (non-streaming) read: materialises two fixed offset ranges of
  // "sample_topic" as an RDD and prints the first record's value.
  private def kafkaRdd010() = {
    val sparkContext = new SparkContext("local[*]", "kafkaRdd010")
    val offsetRanges = Array(
      // topic, partition, inclusive starting offset, exclusive ending offset
      OffsetRange("sample_topic", 0, 10, 20),
      OffsetRange("sample_topic", 1, 10, 20)
    )
    // kafkaParams comes from com.example._; the JavaConversions import above
    // presumably adapts it to the java.util.Map constructor — TODO confirm.
    val params = new ju.HashMap[String, Object](kafkaParams)
    val kafkaRDD = KafkaUtils.createRDD[String, String](sparkContext, params , offsetRanges, PreferConsistent)
    println(kafkaRDD.map(_.value()).first())
  }
} | arukavytsia/kafka-samples | spark-streaming-latest/src/main/scala/com/example/kafka010/KafkaStreamingLatestExample.scala | Scala | apache-2.0 | 2,548 |
package org.nisshiee.toban.model
import scalaz._, Scalaz._
import java.sql.Connection
import org.nisshiee.toban.model.db.MemoDb._
/** A free-form memo attached to a single calendar date. */
case class Memo(date: LocalDate, memo: String)
object Memo {

  /** Looks up the memo stored for `date`, if any. */
  def find(date: LocalDate)(implicit c: Connection) =
    findSql.on('date -> date.toDate).singleOpt(parser)

  /**
   * Upserts the memo for `date`: when a row already exists it is updated
   * (`>|` maps the found value to the update branch); otherwise a new row is
   * inserted (`|` is scalaz's getOrElse fallback). Returns the stored Memo.
   */
  def replace(date: LocalDate, memo: String)(implicit c: Connection) =
    find(date) >| {
      updateSql.on('date -> date.toDate, 'memo -> memo).executeUpdate() match {
        case 1 => Memo(date, memo)
        // BUG FIX: the exception was previously constructed but never thrown,
        // so a failed update silently returned the Exception object as a value.
        case _ => throw new Exception("DB Error")
      }
    } | {
      createSql.on('date -> date.toDate, 'memo -> memo).executeUpdate() match {
        case 1 => Memo(date, memo)
        // BUG FIX: same as above — throw instead of returning the exception.
        case _ => throw new Exception("DB Error")
      }
    }

  /** Deletes the memo for `date`; true if a row was removed, false if absent. */
  def delete(date: LocalDate)(implicit c: Connection) =
    deleteSql.on(
      'date -> date.toDate
    ).executeUpdate() match {
      case 1 => true
      case 0 => false
      case _ => throw new Exception("DB ERROR")
    }

}
trait Memos {

  // Memos compare equal iff they are for the same date (memo text is ignored).
  implicit lazy val MemoEqual = equalBy[Memo, LocalDate](_.date)

  // Shows a memo as its text; a null memo renders as the empty string.
  implicit lazy val MemoShow = shows[Memo] {
    case Memo(_, memo) => Option(memo) | ""
  }

}
| nisshiee/to-ban | app/models/Memo.scala | Scala | mit | 1,155 |
package facebook4s
import facebook4s.response._
import http.client.response.HttpHeader
import play.api.libs.json._
// Builders for canned Facebook Graph batch responses (JSON strings and typed
// response objects) used by the test suites.
object FacebookTestHelpers {

  val NUM_SUCCESSES = 3
  val NUM_ERRORS = 3

  // Facebook's OAuth error code and the HTTP codes used in fabricated parts.
  val JSON_ERROR_CODE = 190
  val HTTP_SUCCESS_CODE = 200
  val HTTP_ERROR_CODE = 403

  val NAME = "Some Name"

  // One successful batch part; the body is embedded as an escaped JSON string.
  def jsonPartResponse(id: Int): String =
    s"""
        |{
        |  "code": $HTTP_SUCCESS_CODE,
        |  "headers": [ { "name": "Content-Type", "value": "text/javascript; charset=UTF-8" } ],
        |  "body": "${jsonSuccessBody(id).replaceAll("\\n", "").replaceAll("""\\"""", """\\\\"""")}"
        |}""".stripMargin

  def jsonSuccessBody(id: Int): String = {
    s"""
       |{
       |  "name": "$NAME",
       |  "id": "$id"
       |}""".stripMargin
  }

  // Body mimicking a Graph API OAuth error payload.
  def jsonErrorBody(id: Int): String = {
    s"""
       |{
       |  "error": {
       |    "message": "Message $id",
       |    "type": "OAuthException",
       |    "code": $JSON_ERROR_CODE,
       |    "error_subcode": 460,
       |    "error_user_title": "A title",
       |    "error_user_msg": "A message",
       |    "fbtrace_id": "EJplcsCHuLu"
       |  }
       |}""".stripMargin
  }

  // One failed batch part wrapping jsonErrorBody as an escaped string.
  def jsonPartErrorResponse(id: Int) =
    s"""
       | { "code": $HTTP_ERROR_CODE,
       |   "headers": [ { "name": "Content-Type", "value": "text/javascript; charset=UTF-8" } ],
       |   "body": "${jsonErrorBody(id).replaceAll("\\n", "").replaceAll("""\\"""", """\\\\"""")}"
       | }""".stripMargin

  // Concatenates numSuccess success parts followed by numErrors error parts.
  def makeJsonBody(numSuccess: Int, numErrors: Int) =
    (1 to numSuccess).map { jsonPartResponse } ++ (1 to numErrors).map { jsonPartErrorResponse }

  // Full batch response: a JSON array of part objects.
  def makeJsonResponse(numSuccess: Int, numErrors: Int) =
    "[" + makeJsonBody(numSuccess, numErrors).mkString(",") + "]"

  val defaultHeaders = Seq(HttpHeader("Content-Type", "text/javascript; charset=UTF-8"))
  val defaultPartHeaders = Seq(HttpHeader("Content-Type", "text/javascript; charset=UTF-8"))

  // Typed builders mirroring the JSON helpers above.
  def makeBatchResponsePartBodyData(name: String = "data-name", period: String = "day", value: JsArray): JsObject = Json.obj(
    "name" → name,
    "period" → period,
    "values" → value)

  def makeBatchResponsePartBody(data: Seq[JsObject], paging: FacebookTimePaging): JsObject = Json.obj(
    "data" → data,
    "paging" → Json.toJson(paging))

  def makeBatchResponsePart(code: Int = 200, headers: Seq[HttpHeader] = defaultPartHeaders, body: JsObject): FacebookBatchResponsePart = {
    FacebookBatchResponsePart(code, headers, body.toString)
  }

  def makeBatchResponse(code: Int = 200, headers: Seq[HttpHeader] = defaultHeaders, parts: Seq[FacebookBatchResponsePart]): FacebookBatchResponse = {
    FacebookBatchResponse(code, code.toString, headers, Array.empty, Json.parse("{}"), parts)
  }

  // Part whose body is a paged "data" payload (insights-style values).
  def jsonDataPartResponse(id: Int): String =
    s"""
       |{
       |  "code": $HTTP_SUCCESS_CODE,
       |  "headers": [ { "name": "Content-Type", "value": "text/javascript; charset=UTF-8" } ],
       |  "body": "${makeJsonData(id).replaceAll("\\n", "").replaceAll("""\\"""", """\\\\"""")}"
       |}""".stripMargin

  def makeJsonData(numResponse: Int) =
    s"""
       | {
       |  "data": [ ${(1 to numResponse).map { jsonDataPart }.mkString(",")} ],
       |  "paging": {
       |    "previous": "since=0&until=1",
       |    "next": "since=2&until=3"
       |  }
       | }
     """.stripMargin

  def jsonDataPart(id: Int): String =
    s"""
       |{
       |  "name": "data-name",
       |  "period": "day",
       |  "values": [${jsonDataBody(id).replaceAll("\\n", "")}]
       |}""".stripMargin

  def jsonDataBody(id: Int): String = {
    s"""
       |{
       |  "value": "$id"
       |}""".stripMargin
  }
}
| SocialOrra/social4s | facebook4s/src/test/scala/facebook4s/FacebookTestHelpers.scala | Scala | apache-2.0 | 3,649 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.sources
import org.apache.spark.sql.{ForeachWriter, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.execution.python.PythonForeachWriter
import org.apache.spark.sql.sources.v2.{DataSourceOptions, StreamingWriteSupportProvider}
import org.apache.spark.sql.sources.v2.writer.{DataWriter, WriterCommitMessage}
import org.apache.spark.sql.sources.v2.writer.streaming.{StreamingDataWriterFactory, StreamingWriteSupport}
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.StructType
/**
* A [[org.apache.spark.sql.sources.v2.DataSourceV2]] for forwarding data into the specified
* [[ForeachWriter]].
*
* @param writer The [[ForeachWriter]] to process all data.
* @param converter An object to convert internal rows to target type T. Either it can be
* a [[ExpressionEncoder]] or a direct converter function.
* @tparam T The expected type of the sink.
*/
case class ForeachWriteSupportProvider[T](
    writer: ForeachWriter[T],
    converter: Either[ExpressionEncoder[T], InternalRow => T])
  extends StreamingWriteSupportProvider {

  override def createStreamingWriteSupport(
      queryId: String,
      schema: StructType,
      mode: OutputMode,
      options: DataSourceOptions): StreamingWriteSupport = {
    new StreamingWriteSupport {
      // Foreach sinks keep no transactional state, so commit/abort are no-ops.
      override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
      override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}

      override def createStreamingWriterFactory(): StreamingDataWriterFactory = {
        // Resolve the row conversion once per query: either bind the encoder to
        // this query's schema, or use the supplied conversion function as-is.
        val rowConverter: InternalRow => T = converter match {
          case Left(enc) =>
            val boundEnc = enc.resolveAndBind(
              schema.toAttributes,
              SparkSession.getActiveSession.get.sessionState.analyzer)
            boundEnc.fromRow
          case Right(func) =>
            func
        }
        ForeachWriterFactory(writer, rowConverter)
      }

      override def toString: String = "ForeachSink"
    }
  }
}
object ForeachWriteSupportProvider {
  /**
   * Chooses the converter for the given writer: a [[PythonForeachWriter]] is
   * fed raw [[UnsafeRow]]s directly, while JVM writers decode rows through the
   * provided expression encoder.
   */
  def apply[T](
      writer: ForeachWriter[T],
      encoder: ExpressionEncoder[T]): ForeachWriteSupportProvider[_] = {
    writer match {
      case pythonWriter: PythonForeachWriter =>
        new ForeachWriteSupportProvider[UnsafeRow](
          pythonWriter, Right((x: InternalRow) => x.asInstanceOf[UnsafeRow]))
      case _ =>
        new ForeachWriteSupportProvider[T](writer, Left(encoder))
    }
  }
}
case class ForeachWriterFactory[T](
    writer: ForeachWriter[T],
    rowConverter: InternalRow => T)
  extends StreamingDataWriterFactory {
  // Creates one ForeachDataWriter per (partition, epoch); taskId is unused here.
  override def createWriter(
      partitionId: Int,
      taskId: Long,
      epochId: Long): ForeachDataWriter[T] = {
    new ForeachDataWriter(writer, rowConverter, partitionId, epochId)
  }
}
/**
* A [[DataWriter]] which writes data in this partition to a [[ForeachWriter]].
*
* @param writer The [[ForeachWriter]] to process all data.
* @param rowConverter A function which can convert [[InternalRow]] to the required type [[T]]
* @param partitionId
* @param epochId
* @tparam T The type expected by the writer.
*/
class ForeachDataWriter[T](
    writer: ForeachWriter[T],
    rowConverter: InternalRow => T,
    partitionId: Int,
    epochId: Long)
  extends DataWriter[InternalRow] {

  // If open returns false, we should skip writing rows.
  private val opened = writer.open(partitionId, epochId)

  override def write(record: InternalRow): Unit = {
    // Idiom fix: guard with a conditional instead of an early `return`.
    if (opened) {
      try {
        writer.process(rowConverter(record))
      } catch {
        // Deliberately broad: the writer must be closed with the failure before
        // it is rethrown to fail the task.
        case t: Throwable =>
          writer.close(t)
          throw t
      }
    }
  }

  override def commit(): WriterCommitMessage = {
    // A null error signals successful completion to the user's writer.
    writer.close(null)
    ForeachWriterCommitMessage
  }

  // Nothing to roll back for a foreach sink.
  override def abort(): Unit = {}
}
/**
 * An empty [[WriterCommitMessage]]. [[ForeachWriter]] implementations have no global
 * coordination, so there is no per-partition state to report back on commit.
 */
case object ForeachWriterCommitMessage extends WriterCommitMessage
| WindCanDie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriteSupportProvider.scala | Scala | apache-2.0 | 4,979 |
package jkm.cineclub.raft.DBWrapper
import akka.actor._
import scala.concurrent.duration._
import scala.Some
/**
* Created with IntelliJ IDEA.
* User: cineclub
* Date: 9/18/13
* Time: 11:29 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * IndirectActorProducer that injects the handler's collaborators, so a
 * ReqRspHandler can be created via Props with constructor arguments.
 */
class ReqRspHandlerDependencyInjector(operator:ActorRef, timeout:Duration,msgBufferSize:Int) extends IndirectActorProducer {
  override def actorClass = classOf[ReqRspHandler]
  override def produce = new ReqRspHandler(operator,timeout,msgBufferSize)
}
/**
 * Serialises request/response traffic towards `operator`: only one Req is in
 * flight at a time; later requests wait in a bounded buffer. An unanswered
 * request is failed with `Timeout` via akka's receive-timeout mechanism.
 */
class ReqRspHandler(val operator: ActorRef, val timeout: Duration, val msgBufferSize: Int) extends Actor {
  import ReqRspHandler._

  /** A pending request paired with the actor that sent it. */
  case class Request(requester: ActorRef, req: Req)

  // Overflow buffer for requests that arrive while another one is in flight;
  // when full, the evicted request is answered with `Busy`.
  val requestBuffer = new MsgBuffer[Request](msgBufferSize)

  // The single in-flight request, if any.
  var requested: Option[Request] = None
  var requestedTime: Long = 1

  /** Forwards `request` to the operator and arms the receive-timeout timer. */
  def sendReq(request: Request) {
    requested = Some(request)
    operator ! request.req
    requestedTime = System.currentTimeMillis()
    context.setReceiveTimeout(timeout)
  }

  /** Clears the in-flight request and disarms the receive-timeout timer. */
  def requestedDone() {
    context.setReceiveTimeout(Duration.Undefined)
    requested = None
  }

  /** Answers `request` with an unprocessed Rsp carrying the failure `cause`. */
  def sendUnprocessedRsp(request: Request, cause: Int) {
    request.requester ! Rsp(request.req.reqUID, cause)
  }

  /** A response belongs to a request iff their correlation UIDs match. */
  def checkReqRspPair(req: Req, rsp: Rsp): Boolean = req.reqUID == rsp.reqUID

  def receive = {
    case req: Req => {
      val requester = sender
      val request = Request(requester, req)
      if (requested.isEmpty) sendReq(request)
      else for (evictedRequest <- requestBuffer.put(request)) sendUnprocessedRsp(evictedRequest, Busy)
    }

    case rsp: Rsp => {
      for (request <- requested) {
        if (checkReqRspPair(request.req, rsp)) {
          request.requester ! rsp
          requestedDone()
          for (request <- requestBuffer.get) sendReq(request)
        }
      }
    }

    case ReceiveTimeout => {
      // Dead debug scaffolding removed: the original computed an unused time
      // delta and guarded this branch with a constant-true `if (q > 3)`, so
      // the behavior is unchanged by handling the timeout unconditionally.
      for (request <- requested) {
        sendUnprocessedRsp(request, Timeout)
        requestedDone()
        for (request <- requestBuffer.get) sendReq(request)
      }
    }
  }
}
object ReqRspHandler {
  // Status codes carried in Rsp.processed.
  val Ok = 0
  val Timeout = 1
  val Busy = 2
  val Error = 3

  /** Base request message; `reqUID` correlates a request with its response. */
  class Req(val reqUID: Int)

  /** Response message; `processed` is one of the status codes above. */
  class Rsp(val reqUID: Int, val processed: Int)

  object Rsp {
    // BUG FIX: the original used procedure syntax, so `apply` returned Unit and
    // every `Rsp(uid, cause)` expression evaluated to `()` — the handler was
    // sending Unit instead of a Rsp. Return the constructed instance.
    def apply(reqUID: Int, processed: Int): Rsp = new Rsp(reqUID, processed)
  }
}
| stepist/scalaraft | src/main/scala/jkm/cineclub/raft/DBWrapper/ReqRspHandler.scala | Scala | apache-2.0 | 2,628 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool
import akka.actor.{Actor, ActorRef, ActorRefFactory, Props}
import org.apache.openwhisk.common.{AkkaLogging, LoggingMarkers, TransactionId}
import org.apache.openwhisk.core.connector.MessageFeed
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import scala.collection.immutable
import scala.concurrent.duration._
import scala.util.Try
/** Scheduling state of a container as tracked by the pool. */
sealed trait WorkerState
// The container is currently executing work.
case object Busy extends WorkerState
// The container is idle and available for scheduling.
case object Free extends WorkerState
// A container's data together with its current scheduling state.
case class WorkerData(data: ContainerData, state: WorkerState)
/**
* A pool managing containers to run actions on.
*
* This pool fulfills the other half of the ContainerProxy contract. Only
* one job (either Start or Run) is sent to a child-actor at any given
* time. The pool then waits for a response of that container, indicating
* the container is done with the job. Only then will the pool send another
* request to that container.
*
* Upon actor creation, the pool will start to prewarm containers according
* to the provided prewarmConfig, iff set. Those containers will **not** be
* part of the poolsize calculation, which is capped by the poolSize parameter.
* Prewarm containers are only used, if they have matching arguments
* (kind, memory) and there is space in the pool.
*
* @param childFactory method to create new container proxy actor
* @param feed actor to request more work from
* @param prewarmConfig optional settings for container prewarming
* @param poolConfig config for the ContainerPool
*/
class ContainerPool(childFactory: ActorRefFactory => ActorRef,
feed: ActorRef,
prewarmConfig: List[PrewarmingConfig] = List.empty,
poolConfig: ContainerPoolConfig)
extends Actor {
import ContainerPool.memoryConsumptionOf
  implicit val logging = new AkkaLogging(context.system.log)

  // Idle containers available for scheduling.
  var freePool = immutable.Map.empty[ActorRef, ContainerData]
  // Containers that have reached their concurrency limit.
  var busyPool = immutable.Map.empty[ActorRef, ContainerData]
  // Containers started ahead of demand, matched by (kind, memory).
  var prewarmedPool = immutable.Map.empty[ActorRef, ContainerData]
  // If all memory slots are occupied and if there is currently no container to be removed, then the actions will be
  // buffered here to keep order of computation.
  // Otherwise actions with small memory-limits could block actions with large memory limits.
  var runBuffer = immutable.Queue.empty[Run]
  // Throttle interval for the "Rescheduling Run message" error log.
  val logMessageInterval = 10.seconds

  // Eagerly start the configured number of prewarm containers per kind/memory.
  prewarmConfig.foreach { config =>
    logging.info(this, s"pre-warming ${config.count} ${config.exec.kind} ${config.memoryLimit.toString}")(
      TransactionId.invokerWarmup)
    (1 to config.count).foreach { _ =>
      prewarmContainer(config.exec, config.memoryLimit)
    }
  }
def logContainerStart(r: Run, containerState: String, activeActivations: Int): Unit = {
val namespaceName = r.msg.user.namespace.name
val actionName = r.action.name.name
val maxConcurrent = r.action.limits.concurrency.maxConcurrent
val activationId = r.msg.activationId.toString
r.msg.transid.mark(
this,
LoggingMarkers.INVOKER_CONTAINER_START(containerState),
s"containerStart containerState: $containerState ($activeActivations of max $maxConcurrent) action: $actionName namespace: $namespaceName activationId: $activationId",
akka.event.Logging.InfoLevel)
}
def receive: Receive = {
// A job to run on a container
//
// Run messages are received either via the feed or from child containers which cannot process
// their requests and send them back to the pool for rescheduling (this may happen if "docker" operations
// fail for example, or a container has aged and was destroying itself when a new request was assigned)
case r: Run =>
// Check if the message is resent from the buffer. Only the first message on the buffer can be resent.
val isResentFromBuffer = runBuffer.nonEmpty && runBuffer.dequeueOption.exists(_._1.msg == r.msg)
// Only process request, if there are no other requests waiting for free slots, or if the current request is the
// next request to process
// It is guaranteed, that only the first message on the buffer is resent.
if (runBuffer.isEmpty || isResentFromBuffer) {
val createdContainer =
// Is there enough space on the invoker for this action to be executed.
if (hasPoolSpaceFor(busyPool, r.action.limits.memory.megabytes.MB)) {
// Schedule a job to a warm container
ContainerPool
.schedule(r.action, r.msg.user.namespace.name, freePool)
.map(container => (container, "warm"))
.orElse(
// There was no warm container. Try to take a prewarm container or a cold container.
// Is there enough space to create a new container or do other containers have to be removed?
if (hasPoolSpaceFor(busyPool ++ freePool, r.action.limits.memory.megabytes.MB)) {
takePrewarmContainer(r.action)
.map(container => (container, "prewarmed"))
.orElse(Some(createContainer(r.action.limits.memory.megabytes.MB), "cold"))
} else None)
.orElse(
// Remove a container and create a new one for the given job
ContainerPool
// Only free up the amount, that is really needed to free up
.remove(freePool, Math.min(r.action.limits.memory.megabytes, memoryConsumptionOf(freePool)).MB)
.map(removeContainer)
// If the list had at least one entry, enough containers were removed to start the new container. After
// removing the containers, we are not interested anymore in the containers that have been removed.
.headOption
.map(_ =>
takePrewarmContainer(r.action)
.map(container => (container, "recreatedPrewarm"))
.getOrElse(createContainer(r.action.limits.memory.megabytes.MB), "recreated")))
} else None
createdContainer match {
case Some(((actor, data), containerState)) =>
//only move to busyPool if max reached
if (data.activeActivationCount + 1 >= r.action.limits.concurrency.maxConcurrent) {
if (r.action.limits.concurrency.maxConcurrent > 1) {
logging.info(
this,
s"container for ${r.action} is now busy with ${data.activeActivationCount + 1} activations")
}
busyPool = busyPool + (actor -> data)
freePool = freePool - actor
}
// Remove the action that get's executed now from the buffer and execute the next one afterwards.
if (isResentFromBuffer) {
// It is guaranteed that the currently executed messages is the head of the queue, if the message comes
// from the buffer
val (_, newBuffer) = runBuffer.dequeue
runBuffer = newBuffer
runBuffer.dequeueOption.foreach { case (run, _) => self ! run }
}
actor ! r // forwards the run request to the container
logContainerStart(r, containerState, data.activeActivationCount)
case None =>
// this can also happen if createContainer fails to start a new container, or
// if a job is rescheduled but the container it was allocated to has not yet destroyed itself
// (and a new container would over commit the pool)
val isErrorLogged = r.retryLogDeadline.map(_.isOverdue).getOrElse(true)
val retryLogDeadline = if (isErrorLogged) {
logging.error(
this,
s"Rescheduling Run message, too many message in the pool, " +
s"freePoolSize: ${freePool.size} containers and ${memoryConsumptionOf(freePool)} MB, " +
s"busyPoolSize: ${busyPool.size} containers and ${memoryConsumptionOf(busyPool)} MB, " +
s"maxContainersMemory ${poolConfig.userMemory.toMB} MB, " +
s"userNamespace: ${r.msg.user.namespace.name}, action: ${r.action}, " +
s"needed memory: ${r.action.limits.memory.megabytes} MB, " +
s"waiting messages: ${runBuffer.size}")(r.msg.transid)
Some(logMessageInterval.fromNow)
} else {
r.retryLogDeadline
}
if (!isResentFromBuffer) {
// Add this request to the buffer, as it is not there yet.
runBuffer = runBuffer.enqueue(r)
}
// As this request is the first one in the buffer, try again to execute it.
self ! Run(r.action, r.msg, retryLogDeadline)
}
} else {
// There are currently actions waiting to be executed before this action gets executed.
// These waiting actions were not able to free up enough memory.
runBuffer = runBuffer.enqueue(r)
}
// Container is free to take more work
case NeedWork(data: WarmedData) =>
feed ! MessageFeed.Processed
if (data.activeActivationCount < data.action.limits.concurrency.maxConcurrent) {
//remove from busy pool (may already not be there), put back into free pool (to update activation counts)
freePool = freePool + (sender() -> data)
if (busyPool.contains(sender())) {
busyPool = busyPool - sender()
if (data.action.limits.concurrency.maxConcurrent > 1) {
logging.info(
this,
s"container for ${data.action} is no longer busy with ${data.activeActivationCount} activations")
}
}
} else {
//update freePool IFF it was previously PreWarmedData (it is still free, but now has WarmedData)
//otherwise update busyPool to reflect the updated activation counts
freePool.get(sender()) match {
case Some(_: PreWarmedData) =>
freePool = freePool + (sender() -> data)
case None =>
if (data.action.limits.concurrency.maxConcurrent > 1) {
logging.info(
this,
s"container for ${data.action} is now busy with ${data.activeActivationCount} activations")
}
busyPool = busyPool + (sender() -> data)
case _ => //was free+WarmedData - do nothing
}
}
// Container is prewarmed and ready to take work
case NeedWork(data: PreWarmedData) =>
prewarmedPool = prewarmedPool + (sender() -> data)
// Container got removed
case ContainerRemoved =>
// if container was in free pool, it may have been processing (but under capacity),
// so there is capacity to accept another job request
freePool.get(sender()).foreach { f =>
freePool = freePool - sender()
if (f.activeActivationCount > 0) {
feed ! MessageFeed.Processed
}
}
// container was busy (busy indicates at full capacity), so there is capacity to accept another job request
busyPool.get(sender()).foreach { _ =>
busyPool = busyPool - sender()
feed ! MessageFeed.Processed
}
// This message is received for one of these reasons:
// 1. Container errored while resuming a warm container, could not process the job, and sent the job back
// 2. The container aged, is destroying itself, and was assigned a job which it had to send back
// 3. The container aged and is destroying itself
// Update the free/busy lists but no message is sent to the feed since there is no change in capacity yet
case RescheduleJob =>
freePool = freePool - sender()
busyPool = busyPool - sender()
}
/**
 * Spawns a brand-new container actor and registers it in the free pool.
 *
 * @param memoryLimit the amount of memory to account for this container
 * @return the new actor paired with its bookkeeping data
 */
def createContainer(memoryLimit: ByteSize): (ActorRef, ContainerData) = {
  val actor = childFactory(context)
  val bookkeeping = MemoryData(memoryLimit)
  freePool = freePool + (actor -> bookkeeping)
  actor -> bookkeeping
}
/** Creates a new prewarmed container by telling a fresh child actor to start. */
def prewarmContainer(exec: CodeExec[_], memoryLimit: ByteSize): Unit = {
  val container = childFactory(context)
  container ! Start(exec, memoryLimit)
}
/**
 * Takes a prewarm container out of the prewarmed pool
 * iff a container with a matching kind and memory is found.
 *
 * @param action the action that holds the kind and the required memory.
 * @return the container iff found
 */
def takePrewarmContainer(action: ExecutableWhiskAction): Option[(ActorRef, ContainerData)] = {
  val requiredKind = action.exec.kind
  val requiredMemory = action.limits.memory.megabytes.MB

  val candidate = prewarmedPool.collectFirst {
    case entry @ (_, PreWarmedData(_, `requiredKind`, `requiredMemory`, _)) => entry
  }

  candidate.map {
    case (ref, data) =>
      // Promote the prewarmed container into the regular free pool.
      freePool = freePool + (ref -> data)
      prewarmedPool = prewarmedPool - ref
      // Backfill the prewarm pool with a replacement container.
      // NOTE: prewarming ignores the action code in exec, but this is dangerous as the field is
      // accessible to the factory.
      prewarmContainer(action.exec, requiredMemory)
      (ref, data)
  }
}
/** Tells a container actor to remove itself and forgets it in both pools. */
def removeContainer(toDelete: ActorRef): Unit = {
  toDelete ! Remove
  busyPool = busyPool - toDelete
  freePool = freePool - toDelete
}
/**
 * Calculate if there is enough free memory within a given pool.
 *
 * @param pool the pool whose current memory consumption is checked
 * @param memory the amount of memory an additional container would require
 * @return true iff the configured user memory can hold the pool plus the given amount
 */
def hasPoolSpaceFor[A](pool: Map[A, ContainerData], memory: ByteSize): Boolean = {
  val occupied = memoryConsumptionOf(pool)
  occupied + memory.toMB <= poolConfig.userMemory.toMB
}
}
object ContainerPool {

  /**
   * Calculate the memory of a given pool.
   *
   * @param pool The pool with the containers.
   * @return The memory consumption of all containers in the pool in Megabytes.
   */
  protected[containerpool] def memoryConsumptionOf[A](pool: Map[A, ContainerData]): Long =
    pool.values.map(_.memoryLimit.toMB).sum

  /**
   * Finds the best container for a given job to run on.
   *
   * Picks an arbitrary idle warm container whose action and invocation namespace
   * structurally match the job and that still has spare concurrency slots.
   * Returns None iff no matching container is in the idle pool.
   * Pre-warmed containers are never considered.
   *
   * @param action the action to run
   * @param invocationNamespace the namespace, that wants to run the action
   * @param idles a map of idle containers, awaiting work
   * @return a container if one found
   */
  protected[containerpool] def schedule[A](action: ExecutableWhiskAction,
                                           invocationNamespace: EntityName,
                                           idles: Map[A, ContainerData]): Option[(A, ContainerData)] =
    idles.collectFirst {
      case entry @ (_, WarmedData(_, `invocationNamespace`, `action`, _, activeCount))
          if activeCount < action.limits.concurrency.maxConcurrent =>
        entry
    }

  /**
   * Finds the oldest previously used containers to remove to make space for the job passed to run.
   * Depending on the space that has to be allocated, several containers might be removed.
   *
   * NOTE: This method is never called to remove an action that is in the pool already,
   * since this would be picked up earlier in the scheduler and the container reused.
   *
   * @param pool a map of all free containers in the pool
   * @param memory the amount of memory that has to be freed up
   * @return a list of containers to be removed iff found
   */
  protected[containerpool] def remove[A](pool: Map[A, ContainerData], memory: ByteSize): List[A] = {
    // Only idle warm containers may be evicted; prewarmed containers always stay.
    val evictable = pool.collect {
      case (ref, w: WarmedData) if w.activeActivationCount == 0 => ref -> w
    }

    // Keep evicting only while more memory is required AND the evictable containers can
    // actually provide it in full.
    if (memory > 0.B && evictable.nonEmpty && memoryConsumptionOf(evictable) >= memory.toMB) {
      // Evict the least recently used container first.
      val (oldest, data) = evictable.minBy(_._2.lastUsed)
      // Guard against the subtraction going negative (the original catches the exception);
      // zero remaining memory terminates the recursion on the next call.
      val stillNeeded = Try(memory - data.memoryLimit).getOrElse(0.B)
      oldest :: remove(evictable - oldest, stillNeeded)
    } else {
      // Either nothing (more) has to be freed, or not enough free containers exist to satisfy
      // the request — in both cases stop and contribute no (further) candidates.
      List.empty
    }
  }

  /** Props for creating a ContainerPool actor. */
  def props(factory: ActorRefFactory => ActorRef,
            poolConfig: ContainerPoolConfig,
            feed: ActorRef,
            prewarmConfig: List[PrewarmingConfig] = List.empty) =
    Props(new ContainerPool(factory, feed, prewarmConfig, poolConfig))
}
/**
 * Contains settings needed to perform container prewarming.
 *
 * @param count presumably the number of containers to keep prewarmed for this
 *              configuration — its consumer is outside this view; confirm in ContainerPool
 * @param exec the exec descriptor sent in the Start message when prewarming a container
 * @param memoryLimit the memory to reserve for each prewarmed container
 */
case class PrewarmingConfig(count: Int, exec: CodeExec[_], memoryLimit: ByteSize)
| starpit/openwhisk | core/invoker/src/main/scala/org/apache/openwhisk/core/containerpool/ContainerPool.scala | Scala | apache-2.0 | 18,608 |
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package uk.ac.ncl.openlab.intake24.foodxml
import uk.ac.ncl.openlab.intake24.api.data.PortionSizeMethodParameter
/**
 * A standard unit used for portion size estimation.
 *
 * @param name the unit name (serialised as the "unitN-name" parameter)
 * @param weight the weight of one unit (serialised as "unitN-weight")
 * @param omitFoodDesc whether the food description is omitted (serialised as
 *                     "unitN-omit-food-description", "true"/"false")
 */
case class StandardUnitDef(name: String, weight: Double, omitFoodDesc: Boolean)
object StandardUnitDef {
/**
 * Serialises standard unit definitions into a flat parameter list: a leading
 * "units-count" parameter followed by name/weight/omit-food-description triples
 * keyed by the unit's index.
 */
def toPortionSizeParameters(units: Seq[StandardUnitDef]) = {
  val countParam = PortionSizeMethodParameter("units-count", units.size.toString)
  val unitParams = units.zipWithIndex.flatMap {
    case (unit, index) =>
      Seq(
        PortionSizeMethodParameter(s"unit$index-name", unit.name),
        PortionSizeMethodParameter(s"unit$index-weight", unit.weight.toString),
        PortionSizeMethodParameter(s"unit$index-omit-food-description", if (unit.omitFoodDesc) "true" else "false"))
  }
  countParam +: unitParams
}
/**
 * Inverse of toPortionSizeParameters: rebuilds the standard unit definitions
 * from the flat parameter list. A missing "units-count" yields an empty result;
 * per-unit keys are required and will throw if absent (unchanged behaviour).
 */
def parsePortionSizeParameters(params: Seq[PortionSizeMethodParameter]) = {
  val byName = params.map(p => p.name -> p.value).toMap
  val unitCount = byName.get("units-count").map(_.toInt).getOrElse(0)

  (0 until unitCount).map { i =>
    StandardUnitDef(
      name = byName(s"unit$i-name"),
      weight = byName(s"unit$i-weight").toDouble,
      omitFoodDesc = byName(s"unit$i-omit-food-description") == "true")
  }
}
} | digitalinteraction/intake24 | FoodDataXML/src/main/scala/uk/ac/ncl/openlab/intake24/foodxml/StandardUnitDef.scala | Scala | apache-2.0 | 1,765 |
package com.anadathur.elastic
import javax.lang.model.element.Modifier
import java.util.{List => JList}
import com.squareup.javapoet._
/**
* Created by ajay.nadathur on 7/18/15.
*/
/** Shared helpers for the easymapping code generators (javapoet-based). */
package object easymapping {

  // Modifier combinations used when emitting generated members.
  val privFinal = Array(Modifier.PRIVATE, Modifier.FINAL)
  val pubAbs = Array(Modifier.PUBLIC, Modifier.ABSTRACT)

  def requireNotNull(obj: Any, msg: String = "Cannot be null") = require(obj != null, msg)

  def requireNotEmpty(str: String, msg: String = "Cannot be null/empty") = {
    requireNotNull(str, msg)
    require(str.trim.nonEmpty, msg)
  }

  // Thin wrappers around javapoet's type-name factories.
  def className(pkg: String, clazz: String) = ClassName.get(pkg, clazz)

  def className(clz: Class[_]) = ClassName.get(clz)

  def paramTypeName(rawType: ClassName, typeArguments: TypeName*) = ParameterizedTypeName.get(rawType, typeArguments: _*)

  /** Describes one generated property: its name, javapoet type and an optional javadoc comment. */
  case class MetaParam(name: String, typeName: TypeName, comments: String = "") {

    /** Builder for the accessor method ("isX" for booleans, "getX" otherwise). */
    def methodBuilder = {
      val prefix = if (typeName.equals(TypeName.BOOLEAN)) "is" else "get"
      MethodSpec.methodBuilder(prefix + name.capitalize)
        .returns(typeName)
        .addJavadoc(comments)
    }

    /** Builder for the backing field. */
    def fieldBuilder = FieldSpec.builder(typeName, name).addJavadoc(comments)

    /** Builder for a constructor/method parameter. */
    def paramBuilder = ParameterSpec.builder(typeName, name)
  }

  /** Properties of a generated mapping type, keyed by property name. */
  def getParamsOfMappingType(config: Config) = {
    val params = List(
      MetaParam("typeName", TypeName.get(classOf[String]), "The name of the mapping type\n"),
      MetaParam("parent", config.mappingTypeClass, "The parent mapping object\n"),
      MetaParam("properties", paramTypeName(className(classOf[JList[_]]), config.propertyClassName),
        "List of properties declared in this type\n"),
      MetaParam("dynamic", TypeName.BOOLEAN, "Is the type dynamic\n"),
      //MetaParam("indexName", TypeName.get(classOf[String]), "name of index, defaults to field "),
      MetaParam("nested", TypeName.BOOLEAN, "Is the type nested\n"))
    params.map(p => p.name -> p).toMap
  }

  /** Properties of a generated mapping-type parameter, keyed by property name. */
  def getParamsOfMappingTypeParam(config: Config) = {
    val params = List(
      MetaParam("name", TypeName.get(classOf[String]), "The name of the field\n"),
      MetaParam("parent", config.mappingTypeClass, "The parent mapping object\n"),
      MetaParam("type", config.mappingTypeClass, "Type of param\n"),
      MetaParam("path", TypeName.get(classOf[String]), "The absolute path of the property in the mapping file\n"),
      MetaParam("stored", TypeName.BOOLEAN, "Is the value stored\n"))
    params.map(p => p.name -> p).toMap
  }
}
| ajaykumarns/easymapping | src/main/scala/com/anadathur/elastic/easymapping/package.scala | Scala | apache-2.0 | 2,481 |
package edu.gemini.ags.impl
import edu.gemini.ags.api.{AgsAnalysis, AgsMagnitude, AgsStrategy}
import edu.gemini.ags.api.AgsStrategy.{Assignment, Estimate, Selection}
import edu.gemini.ags.gems._
import edu.gemini.ags.gems.mascot.Strehl
import edu.gemini.catalog.api._
import edu.gemini.catalog.votable._
import edu.gemini.pot.sp.SPComponentType
import edu.gemini.pot.ModelConverters._
import edu.gemini.spModel.core.SiderealTarget
import edu.gemini.spModel.ags.AgsStrategyKey.GemsKey
import edu.gemini.spModel.gemini.flamingos2.Flamingos2OiwfsGuideProbe
import edu.gemini.spModel.gemini.gems.{GemsInstrument, Canopus}
import edu.gemini.spModel.gemini.gsaoi.GsaoiOdgw
import edu.gemini.spModel.gems.{GemsTipTiltMode, GemsGuideProbeGroup, GemsGuideStarType}
import edu.gemini.spModel.obs.context.ObsContext
import edu.gemini.spModel.rich.shared.immutable._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.JavaConverters._
import scala.concurrent._
import edu.gemini.ags.api.AgsMagnitude.{MagnitudeCalc, MagnitudeTable}
import edu.gemini.spModel.guide.{ValidatableGuideProbe, GuideProbeGroup, GuideProbe}
import edu.gemini.spModel.core._
import scalaz._
import Scalaz._
trait GemsStrategy extends AgsStrategy {

  // By default use the remote backend but it can be overridden in tests
  private [impl] def backend:VoTableBackend

  override def key = GemsKey

  // Since the constraints are run in parallel, we need a way to identify them after
  // they are done, so we create IDs for each. This is a pretty nasty way to do things, but
  // since we cannot predict in what order the results will return, we need to be able
  // to pick them out somehow.
  private val CanopusTipTiltId = 0
  private val OdgwFlexureId = 1

  // Catalog results with search keys to avoid having to recompute search key info on the fly.
  private case class CatalogResultWithKey(query: CatalogQuery, catalogResult: CatalogQueryResult, searchKey: GemsCatalogSearchKey)

  // Query the catalog for each constraint and compile a list of results with the necessary
  // information for GeMS. The whole future fails if any individual query reported an error.
  private def catalogResult(ctx: ObsContext, mt: MagnitudeTable): Future[List[CatalogResultWithKey]] = {
    // Maps for IDs to types needed by GeMS.
    def GuideStarTypeMap = Map[Int, GemsGuideStarType](
      CanopusTipTiltId -> GemsGuideStarType.tiptilt,
      OdgwFlexureId -> GemsGuideStarType.flexure
    )

    // Maps from IDs to guide probe groups.
    def GuideProbeGroupMap = Map[Int, GemsGuideProbeGroup](
      CanopusTipTiltId -> Canopus.Wfs.Group.instance,
      OdgwFlexureId -> GsaoiOdgw.Group.instance
    )

    // Tag each successful query result with the GeMS search key derived from the query's id;
    // queries without an id are silently dropped.
    VoTableClient.catalogs(catalogQueries(ctx, mt), backend).flatMap {
      case result if result.exists(_.result.containsError) => Future.failed(CatalogException(result.flatMap(_.result.problems)))
      case result => Future.successful {
        result.flatMap { qr =>
          val id = qr.query.id
          id.map(x => CatalogResultWithKey(qr.query, qr.result, GemsCatalogSearchKey(GuideStarTypeMap(x), GuideProbeGroupMap(x))))
        }
      }
    }
  }

  // Convert from catalog results to GeMS-specific results: one GemsCatalogSearchResults
  // per (catalog result, position angle) pair, at 45-degree steps around the circle.
  private def toGemsCatalogSearchResults(ctx: ObsContext, futureAgsCatalogResults: Future[List[CatalogResultWithKey]]): Future[List[GemsCatalogSearchResults]] = {
    val anglesToTry = (0 until 360 by 45).map(Angle.fromDegrees(_))

    futureAgsCatalogResults.map { agsCatalogResults =>
      for {
        result <- agsCatalogResults
        angle <- anglesToTry
      } yield {
        val ConeSearchCatalogQuery(_, _, radiusConstraint, mc, _) = result.query
        // NOTE(review): only the head of the query's magnitude constraints is used — presumably
        // each GeMS query carries exactly one constraint; confirm against catalogQueries below.
        val catalogSearchCriterion = CatalogSearchCriterion("ags", radiusConstraint, mc.head, None, angle.some)
        val gemsCatalogSearchCriterion = new GemsCatalogSearchCriterion(result.searchKey, catalogSearchCriterion)
        new GemsCatalogSearchResults(gemsCatalogSearchCriterion, result.catalogResult.targets.rows)
      }
    }
  }

  /** Magnitude calculators for every Canopus WFS and GSAOI ODGW probe the table defines for this context. */
  override def magnitudes(ctx: ObsContext, mt: MagnitudeTable): List[(GuideProbe, MagnitudeCalc)] = {
    val cans = Canopus.Wfs.values().map { cwfs => mt(ctx, cwfs).map(cwfs -> _) }.toList.flatten
    val odgw = GsaoiOdgw.values().map { odgw => mt(ctx, odgw).map(odgw -> _) }.toList.flatten
    cans ++ odgw
  }

  /** Analyzes a single probe/guide-star pairing using the bands appropriate for that probe. */
  override def analyze(ctx: ObsContext, mt: MagnitudeTable, guideProbe: ValidatableGuideProbe, guideStar: SiderealTarget): Option[AgsAnalysis] =
    AgsAnalysis.analysis(ctx, mt, guideProbe, guideStar, probeBands(guideProbe))

  /** Analyzes both GeMS probe groups (Canopus WFS and GSAOI ODGW) for this context. */
  override def analyze(ctx: ObsContext, mt: MagnitudeTable): List[AgsAnalysis] = {
    import AgsAnalysis._

    // Analyze each probe in the group; if no probe has a guide star at all, collapse
    // the answer into a single group-level NoGuideStarForGroup result.
    def mapGroup(grp: GuideProbeGroup): List[AgsAnalysis] = {
      def hasGuideStarForProbe(a: AgsAnalysis): Boolean = a match {
        case NoGuideStarForProbe(_, _) => false
        case _ => true
      }

      val probeAnalysis = grp.getMembers.asScala.toList.flatMap { p => analysis(ctx, mt, p, probeBands(p)) }
      probeAnalysis.filter(hasGuideStarForProbe) match {
        case Nil =>
          // Pick the first guide probe as representative, since we are called with either Canopus or GsaoiOdwg
          ~grp.getMembers.asScala.headOption.map {gp => List(NoGuideStarForGroup(grp, probeBands(gp)))}
        case lst => lst
      }
    }

    mapGroup(Canopus.Wfs.Group.instance) ++ mapGroup(GsaoiOdgw.Group.instance)
  }

  /** Candidate guide stars per guide probe, gathered from a Canopus tip-tilt search. */
  override def candidates(ctx: ObsContext, mt: MagnitudeTable): Future[List[(GuideProbe, List[SiderealTarget])]] = {

    // Extract something we can understand from the GemsCatalogSearchResults.
    def simplifiedResult(results: List[GemsCatalogSearchResults]): List[(GuideProbe, List[SiderealTarget])] =
      results.flatMap { result =>
        val so = result.results // extract the sky objects from this thing
        // For each guide probe associated with these sky objects, add a tuple
        // (guide probe, sky object list) to the results
        result.criterion.key.group.getMembers.asScala.toList.map { guideProbe =>
          (guideProbe, so)
        }
      }

    // why do we need multiple position angles? catalog results are given in
    // a ring (limited by radius limits) around a base position ... confusion
    val posAngles = (ctx.getPositionAngle.toNewModel :: (0 until 360 by 90).map(Angle.fromDegrees(_)).toList).toSet
    search(GemsTipTiltMode.canopus, ctx, posAngles, None).map(simplifiedResult)
  }

  /**
   * Estimates whether GeMS can find a usable asterism for this context: guaranteed success
   * if any resulting guide group holds at least 3 targets, complete failure otherwise.
   */
  override def estimate(ctx: ObsContext, mt: MagnitudeTable): Future[Estimate] = {
    // Get the query results and convert them to GeMS-specific ones.
    val results = toGemsCatalogSearchResults(ctx, catalogResult(ctx, mt))

    // Create a set of the angles to try.
    val anglesToTry = (0 until 360 by 45).map(Angle.fromDegrees(_)).toSet

    // A way to terminate the Mascot algorithm immediately in the following cases:
    // 1. A usable 2 or 3-star asterism is found; or
    // 2. If no asterisms were found.
    // Returning false will stop the search
    def progress(s: Strehl, usable: Boolean): Boolean = {
      !((usable && s.stars.size >= 2) || (s.stars.size < 2))
    }

    // Iterate over 45 degree position angles if no asterism is found at PA = 0.
    val gemsCatalogResults = results.map(result => GemsResultsAnalyzer.analyzeGoodEnough(ctx, anglesToTry, result, progress))

    // Filter out the 1-star asterisms. If anything is left, we are good to go; otherwise, no.
    // NOTE(review): the comment above mentions filtering 1-star asterisms, but the test below
    // requires at least 3 targets in a guide group — confirm the intended threshold.
    gemsCatalogResults.map { x =>
      !x.exists(_.guideGroup.getTargets.size() >= 3) ? AgsStrategy.Estimate.CompleteFailure | AgsStrategy.Estimate.GuaranteedSuccess
    }
  }

  /**
   * Runs the GeMS catalog search for the given tip-tilt mode and position angles.
   * Yields an empty list when the context has no base coordinates, or when the results
   * do not include both a Canopus tip-tilt and an ODGW flexure group.
   */
  protected [impl] def search(tipTiltMode: GemsTipTiltMode, ctx: ObsContext, posAngles: Set[Angle], nirBand: Option[MagnitudeBand]): Future[List[GemsCatalogSearchResults]] =
    ctx.getBaseCoordinates.asScalaOpt.fold(Future.successful(List.empty[GemsCatalogSearchResults])) { base =>
      // Get the instrument: F2 or GSAOI?
      val gemsInstrument =
        (ctx.getInstrument.getType == SPComponentType.INSTRUMENT_GSAOI) ? GemsInstrument.gsaoi | GemsInstrument.flamingos2
      // Search options
      val gemsOptions = new GemsGuideStarSearchOptions(gemsInstrument, tipTiltMode, posAngles.asJava)

      // Perform the catalog search, using GemsStrategy's backend
      val results = GemsVoTableCatalog(backend, UCAC4).search(ctx, base.toNewModel, gemsOptions, nirBand)

      // Now check that the results are valid: there must be a valid tip-tilt and flexure star each.
      results.map { r =>
        val AllKeys:List[GemsGuideProbeGroup] = List(Canopus.Wfs.Group.instance, GsaoiOdgw.Group.instance)
        val containedKeys = r.map(_.criterion.key.group)
        // Return a list only if both guide probes returned a value
        ~(containedKeys.forall(AllKeys.contains) option r)
      }
    }

  /** Picks the first (best-ranked) guide star group produced by the analyzer, if any. */
  private def findGuideStars(ctx: ObsContext, posAngles: Set[Angle], results: List[GemsCatalogSearchResults]): Option[GemsGuideStars] = {
    // Passing in None to say we don't want a ProgressMeter.
    val gemsResults = GemsResultsAnalyzer.analyze(ctx, posAngles.asJava, results.asJava, None).asScala
    gemsResults.headOption
  }

  /** Searches and converts the best GeMS guide star group into an AGS Selection, if one exists. */
  override def select(ctx: ObsContext, mt: MagnitudeTable): Future[Option[Selection]] = {
    val posAngles = Set(ctx.getPositionAngle.toNewModel, Angle.zero, Angle.fromDegrees(90), Angle.fromDegrees(180), Angle.fromDegrees(270))
    val results = search(GemsTipTiltMode.canopus, ctx, posAngles, None)
    results.map { r =>
      val gemsGuideStars = findGuideStars(ctx, posAngles, r)

      // Now we must convert from an Option[GemsGuideStars] to a Selection.
      gemsGuideStars.map { x =>
        val assignments = x.guideGroup.getAll.asScalaList.flatMap(targets => {
          val guider = targets.getGuider
          targets.getTargets.asScalaList.map(target => Assignment(guider, target.toNewModel))
        })
        Selection(x.pa, assignments)
      }
    }
  }

  /**
   * Builds the two UCAC4 cone-search queries (Canopus tip-tilt and ODGW flexure),
   * each with magnitude limits that are the union of the limits of all probes in the group
   * and a radius bounded by the group's radius limits. Empty when the context has no base
   * coordinates or a group has no usable magnitude limits.
   */
  override def catalogQueries(ctx: ObsContext, mt: MagnitudeTable): List[CatalogQuery] =
    ctx.getBaseCoordinates.asScalaOpt.fold(List.empty[CatalogQuery]) { base =>
      import AgsMagnitude._
      val cond = ctx.getConditions
      val mags = magnitudes(ctx, mt).toMap

      def lim(gp: GuideProbe): Option[MagnitudeConstraints] = autoSearchConstraints(mags(gp), cond)

      // Fold the per-probe limits of each group into a single union constraint.
      val odgwMagLimits = (lim(GsaoiOdgw.odgw1) /: GsaoiOdgw.values().drop(1)) { (ml, odgw) =>
        (ml |@| lim(odgw))(_ union _).flatten
      }
      val canMagLimits = (lim(Canopus.Wfs.cwfs1) /: Canopus.Wfs.values().drop(1)) { (ml, can) =>
        (ml |@| lim(can))(_ union _).flatten
      }

      val canopusConstraint = canMagLimits.map(c => CatalogQuery(CanopusTipTiltId, base.toNewModel, RadiusConstraint.between(Angle.zero, Canopus.Wfs.Group.instance.getRadiusLimits.toNewModel), List(ctx.getConditions.adjust(c)), UCAC4))
      val odgwConstraint = odgwMagLimits.map(c => CatalogQuery(OdgwFlexureId, base.toNewModel, RadiusConstraint.between(Angle.zero, GsaoiOdgw.Group.instance.getRadiusLimits.toNewModel), List(ctx.getConditions.adjust(c)), UCAC4))
      List(canopusConstraint, odgwConstraint).flatten
    }

  override val probeBands = RBandsList

  // Return the band used for each probe
  // TODO Delegate to GemsMagnitudeTable
  private def probeBands(guideProbe: GuideProbe): BandsList = if (Canopus.Wfs.Group.instance.getMembers.contains(guideProbe)) RBandsList else SingleBand(MagnitudeBand.H)

  override val guideProbes: List[GuideProbe] =
    Flamingos2OiwfsGuideProbe.instance :: (GsaoiOdgw.values() ++ Canopus.Wfs.values()).toList
}
/** Production instance of the strategy, wired to the remote cone-search backend (tests may override `backend`). */
object GemsStrategy extends GemsStrategy {
  override private [impl] val backend = ConeSearchBackend
}
| arturog8m/ocs | bundle/edu.gemini.ags/src/main/scala/edu/gemini/ags/impl/GemsStrategy.scala | Scala | bsd-3-clause | 11,791 |
package collins.models.asset
import play.api.libs.json.JsObject
/** An asset view backed by data held on a remote instance rather than locally. */
trait RemoteAsset extends AssetView {
  // JSON representation of this asset — presumably as received from the remote instance; confirm at the implementation site.
  val json: JsObject
  //the asset representing the data center this asset belongs to
  val hostTag: String
  // base URL of the remote instance that owns this asset
  val remoteUrl: String
  /** Remote assets always report the URL of the instance they live on. */
  def remoteHost = Some(remoteUrl)
}
| byxorna/collins | app/collins/models/asset/RemoteAsset.scala | Scala | apache-2.0 | 272 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to Scala code snippets that match specific criteria and returns a sample of them, giving a quick overview of the dataset's contents.