code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/**
* Various common, "dumb" data-structures that represent common things that
* are passed around inside Ammonite
*/
package ammonite.util
import scala.collection.mutable
import scala.reflect.NameTransformer
import scala.reflect.runtime.universe.TypeTag
/**
* Exception for reporting script compilation failures
*/
// Thrown when user-submitted script code fails to compile; `message` carries
// the compiler's error output verbatim.
class CompilationError(message: String) extends Exception(message)
/**
 * The result of evaluating a single piece of user code: the [[Name]]s of the
 * wrapper object(s) the code was compiled into, any imports the code
 * produced, a `tag` string (purpose not visible here — presumably used for
 * caching; confirm at call sites), and the computed value itself.
 */
case class Evaluated(wrapper: Seq[Name],
imports: Imports,
tag: String,
value: Any)
/**
* Represents the importing of a single name in the Ammonite REPL, of the
* form
*
* {{{
* import $prefix.{$fromName => $toName}
* }}}
*
* All imports are reduced to this form; `import $prefix.$name` results in
* the `fromName` and `toName` being the same, while `import $prefix._` or
* `import $prefix.{foo, bar, baz}` are split into multiple distinct
* [[ImportData]] objects.
*
* Note that imports can be of one of three distinct `ImportType`s: importing
* a type, a term, or both. This lets us deal with shadowing correctly
* if we import the type and term of the same name from different places
*/
case class ImportData(fromName: Name, // name as it exists at the import site
toName: Name, // (possibly renamed) name visible after the import
prefix: Seq[Name], // path being imported from, e.g. Seq(foo, bar) for `foo.bar._`
importType: ImportData.ImportType)
object ImportData{
// A case class (rather than an enum) gives the three singletons below
// value-equality and pattern-matchability.
sealed case class ImportType(name: String)
val Type = ImportType("Type")
val Term = ImportType("Term")
// `TermType` means the import brings in both the term and the type
val TermType = ImportType("TermType")
}
/**
* Represents the imports that occur before a piece of user code in the
* Ammonite REPL. It's basically a `Seq[ImportData]`, except we really want
* it to be always in a "canonical" form without shadowed/duplicate imports.
*
* Thus we only expose an `apply` method which performs this de-duplication,
* and a `++` operator that combines two sets of imports while performing
* de-duplication.
*/
// Private constructor: instances can only be created via `Imports.apply`,
// which guarantees the canonical, de-duplicated form described above.
class Imports private (val value: Seq[ImportData]){
  /** Concatenates two import sets, re-running the de-duplication/shadowing logic. */
  def ++(others: Imports) = Imports(this.value, others.value)
  // `value` is rendered directly; the previous explicit `.toString` inside
  // the interpolation was redundant.
  override def toString() = s"Imports($value)"
}
object Imports{
// This isn't called directly, but we need to define it so uPickle can know
// how to read/write imports
def unapply(s: Imports): Option[Seq[ImportData]] = Some(s.value)
/**
* Constructs an `Imports` object from one or more loose sequence of imports
*
* Figures out which imports will get stomped over by future imports
* before they get used, and just ignore those.
*/
def apply(importss: Seq[ImportData]*): Imports = {
// We iterate over the combined reversed imports, keeping track of the
// things that will-be-stomped-over-in-the-non-reversed-world in a map.
// If an import's target destination will get stomped over we ignore it
//
// At the end of the day we re-reverse the trimmed list and return it.
val importData = importss.flatten
val stompedTypes = mutable.Set.empty[Name]
val stompedTerms = mutable.Set.empty[Name]
val out = mutable.Buffer.empty[ImportData]
for(data <- importData.reverseIterator){
// Which namespace(s) this import occupies: terms, types, or both
val stomped = data.importType match{
case ImportData.Term => Seq(stompedTerms)
case ImportData.Type => Seq(stompedTypes)
case ImportData.TermType => Seq(stompedTerms, stompedTypes)
}
// Keep this import only if no later (in source order) import has already
// claimed the same destination name in the same namespace(s)
if (!stomped.exists(_(data.toName))){
out.append(data)
stomped.foreach(_.add(data.toName))
// NOTE(review): un-stomps the first segment of this import's prefix —
// presumably because that root term is still referenced as a prefix by
// this import and must therefore stay visible; confirm against callers.
data.prefix.headOption.foreach(stompedTerms.remove)
}
}
new Imports(out.reverse)
}
}
/**
* Represents a single identifier in Scala source code, e.g. "scala" or
* "println" or "`Hello-World`".
*
* Holds the value "raw", with all special characters intact, e.g.
* "Hello-World". Can be used [[backticked]] e.g. "`Hello-World`", useful for
* embedding in Scala source code, or [[encoded]] e.g. "Hello$minusWorld",
* useful for accessing names as-seen-from the Java/JVM side of things
*/
case class Name(raw: String){
  assert(
    NameTransformer.decode(raw) == raw,
    "Name() must be created with un-encoded text"
  )
  // `startsWith` rather than `charAt(0)`: the old check threw a raw
  // StringIndexOutOfBoundsException for an empty `raw`, even though empty
  // names are explicitly supported by `backtickWrap` (rendered as "``").
  assert(!raw.startsWith("`"), "Cannot create already-backticked identifiers")
  override def toString = s"Name($backticked)"
  /** The JVM-encoded form, e.g. "Hello$minusWorld" */
  def encoded = NameTransformer.encode(raw)
  /** The source-embeddable form, backtick-quoted if necessary */
  def backticked = Name.backtickWrap(raw)
}
object Name{
/**
* Read/write [[Name]]s as unboxed strings, in order to save verbosity
* in the JSON cache files as well as improving performance of
* reading/writing since we read/write [[Name]]s a *lot*.
*/
implicit val nameRW: upickle.default.ReadWriter[Name] = upickle.default.ReadWriter[Name](
name => upickle.Js.Str(name.raw),
{case upickle.Js.Str(raw) => Name(raw)}
)
// All alphanumeric Scala keywords; an identifier equal to one of these must
// be backtick-quoted. ("finally" was previously listed twice; sets
// de-duplicate, so dropping the repeat is behavior-neutral.)
val alphaKeywords = Set(
"abstract", "case", "catch", "class", "def", "do", "else",
"extends", "false", "final", "finally", "forSome",
"for", "if", "implicit", "import", "lazy", "match", "new",
"null", "object", "override", "package", "private", "protected",
"return", "sealed", "super", "this", "throw", "trait", "try",
"true", "type", "val", "var", "while", "with", "yield", "_", "macro"
)
// Reserved symbolic tokens (and their unicode arrow equivalents) that
// cannot be used bare as identifiers.
val symbolKeywords = Set(
":", ";", "=>", "=", "<-", "<:", "<%", ">:", "#", "@", "\\u21d2", "\\u2190"
)
/**
* Custom implementation of ID parsing, instead of using the ScalaParse
* version. This lets us avoid loading FastParse and ScalaParse entirely if
* we're running a cached script, which shaves off 200-300ms of startup time.
*
* Returns `s` unchanged when it is already a valid (or already backticked)
* Scala identifier, otherwise wraps it in backticks.
*/
def backtickWrap(s: String) = {
if (s.isEmpty) "``"
else if (s(0) == '`' && s.last == '`') s
else {
// Identifiers are underscore-separated chunks; only the final chunk may
// be an operator-chunk (e.g. `foo_+`)
val chunks = s.split("_", -1)
def validOperator(c: Char) = {
c.getType == Character.MATH_SYMBOL ||
c.getType == Character.OTHER_SYMBOL ||
"!#%&*+-/:<=>?@\\\\^|~".contains(c)
}
val validChunks = chunks.zipWithIndex.forall { case (chunk, index) =>
chunk.forall(c => c.isLetter || c.isDigit || c == '$') ||
(
chunk.forall(validOperator) &&
// operators can only come last
index == chunks.length - 1 &&
// but cannot be preceded by only a _
!(chunks.lift(index - 1).exists(_ == "") && index - 1 == 0))
}
val firstLetterValid = s(0).isLetter || s(0) == '_' || s(0) == '$' || validOperator(s(0))
val valid =
validChunks &&
firstLetterValid &&
!alphaKeywords.contains(s) &&
!symbolKeywords.contains(s)
if (valid) s else '`' + s + '`'
}
}
}
/**
* Encapsulates a read-write cell that can be passed around
*/
trait StableRef[T]{
/**
* Get the current value of this [[StableRef]] at this instant in time
*/
def apply(): T
/**
* Set the value of this [[StableRef]] to always be the value `t`
*/
def update(t: T): Unit
}
trait Ref[T] extends StableRef[T]{
/**
* Return a function that can be used to get the value of this [[Ref]]
* at any point in time
*/
def live(): () => T
/**
* Set the value of this [[Ref]] to always be the value of the by-name
* argument `t`, re-evaluated on every read
*/
def bind(t: => T): Unit
}
object Ref{
// Implicitly lifts any plain value into a constant Ref. NOTE(review): this
// is a very broad implicit conversion; presumably kept for concise REPL
// configuration (e.g. assigning a color directly to a Ref slot).
implicit def refer[T](t: T): Ref[T] = Ref(t)
// A Ref backed by a mutable thunk: `update` pins a constant, `bind`
// re-evaluates the by-name argument on every read.
def live[T](value0: () => T) = new Ref[T]{
var value: () => T = value0
def live() = value
def apply() = value()
def update(t: T) = value = () => t
def bind(t: => T): Unit = value = () => t
// NOTE(review): this renders the thunk object itself, not the current
// value; evaluating value() here could trigger side effects, so this may
// be intentional — confirm before "fixing".
override def toString = s"Ref($value)"
}
// A Ref initially holding the constant `value0`
def apply[T](value0: T) = live(() => value0)
}
/**
* Nice pattern matching for chained exceptions
*/
object Ex{
  /**
   * Flattens a throwable and its chain of causes into a Seq, outermost
   * first, so callers can pattern match on the whole chain at once,
   * e.g. `case Ex(_, cause: IOException)`.
   */
  def unapplySeq(t: Throwable): Option[Seq[Throwable]] = {
    @annotation.tailrec
    def loop(cur: Throwable, acc: List[Throwable]): List[Throwable] =
      if (cur == null) acc.reverse
      else loop(cur.getCause, cur :: acc)
    Some(loop(t, Nil))
  }
}
// Minimal palette needed for syntax highlighting; mirrors a subset of the
// fields on [[Colors]] but carries plain attrs rather than mutable Refs.
trait CodeColors{
def ident: fansi.Attrs
def `type`: fansi.Attrs
def literal: fansi.Attrs
def comment: fansi.Attrs
def keyword: fansi.Attrs
}
/**
* A set of colors used to highlight the miscellaneous bits of the REPL.
* Re-used all over the place in PPrint, TPrint, syntax highlighting,
* command-echoes, etc. in order to keep things consistent
*
* @param prompt The command prompt
* @param ident Definition of top-level identifiers
* @param `type` Definition of types
* @param literal Strings, integers and other literal expressions
* @param prefix The Seq/Foo when printing a Seq(...) or case class Foo(...)
* @param comment Comments in source code
* @param keyword Language keywords
* @param selected The color of text selected in the line-editor
* @param error The color used to print error messages of all kinds
* @param warning The color used to print warnings
*/
// Each field is a mutable [[Ref]] cell, so individual colors can be re-bound
// at runtime without rebuilding the whole Colors instance.
case class Colors(prompt: Ref[fansi.Attrs],
ident: Ref[fansi.Attrs],
`type`: Ref[fansi.Attrs],
literal: Ref[fansi.Attrs],
prefix: Ref[fansi.Attrs],
comment: Ref[fansi.Attrs],
keyword: Ref[fansi.Attrs],
selected: Ref[fansi.Attrs],
error: Ref[fansi.Attrs],
warning: Ref[fansi.Attrs])
object Colors{
  /**
   * The standard ANSI color scheme; named arguments make the mapping from
   * slot to color explicit. Plain attrs are lifted to Refs via Ref.refer.
   */
  def Default = Colors(
    prompt = fansi.Color.Magenta,
    ident = fansi.Color.Cyan,
    `type` = fansi.Color.Green,
    literal = fansi.Color.Green,
    prefix = fansi.Color.Yellow,
    comment = fansi.Color.Blue,
    keyword = fansi.Color.Yellow,
    selected = fansi.Reversed.On,
    error = fansi.Color.Red,
    warning = fansi.Color.Yellow
  )
  /** A no-op scheme: every slot is empty, for dumb terminals or plain logs. */
  def BlackWhite = {
    val none = fansi.Attrs.Empty
    Colors(none, none, none, none, none, none, none, none, none, none)
  }
}
/**
* Models a binding of a value to a typed name, and is passed into the
* REPL so it can re-create the bindings inside the REPL's scope
*/
// The TypeTag is captured implicitly so the REPL can recover the static
// type `T` of the bound value inside its own compilation scope.
case class Bind[T](name: String, value: T)
(implicit val typeTag: scala.reflect.runtime.universe.TypeTag[T])
object Bind{
  /**
   * Allows `"name" -> value` tuples to be passed wherever a [[Bind]] is
   * expected. The return type is now annotated explicitly — good practice
   * (and mandatory in later Scala versions) for implicit definitions.
   */
  implicit def ammoniteReplArrowBinder[T](t: (String, T))(implicit typeTag: TypeTag[T]): Bind[T] = {
    Bind(t._1, t._2)(typeTag)
  }
}
/**
* Encapsulates the ways the Ammonite REPL prints things. Does not print
* a trailing newline by default; you have to add one yourself.
*
* @param out How you want it to print streaming fragments of stdout
* @param warning How you want it to print a compile warning
* @param error How you want it to print a compile error
* @param info How you want to print compile info logging. *Not* the same
* as `out`, which is used to print runtime output.
*/
// Output callbacks are injected rather than hard-coded so the four streams
// can be routed independently by the embedding application.
case class Printer(out: String => Unit,
warning: String => Unit,
error: String => Unit,
info: String => Unit)
// A single import clause parsed out of user code; `start`/`end` are
// presumably character offsets of the clause in the source — confirm at
// call sites.
case class ImportTree(prefix: Seq[String],
mappings: Option[ImportTree.ImportMapping],
start: Int,
end: Int)
object ImportTree{
type ImportMapping = Seq[(String, Option[String])]
} | alexarchambault/ammonium | amm/util/src/main/scala/ammonite/util/Model.scala | Scala | mit | 10,876 |
package feature.step
import feature.FeatureTest
import feature.FeatureTest.driver
import feature.FeatureTest.baseUrl
import cucumber.api.scala.{ ScalaDsl, EN }
import org.scalatest.Matchers
import org.openqa.selenium.By
import org.openqa.selenium.WebElement
import cucumber.api.PendingException
import collection.JavaConversions._
import org.openqa.selenium.Keys
import feature.support.RandomData.randomString
import feature.support.RandomData.randomEmail
import code.model._
class Step extends ScalaDsl with EN with Matchers {
// Mutable scenario state shared between steps; cucumber runs the steps of a
// scenario sequentially on one instance, so plain vars are sufficient here.
var userEmail: String = null
var password: String = null
Given("""^I am an Unauthenticated user$""") { () =>
userEmail = null
password = null
driver.manage.deleteAllCookies
}
// NOTE(review): "New user" and "User" are implemented identically; if a
// distinction is intended (e.g. pre-granted roles) it is not expressed here.
Given("""^I am a New user$""") { () =>
driver.manage.deleteAllCookies
userEmail = randomEmail
password = "abc123"
createNewUser(userEmail, password)
}
Given("""^I am a User$""") { () =>
driver.manage.deleteAllCookies
userEmail = randomEmail
password = "abc123"
createNewUser(userEmail, password)
}
When("""^I log in$""") { () =>
login(userEmail, password)
}
When("""^I go to Timeadmin$""") { () =>
driver.get(baseUrl)
}
// Presence checks: findElement throws (failing the step) when missing
Then("""^I see the Login page$""") { () =>
driver.findElement(By.cssSelector(".loginScreen"));
}
// Page-visibility assertions expressed as main-menu / user-menu membership
Then("""^I can not see the Client pages$""") { () =>
mainMenu should contain noneOf ("Tasks", "Timesheet", "Tasksheet")
}
Then("""^I can see the Client pages$""") { () =>
mainMenu should contain allOf ("Tasks", "Timesheet", "Tasksheet")
}
Then("""^I can not see the Admin pages$""") { () =>
mainMenu should contain noneOf ("Projects", "Users")
}
Then("""^I can see the Admin pages$""") { () =>
mainMenu should contain allOf ("Projects", "Users")
}
Then("""^I can not see the User pages$""") { () =>
userMenu should contain noneOf ("Change password", "My profile", "Logout")
}
Then("""^I can see the User pages$""") { () =>
userMenu should contain allOf ("Change password", "My profile", "Logout")
}
Then("""^I can see the Registration page$""") { () =>
userMenu should contain("Registration")
}
Then("""^I see a nice welcome message$""") { () =>
driver.findElement(By.cssSelector(".freshUserWarning"))
}
When("""^I register an account$""") { () =>
createNewUser(randomEmail, "abc123")
}
// Role granting is done through the UI while logged in as the seeded
// administrator account ("default@tar.hu")
When("""^an administrator grant me Client permission$""") { () =>
login("default@tar.hu", "abc123")
navigateInMainMenu("Users")
switchClientRole(userEmail)
submit
}
When("""^an administrator grant me Admin permission$""") { () =>
login("default@tar.hu", "abc123")
navigateInMainMenu("Users")
switchAdminRole(userEmail)
submit
}
// Inner HTML of every top-level main-menu entry
def mainMenu =
elementsAsText("#navbar .navbar-nav > li *")
// Inner HTML of the user drop-down entries; opens the drop-down first so
// the links are actually present in the DOM
def userMenu = {
openUserMenu
elementsAsText("#navbar .userPageLink")
}
def elementsAsText(cssSelector: String) =
for (menuItem <- driver.findElements(By.cssSelector(cssSelector))) yield menuItem.getAttribute("innerHTML")
def openUserMenu =
driver.findElement(By.cssSelector("#navbar > ul.nav.navbar-nav.navbar-right .dropdown .dropdown-toggle")).click
// Registers a fresh account through the UI: random first/last name, the
// given email, English locale, and `password` typed into every password
// field (new + confirmation), then submits the form.
def createNewUser(email: String, password: String) = {
driver.get(baseUrl)
navigateInUserMenu("Registration")
driver.findElement(By.id("txtFirstName")).sendKeys(randomString)
driver.findElement(By.id("txtLastName")).sendKeys(randomString)
driver.findElement(By.id("txtEmail")).sendKeys(email)
driver.findElement(By.id("txtLocale")).sendKeys("english")
for (passwordField <- driver.findElements(By.xpath("//input[@type='password']"))) {
passwordField.clear
passwordField.sendKeys(password)
}
submit
}
// Clears any existing session cookies and logs in from the login screen
def login(email: String, password: String) = {
driver.manage.deleteAllCookies
driver.get(baseUrl)
driver.findElement(By.cssSelector("input[name='username']")).sendKeys(email)
driver.findElement(By.cssSelector("input[type='password']")).sendKeys(password)
submit
}
// Clicks the main-menu link whose text contains `pageName`
def navigateInMainMenu(pageName: String) =
driver.findElement(By.cssSelector("#navbar .navbar-nav")).findElement(By.xpath(".//a[contains(text(), '" + pageName + "')]")).click
def navigateInUserMenu(pageName: String) = {
openUserMenu
driver.findElement(By.cssSelector("#navbar .navbar-right .dropdown-menu")).findElement(By.xpath(".//a[contains(text(), '" + pageName + "')]")).click
}
// Toggles a role checkbox in the users-table row belonging to `email`;
// column 2 appears to be the admin flag and column 3 the client flag —
// confirm against the Users page markup.
def switchAdminRole(email: String) =
driver.findElement(By.xpath("//a[contains(text(), '" + email + "')]/../..//td[position()=2]/input")).click
def switchClientRole(email: String) =
driver.findElement(By.xpath("//a[contains(text(), '" + email + "')]/../..//td[position()=3]/input")).click
def submit =
driver.findElement(By.xpath("//input[@type='submit']")).click
// Replaces a field's content (clear + type) rather than appending to it
def setText(webElement: WebElement, text: String) = {
webElement.clear
webElement.sendKeys(text)
}
// State carried from the "change" steps to the matching "should" steps
var newFirstName: String = null
var newLastName: String = null
When("""^I change my first and last name$""") { () =>
navigateInUserMenu("My profile")
newFirstName = randomString
newLastName = randomString
setText(driver.findElement(By.id("txtFirstName")), newFirstName)
setText(driver.findElement(By.id("txtLastName")), newLastName)
submit
}
Then("""^my first and last name should be updated$""") { () =>
val displayedName = driver.findElement(By.cssSelector(".ActualUserName")).getAttribute("innerHTML")
displayedName should include(newFirstName + " " + newLastName)
}
When("""^I change the localization to Hungarian$""") { () =>
navigateInUserMenu("My profile")
driver.findElement(By.id("txtLocale")).sendKeys("magyar")
submit
}
// "Kilépés" is the Hungarian translation of "Logout"
Then("""^the text on the user interface should appear in that language$""") { () =>
userMenu should contain("Kilépés")
userMenu should not contain ("Logout")
}
When("""^I change my e-mail address$""") { () =>
userEmail = randomEmail
navigateInUserMenu("My profile")
setText(driver.findElement(By.id("txtEmail")), userEmail)
submit
}
Then("""^I can log in with my new e-mail address$""") { () =>
login(userEmail, password)
userMenu should contain("Logout")
}
// Field 0 appears to be the current password, 1 and 2 the new password and
// its confirmation — confirm against the Change password page markup.
When("""^I change my password$""") { () =>
navigateInUserMenu("Change password")
val passwordFields = driver.findElements(By.xpath("//input[@type='password']"))
setText(passwordFields.get(0), password)
password = randomString
setText(passwordFields.get(1), password)
setText(passwordFields.get(2), password)
submit
}
Then("""^I can log in with my new password$""") { () =>
login(userEmail, password)
userMenu should contain("Logout")
}
// Lazily resolved role rows; failing fast with a descriptive message when
// the database is missing its seed data.
lazy val adminRole = Role.find(net.liftweb.mapper.By(Role.name, "admin")).openOrThrowException("No admin role!")
// Fixed copy-pasted error message: a missing *client* role previously
// reported "No admin role!".
lazy val clientRole = Role.find(net.liftweb.mapper.By(Role.name, "client")).openOrThrowException("No client role!")
// Users seeded directly through the model layer for the Users-page scenarios
var someUsers: List[User] = null
Given("""^that there are Registered users$""") { () =>
val someAdmin = randomUser
UserRoles.create.user(someAdmin).role(adminRole).save
val someClient = randomUser
UserRoles.create.user(someClient).role(clientRole).save
val someAdminClient = randomUser
UserRoles.create.user(someAdminClient).role(adminRole).save
UserRoles.create.user(someAdminClient).role(clientRole).save
val someNew = randomUser
someUsers = List(someAdmin, someClient, someAdminClient, someNew)
}
When("""^I am on the Users page$""") { () =>
login("default@tar.hu", "abc123")
navigateInMainMenu("Users")
}
// findElement throws (failing the step) when a user link is missing
Then("""^I see the registered users$""") { () =>
for (user <- someUsers) {
driver.findElement(By.xpath("//a[contains(text(), '" + user.email + "')]"))
}
}
var someUser: User = null
Given("""^there is a Registered user$""") { () =>
someUser = randomUser
}
When("""^I select the User$""") { () =>
driver.findElement(By.xpath("//a[contains(text(), '" + someUser.email + "')]")).click
}
Then("""^I can see the details of that user$""") { () =>
driver.findElement(By.cssSelector(".usersPage"))
val firstNameFieldValue = driver.findElement(By.id("txtFirstName")).getAttribute("value")
firstNameFieldValue should equal(someUser.firstName.get)
}
var usersNewFirstName: String = null
When("""^I modify the User$""") { () =>
usersNewFirstName = randomString
setText(driver.findElement(By.id("txtFirstName")), usersNewFirstName)
submit
}
Then("""^the user data should be modified$""") { () =>
driver.findElement(By.xpath("//a[contains(text(), '" + usersNewFirstName + "')]"))
}
// Role grant/revoke scenarios; all seeded users share the password "abc123"
var someNewUser: User = null
Given("""^there is a New user$""") { () =>
someNewUser = randomUser
}
When("""^I add Client role to the User$""") { () =>
switchClientRole(someNewUser.email.get)
submit
}
Then("""^the user can see the Client pages$""") { () =>
login(someNewUser.email.get, "abc123")
mainMenu should contain allOf ("Tasks", "Tasksheet", "Timesheet")
}
When("""^I add Admin role to the User$""") { () =>
switchAdminRole(someNewUser.email.get)
submit
}
Then("""^the user can see the Admin pages$""") { () =>
login(someNewUser.email.get, "abc123")
mainMenu should contain allOf ("Projects", "Users")
}
Given("""^there is a Client user$""") { () =>
someNewUser = randomUser
UserRoles.create.user(someNewUser).role(clientRole).save
}
When("""^I revoke the User's Client role$""") { () =>
switchClientRole(someNewUser.email.get)
submit
}
Then("""^the user can not see the Client pages$""") { () =>
login(someNewUser.email.get, "abc123")
mainMenu should contain noneOf ("Tasks", "Tasksheet", "Timesheet")
}
Given("""^there is an Admin user$""") { () =>
someNewUser = randomUser
UserRoles.create.user(someNewUser).role(adminRole).save
}
When("""^I revoke the User's Admin role$""") { () =>
switchAdminRole(someNewUser.email.get)
submit
}
Then("""^the user can not see the Admin pages$""") { () =>
login(someNewUser.email.get, "abc123")
mainMenu should contain noneOf ("Projects", "Users")
}
// Persists a new user directly through the model layer (bypassing the UI)
// with the fixed password "abc123".
// NOTE(review): every generated user has superUser(true) — verify this is
// intentional for these scenarios, since it may mask permission checks.
def randomUser = {
User.create
.firstName(randomString)
.lastName(randomString)
.email(randomEmail)
.password("abc123")
.validated(true)
.superUser(true)
.saveMe()
}
}
| dodie/time-admin | src/e2e/scala/feature/step/Step.scala | Scala | apache-2.0 | 10,431 |
package test
// Appears to be a compiler regression test for SAM (single abstract method)
// conversion of lambdas to trait types; code must stay exactly as-is.
trait X { def foo(x: Int): Int; def bar = foo(2) }
trait XX extends X
object test {
val x: X = (x: Int) => 2 // should be a closure
val xx: XX = (x: Int) => 2 // should be a closure, but blows up in backend
}
| folone/dotty | tests/pos/simplesams.scala | Scala | bsd-3-clause | 230 |
package org.quantintel.ql.time.daycounters
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by Paul Bernard on 8/4/14.
*
* 1. 01/31/90 - 03/16/91
* 2. 05/06/94 - 10/30/94
* 3. 01/01/93 - 02/21/93
* 4. 02/01/93 - 03/01/93
* 5. 02/01/96 - 03/01/96
* 6. 01/01/93 - 01/01/94
* 7. 01/15/93 - 02/01/93
* 8. 02/15/93 - 04/01/93
* 9. 03/31/93 - 04/30/93
* 10. 03/31/93 - 12/31/93
* 11. 03/15/93 - 06/15/93
* 12. 11/01/93 - 03/01/94
* 13. 12/31/93 - 02/01/94
* 14. 07/15/93 - 09/15/93
* 15. 08/21/93 - 04/11/94
* 16. 03/31/93 - 04/01/93
* 17. 12/15/93 - 12/31/93
* 18. 12/15/93 - 12/30/93
*
*/
class ActActAFBTest extends FlatSpec with Matchers {
import org.quantintel.lang.numeric._
import org.quantintel.ql.time.Date
import org.quantintel.ql.time.daycounters.ActualActualConvention.AFB
// Each case computes the Act/Act (AFB) year fraction between two dates and
// compares it against a pre-computed expected value, rounded to 9 decimals.
"1. 01/31/1990 - 03/16/1991" should "be 1.120547945" in {
val d1 = new Date(31, 1, 1990)
val d2 = new Date(16, 3, 1991)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 1.120547945)
}
// 177 actual days / 365 = 0.484931507 (no Feb 29 in range)
"2. 05/06/1994 - 10/30/1994" should "be 0.484931507" in {
val d1 = new Date(6, 5, 1994)
val d2 = new Date(30, 10, 1994)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.484931507)
}
// Test description fixed: it previously read "01//01/1993" (double slash).
// 51 actual days / 365 = 0.139726027
"3. 01/01/1993 - 02/21/1993" should "be 0.139726027" in {
val d1 = new Date(1, 1, 1993)
val d2 = new Date(21, 2, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.139726027)
}
// 28 actual days / 365 = 0.076712329
"4. 02/01/1993 - 03/01/1993" should "be 0.076712329" in {
val d1 = new Date(1, 2, 1993)
val d2 = new Date(1, 3, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.076712329)
}
// Expected value in the description fixed: 29 days / 366 (1996 is a leap
// year) = 0.079234973. The name previously claimed 0.753424658 — the value
// belonging to case 10 — while the assertion below was already correct.
"5. 02/01/1996 - 03/01/1996" should "be 0.079234973" in {
val d1 = new Date(1, 2, 1996)
val d2 = new Date(1, 3, 1996)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.079234973)
}
// Remaining golden-value cases (6-18); same structure as above.
// NOTE(review): case 9's description uses "to" where every other case uses
// "-"; cosmetic inconsistency only.
"6. 01/01/1993 - 01/01/1994" should "be 1.000000000" in {
val d1 = new Date(1, 1, 1993)
val d2 = new Date(1, 1, 1994)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 1.000000000)
}
"7. 01/15/1993 - 02/01/1993" should "be 0.046575342" in {
val d1 = new Date(15, 1, 1993)
val d2 = new Date(1, 2, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.046575342)
}
"8. 02/15/1993 - 04/01/1993" should "be 0.123287671" in {
val d1 = new Date(15, 2, 1993)
val d2 = new Date(1, 4, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.123287671)
}
"9. 03/31/1993 to 04/30/1993" should "be 0.082191781" in {
val d1 = new Date(31, 3, 1993)
val d2 = new Date(30, 4, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.082191781)
}
"10. 03/31/1993 - 12/31/1993" should "be 0.753424658" in {
val d1 = new Date(31, 3, 1993)
val d2 = new Date(31, 12, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.753424658)
}
"11. 03/15/1993 - 06/15/1993" should "be 0.252054795" in {
val d1 = new Date(15, 3, 1993)
val d2 = new Date(15, 6, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.252054795)
}
"12. 11/01/1993 - 03/01/1994" should "be 0.328767123" in {
val d1 = new Date(1, 11, 1993)
val d2 = new Date(1, 3, 1994)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.328767123)
}
"13. 12/31/1993 - 02/01/1994" should "be 0.087671233" in {
val d1 = new Date(31, 12, 1993)
val d2 = new Date(1, 2, 1994)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.087671233)
}
"14. 07/15/1993 - 09/15/1993" should "be 0.169863014" in {
val d1 = new Date(15, 7, 1993)
val d2 = new Date(15, 9, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.169863014)
}
"15. 08/21/1993 - 04/11/1994" should "be 0.638356164" in {
val d1 = new Date(21, 8, 1993)
val d2 = new Date(11, 4, 1994)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.638356164)
}
"16. 03/31/1993 - 04/01/1993" should "be 0.002739726" in {
val d1 = new Date(31, 3, 1993)
val d2 = new Date(1, 4, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.002739726)
}
"17. 12/15/1993 - 12/31/1993" should "be 0.043835616" in {
val d1 = new Date(15, 12, 1993)
val d2 = new Date(31, 12, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.043835616)
}
"18. 12/15/1993 - 12/30/1993" should "be 0.041095890" in {
val d1 = new Date(15, 12, 1993)
val d2 = new Date(30, 12, 1993)
val yf :Double = ActualActual(AFB).yearFraction(d1, d2, null, null)
assert(yf.rounded(9) == 0.041095890)
}
}
| quantintel/spectrum | financial/src/test/scala/org/quantintel/ql/time/daycounters/ActActAFBTest.scala | Scala | apache-2.0 | 5,413 |
package db
import lib.{DatabaseServiceFetcher, ServiceConfiguration}
import builder.OriginalValidator
import com.bryzek.apidoc.api.v0.models.{ApplicationForm, OriginalType, Version, Visibility}
import com.bryzek.apidoc.spec.v0.models.{Application, Organization, Service}
import com.bryzek.apidoc.spec.v0.models.json._
import org.scalatest.{FunSpec, Matchers}
import java.util.UUID
import play.api.libs.json.{Json, JsObject}
class VersionsDaoSpec extends FunSpec with Matchers with util.TestApplication {
// A minimal, valid api.json original used as the uploaded artifact for
// every version created in these tests; the name is randomized so runs
// don't collide.
private[this] val Original = com.bryzek.apidoc.api.v0.models.Original(
`type` = OriginalType.ApiJson,
data = Json.obj(
"apidoc" -> Json.obj(
"version" -> com.bryzek.apidoc.spec.v0.Constants.Version
),
"name" -> s"test-${UUID.randomUUID}"
).toString
)
// Creates an application under the shared test organization, defaulting to
// a randomized key so each test gets a fresh application.
private[this] def createApplication(key: String = "test-" + UUID.randomUUID.toString): com.bryzek.apidoc.api.v0.models.Application = {
Util.createApplication(
org = Util.testOrg,
form = Util.createApplicationForm().copy(key = Some(key))
)
}
describe("with an application") {
val applicationKey = "test-" + UUID.randomUUID.toString
val application: com.bryzek.apidoc.api.v0.models.Application = createApplication(applicationKey)
val service = Util.createService(application)
it("create") {
// NOTE(review): `version` is unused and the assertion inspects a *new*
// version from Util.createVersion() rather than the one created on the
// line above — this test may not exercise what its name implies; verify.
val version = versionsDao.create(Util.createdBy, application, "1.0.0", Original, service)
Util.createVersion().version should be("1.0.0")
}
it("findByApplicationAndVersion") {
versionsDao.create(Util.createdBy, application, "1.0.1", Original, service)
versionsDao.findByApplicationAndVersion(Authorization.All, application, "1.0.1").map(_.service) should be(Some(service))
}
it("soft delete") {
val version1 = versionsDao.create(Util.createdBy, application, "1.0.2", Original, service)
versionsDao.softDelete(Util.createdBy, version1)
// Re-creating the same version number must yield a new row (new guid)
// that is otherwise identical to the soft-deleted one
val version2 = versionsDao.create(Util.createdBy, application, "1.0.2", Original, service)
version2.copy(
guid = version1.guid,
audit = version1.audit
) should be(version1)
version2.guid shouldNot be(version1.guid)
}
}
it("sorts properly") {
val app = createApplication()
val service = Util.createService(app)
val version1 = versionsDao.create(Util.createdBy, app, "1.0.2", Original, service)
val version2 = versionsDao.create(Util.createdBy, app, "1.0.2-dev", Original, service)
// The release version is expected to sort ahead of its pre-release tag
versionsDao.findAll(
Authorization.All,
applicationGuid = Some(app.guid)
).map(_.version) should be(Seq("1.0.2", "1.0.2-dev"))
}
it("can parse original") {
val app = createApplication()
val service = Util.createService(app)
val version = versionsDao.create(Util.createdBy, app, "1.0.2", Original, service)
val serviceConfig = ServiceConfiguration(
orgKey = "test",
orgNamespace = "test.apidoc",
version = "0.0.2"
)
// Round-trip check: the original stored alongside the version must still
// validate through OriginalValidator after persistence
val validator = OriginalValidator(
config = serviceConfig,
original = version.original.getOrElse {
sys.error("Missing original")
},
fetcher = DatabaseServiceFetcher(Authorization.All)
)
validator.validate match {
case Left(errors) => fail(errors.mkString("\\n"))
case Right(_) => {}
}
}
it("trims version number") {
val app = createApplication()
val service = Util.createService(app)
// Surrounding whitespace (including newlines) must be stripped on create
val version = versionsDao.create(Util.createdBy, app, " 1.0.2\\n ", Original, service)
version.version should be("1.0.2")
}
}
| Seanstoppable/apidoc | api/test/db/VersionsDaoSpec.scala | Scala | mit | 3,544 |
package org.jetbrains.plugins.cbt.project.data.service
import com.intellij.openapi.externalSystem.model.DataNode
import com.intellij.openapi.externalSystem.model.project.ProjectData
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider
import com.intellij.openapi.project.Project
import com.intellij.openapi.projectRoots.{JavaSdk, ProjectJdkTable}
import com.intellij.openapi.roots.ProjectRootManager
import org.jetbrains.plugins.cbt.project.data.service
import org.jetbrains.plugins.cbt.structure.CbtProjectData
import org.jetbrains.sbt.project.data.service.{AbstractDataService, AbstractImporter, Importer}
// Registers the CBT-specific project-data importer with the external-system
// import pipeline, keyed by CbtProjectData.Key.
class CbtProjectDataService extends AbstractDataService[CbtProjectData, Project](CbtProjectData.Key) {
override def createImporter(toImport: Seq[DataNode[CbtProjectData]],
projectData: ProjectData,
project: Project,
modelsProvider: IdeModifiableModelsProvider): Importer[CbtProjectData] =
new service.CbtProjectDataService.Importer(toImport, projectData, project, modelsProvider)
}
object CbtProjectDataService {
private class Importer(toImport: Seq[DataNode[CbtProjectData]],
projectData: ProjectData,
project: Project,
modelsProvider: IdeModifiableModelsProvider)
extends AbstractImporter[CbtProjectData](toImport, projectData, project, modelsProvider) {
override def importData(): Unit = {
dataToImport.foreach(node => doImport(node.getData))
}
// Selects the most recently registered JDK in the IDE and makes it the
// project SDK, inside a project-change write action.
private def doImport(dataNode: CbtProjectData): Unit = executeProjectChangeAction {
// NOTE(review): `dataNode` is unused here, and findMostRecentSdkOfType
// may return null when no JDK is configured — setProjectSdk would then
// clear the SDK; confirm that is acceptable.
val javaSdk = ProjectJdkTable.getInstance().findMostRecentSdkOfType(JavaSdk.getInstance)
ProjectRootManager.getInstance(project).setProjectSdk(javaSdk)
}
}
} | triplequote/intellij-scala | cbt/src/org/jetbrains/plugins/cbt/project/data/service/CbtProjectDataService.scala | Scala | apache-2.0 | 1,851 |
package is.hail.annotations
// Total ordering over region-value offsets (raw Long addresses). Subclasses
// implement the core offset comparison; the overloads below adapt it to the
// RegionValue/Region call shapes used elsewhere.
abstract class UnsafeOrdering extends Ordering[Long] with Serializable {
def compare(o1: Long, o2: Long): Int
def compare(rv1: RegionValue, rv2: RegionValue): Int =
compare(rv1.offset, rv2.offset)
// NOTE(review): the Region parameters are ignored by these overloads — the
// comparison uses only offsets; confirm implementations never need the
// region itself for dereferencing.
def compare(rv1: RegionValue, r2: Region, o2: Long): Int =
compare(rv1.offset, o2)
def compare(r1: Region, o1: Long, rv2: RegionValue): Int =
compare(o1, rv2.offset)
def toRVOrdering: Ordering[RegionValue] = on[RegionValue](rv => rv.offset)
}
| cseed/hail | hail/src/main/scala/is/hail/annotations/UnsafeOrdering.scala | Scala | mit | 495 |
package org.ensime.protocol
import java.io._
import org.ensime.config.{ProjectConfig, DebugConfig, ReplConfig}
import org.ensime.debug.{DebugUnit, DebugSourceLinePairs}
import org.ensime.model._
import org.ensime.server._
import org.ensime.util._
import org.ensime.util.SExp._
import scala.actors._
import scala.tools.nsc.util.{Position, RangePosition}
import scala.tools.refactoring.common.Change
import scala.util.control.NonFatal
import scala.util.parsing.input
// Default instance of the protocol for callers that don't need their own mixin
object SwankProtocol extends SwankProtocol {}
trait SwankProtocol extends Protocol {
import SwankProtocol._
import ProtocolConst._
val PROTOCOL_VERSION: String = "0.0.1"
val SERVER_NAME: String = "ENSIMEserver"
// Mutable wiring, populated once via the setters below; null until then —
// callers of `peer`/`rpcTarget` are presumably only reached after startup.
private var outPeer: Actor = null;
private var rpcTarget: RPCTarget = null;
def peer = outPeer
def setOutputActor(peer: Actor) { outPeer = peer }
def setRPCTarget(target: RPCTarget) { this.rpcTarget = target }
// Handle reading / writing of messages
// Serializes `value` and writes it prefixed with a 6-hex-digit length
// header, then flushes. NOTE(review): the header counts *characters*
// (data.length), not encoded bytes — verify both ends of the wire agree on
// a single-byte charset, otherwise multi-byte text will desynchronize
// framing. The println of every outgoing message looks like debug logging.
def writeMessage(value: WireFormat, out: Writer) {
val data: String = value.toWireString
val header: String = String.format("%06x", int2Integer(data.length))
val msg = header + data
println("Writing: " + msg)
out.write(msg)
out.flush()
}
// Blocks until `a` is completely filled from `in`, accumulating however
// many chars each read() call happens to deliver; throws EOFException if
// the stream ends first.
private def fillArray(in: java.io.Reader, a: Array[Char]) {
  var filled = 0
  while (filled < a.length) {
    val readCount = in.read(a, filled, a.length - filled)
    if (readCount == -1) {
      throw new EOFException("End of file reached in socket reader.")
    } else {
      filled += readCount
    }
  }
}
// Reused buffer for the fixed-size 6-char length header. NOTE(review):
// sharing one buffer makes readMessage non-reentrant; confirm only one
// thread ever reads from the socket.
private val headerBuf = new Array[Char](6);
// Reads one length-prefixed message: 6 hex chars of length, then that many
// chars of S-expression payload.
def readMessage(in: java.io.Reader): WireFormat = {
fillArray(in, headerBuf)
val msglen = Integer.valueOf(new String(headerBuf), 16).intValue()
if (msglen > 0) {
//TODO allocating a new array each time is inefficient!
val buf: Array[Char] = new Array[Char](msglen);
fillArray(in, buf)
SExp.read(new input.CharArrayReader(buf))
} else {
throw new IllegalStateException("Empty message read from socket!")
}
}
  /** Pushes an asynchronous (non-RPC) notification to the client; a missing
   *  detail string is sent as nil.
   */
  def sendBackgroundMessage(code: Int, detail: Option[String]) {
    sendMessage(SExp(
      key(":background-message"),
      code,
      detail.map(strToSExp).getOrElse(NilAtom())))
  }
def handleIncomingMessage(msg: Any) {
msg match {
case sexp: SExp => handleMessageForm(sexp)
case _ => System.err.println("WTF: Unexpected message: " + msg)
}
}
  /** First dispatch stage: only (:swank-rpc <form> <call-id>) envelopes are
   *  accepted; anything else triggers a reader-level protocol error.
   */
  private def handleMessageForm(sexp: SExp) {
    sexp match {
      case SExpList(KeywordAtom(":swank-rpc") :: form :: IntAtom(callId) :: rest) => {
        handleEmacsRex(form, callId)
      }
      case _ => {
        sendProtocolError(ErrUnrecognizedForm, Some(sexp.toReadableString))
      }
    }
  }
private def handleEmacsRex(form: SExp, callId: Int) {
form match {
case SExpList(SymbolAtom(name) :: rest) => {
try {
handleRPCRequest(name, form, callId)
} catch {
case e: Throwable =>
{
e.printStackTrace(System.err)
sendRPCError(ErrExceptionInRPC, Some(e.getMessage), callId)
}
}
}
case _ => {
sendRPCError(
ErrMalformedRPC,
Some("Expecting leading symbol in: " + form),
callId)
}
}
}
  /** Third dispatch stage: matches the RPC method name and the shape of its
   *  argument list, then delegates to the corresponding `rpcTarget` method.
   *  A call whose arguments do not match the expected shape is answered with
   *  ErrMalformedRPC; an unknown method name with ErrUnrecognizedRPC.
   */
  private def handleRPCRequest(callType: String, form: SExp, callId: Int) {
    println("\nHandling RPC: " + form)
    // Shared malformed-arguments reply; a def, so it is evaluated only at use sites.
    def oops = sendRPCError(ErrMalformedRPC, Some("Malformed " + callType + " call: " + form), callId)
    callType match {
      case "swank:connection-info" => {
        sendConnectionInfo(callId)
      }
      // --- project / undo / repl ---
      case "swank:init-project" => {
        form match {
          case SExpList(head :: (conf: SExpList) :: body) => {
            val config = ProjectConfig.fromSExp(conf)
            rpcTarget.rpcInitProject(config, callId)
          }
          case _ => oops
        }
      }
      case "swank:peek-undo" => {
        rpcTarget.rpcPeekUndo(callId)
      }
      case "swank:exec-undo" => {
        form match {
          case SExpList(head :: (IntAtom(id)) :: body) => {
            rpcTarget.rpcExecUndo(id, callId)
          }
          case _ => oops
        }
      }
      case "swank:repl-config" => {
        rpcTarget.rpcReplConfig(callId)
      }
      // --- incremental builder ---
      case "swank:builder-init" => {
        rpcTarget.rpcBuilderInit(callId)
      }
      case "swank:builder-add-files" => {
        form match {
          case SExpList(head :: SExpList(filenames) :: body) => {
            val files = filenames.map(_.toString)
            rpcTarget.rpcBuilderAddFiles(files, callId)
          }
          case _ => oops
        }
      }
      case "swank:builder-update-files" => {
        form match {
          case SExpList(head :: SExpList(filenames) :: body) => {
            val files = filenames.map(_.toString)
            rpcTarget.rpcBuilderUpdateFiles(files, callId)
          }
          case _ => oops
        }
      }
      case "swank:builder-remove-files" => {
        form match {
          case SExpList(head :: SExpList(filenames) :: body) => {
            val files = filenames.map(_.toString)
            rpcTarget.rpcBuilderRemoveFiles(files, callId)
          }
          case _ => oops
        }
      }
      // --- debugger ---
      case "swank:debug-config" => {
        rpcTarget.rpcDebugConfig(callId)
      }
      case "swank:debug-unit-info" => {
        form match {
          case SExpList(head :: StringAtom(sourceName) :: IntAtom(line) :: StringAtom(packPrefix) :: body) => {
            rpcTarget.rpcDebugUnitInfo(sourceName, line, packPrefix, callId)
          }
          case _ => oops
        }
      }
      case "swank:debug-class-locs-to-source-locs" => {
        form match {
          case SExpList(head :: SExpList(pairs) :: body) => {
            // Ill-formed pairs degrade to ("", -1) rather than failing the call.
            val nameLinePairs = pairs.flatMap {
              case SExpList((classname: StringAtom) :: (line: IntAtom) :: body) => {
                Some(classname.toString, line.value)
              }
              case _ => Some("", -1)
            }
            rpcTarget.rpcDebugClassLocsToSourceLocs(nameLinePairs, callId)
          }
          case _ => oops
        }
      }
      // --- type checking / completion / inspection ---
      case "swank:remove-file" => {
        form match {
          case SExpList(head :: StringAtom(file) :: body) => {
            rpcTarget.rpcRemoveFile(file, callId)
          }
          case _ => oops
        }
      }
      case "swank:typecheck-file" => {
        form match {
          case SExpList(head :: StringAtom(file) :: body) => {
            rpcTarget.rpcTypecheckFile(file, callId)
          }
          case _ => oops
        }
      }
      case "swank:typecheck-all" => {
        rpcTarget.rpcTypecheckAll(callId)
      }
      case "swank:scope-completion" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: StringAtom(prefix) :: BooleanAtom(constructor) :: body) => {
            rpcTarget.rpcScopeCompletion(file, point, prefix, constructor, callId)
          }
          case _ => oops
        }
      }
      case "swank:type-completion" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: StringAtom(prefix) :: body) => {
            rpcTarget.rpcTypeCompletion(file, point, prefix, callId)
          }
          case _ => oops
        }
      }
      case "swank:import-suggestions" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: SExpList(names) :: IntAtom(maxResults) :: body) => {
            rpcTarget.rpcImportSuggestions(file, point,
              names.map(_.toString).toList, maxResults, callId)
          }
          case _ => oops
        }
      }
      case "swank:public-symbol-search" => {
        form match {
          case SExpList(head :: SExpList(names) :: IntAtom(maxResults) :: body) => {
            rpcTarget.rpcPublicSymbolSearch(
              names.map(_.toString).toList, maxResults, callId)
          }
          case _ => oops
        }
      }
      case "swank:uses-of-symbol-at-point" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: body) => {
            rpcTarget.rpcUsesOfSymAtPoint(file, point, callId)
          }
          case _ => oops
        }
      }
      case "swank:package-member-completion" => {
        form match {
          case SExpList(head :: StringAtom(path) :: StringAtom(prefix) :: body) => {
            rpcTarget.rpcPackageMemberCompletion(path, prefix, callId)
          }
          case _ => oops
        }
      }
      case "swank:inspect-type-at-point" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: body) => {
            rpcTarget.rpcInspectTypeAtPoint(file, point, callId)
          }
          case _ => oops
        }
      }
      case "swank:inspect-type-by-id" => {
        form match {
          case SExpList(head :: IntAtom(id) :: body) => {
            rpcTarget.rpcInspectTypeById(id, callId)
          }
          case _ => oops
        }
      }
      case "swank:symbol-at-point" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: body) => {
            rpcTarget.rpcSymbolAtPoint(file, point, callId)
          }
          case _ => oops
        }
      }
      case "swank:type-by-id" => {
        form match {
          case SExpList(head :: IntAtom(id) :: body) => {
            rpcTarget.rpcTypeById(id, callId)
          }
          case _ => oops
        }
      }
      case "swank:type-by-name" => {
        form match {
          case SExpList(head :: StringAtom(name) :: body) => {
            rpcTarget.rpcTypeByName(name, callId)
          }
          case _ => oops
        }
      }
      case "swank:type-by-name-at-point" => {
        form match {
          case SExpList(head :: StringAtom(name) :: StringAtom(file) :: IntAtom(point) :: body) => {
            rpcTarget.rpcTypeByNameAtPoint(name, file, point, callId)
          }
          case _ => oops
        }
      }
      case "swank:call-completion" => {
        form match {
          case SExpList(head :: IntAtom(id) :: body) => {
            rpcTarget.rpcCallCompletion(id, callId)
          }
          case _ => oops
        }
      }
      case "swank:type-at-point" => {
        form match {
          case SExpList(head :: StringAtom(file) :: IntAtom(point) :: body) => {
            rpcTarget.rpcTypeAtPoint(file, point, callId)
          }
          case _ => oops
        }
      }
      case "swank:inspect-package-by-path" => {
        form match {
          case SExpList(head :: StringAtom(path) :: body) => {
            rpcTarget.rpcInspectPackageByPath(path, callId)
          }
          case _ => oops
        }
      }
      // --- refactoring / formatting ---
      case "swank:perform-refactor" => {
        form match {
          case SExpList(head :: IntAtom(procId) :: SymbolAtom(tpe) :: (params: SExp) :: BooleanAtom(interactive) :: body) => {
            rpcTarget.rpcPerformRefactor(Symbol(tpe), procId,
              listOrEmpty(params).toSymbolMap, interactive, callId)
          }
          case _ => oops
        }
      }
      case "swank:exec-refactor" => {
        form match {
          case SExpList(head :: IntAtom(procId) :: SymbolAtom(tpe) :: body) => {
            rpcTarget.rpcExecRefactor(Symbol(tpe), procId, callId)
          }
          case _ => oops
        }
      }
      case "swank:cancel-refactor" => {
        form match {
          case SExpList(head :: IntAtom(procId) :: body) => {
            rpcTarget.rpcCancelRefactor(procId, callId)
          }
          case _ => oops
        }
      }
      case "swank:format-source" => {
        form match {
          case SExpList(head :: SExpList(filenames) :: body) => {
            val files = filenames.map(_.toString)
            rpcTarget.rpcFormatFiles(files, callId)
          }
          case _ => oops
        }
      }
      case "swank:expand-selection" => {
        form match {
          case SExpList(head :: StringAtom(filename) :: IntAtom(start) :: IntAtom(end) :: body) => {
            rpcTarget.rpcExpandSelection(filename, start, end, callId)
          }
          case _ => oops
        }
      }
      case other => {
        sendRPCError(
          ErrUnrecognizedRPC,
          Some("Unknown :swank-rpc call: " + other),
          callId)
      }
    }
  }
def listOrEmpty(list: SExp): SExpList = {
list match {
case l: SExpList => l
case _ => SExpList(List())
}
}
  /** Replies (:return (:ok t) callId) for RPCs whose only result is success. */
  def sendRPCAckOK(callId: Int) {
    sendRPCReturn(true, callId)
  }

  /** Wraps a successful RPC result as (:return (:ok <sexp>) callId).
   *  @throws IllegalStateException if `value` is not an SExp
   */
  def sendRPCReturn(value: WireFormat, callId: Int) {
    value match {
      case sexp: SExp =>
        {
          sendMessage(SExp(
            key(":return"),
            SExp(key(":ok"), sexp),
            callId))
        }
      case _ => throw new IllegalStateException("Not a SExp: " + value)
    }
  }

  /** Replies (:return (:abort code detail) callId) for a failed RPC. */
  def sendRPCError(code: Int, detail: Option[String], callId: Int) {
    sendMessage(SExp(
      key(":return"),
      SExp(key(":abort"),
        code,
        detail.map(strToSExp).getOrElse(NilAtom())),
      callId))
  }

  /** Reports a wire-level error (no call id available at this stage). */
  def sendProtocolError(code: Int, detail: Option[String]) {
    sendMessage(
      SExp(
        key(":reader-error"),
        code,
        detail.map(strToSExp).getOrElse(NilAtom())))
  }
  /*
   * A sexp describing the server configuration, per the Swank standard.
   */
  def sendConnectionInfo(callId: Int) = {
    val info = SExp(
      key(":pid"), 'nil,
      key(":server-implementation"),
      SExp(
        key(":name"), SERVER_NAME),
      key(":machine"), 'nil,
      key(":features"), 'nil,
      key(":version"), PROTOCOL_VERSION)
    sendRPCReturn(info, callId)
  }

  /** One-shot notifications announcing that analysis services are available. */
  def sendCompilerReady() = sendMessage(SExp(key(":compiler-ready"), true))

  def sendIndexerReady() = sendMessage(SExp(key(":indexer-ready"), true))

  /** Pushes the accumulated compiler notes to the client. */
  def sendTypeCheckResult(notelist: NoteList) = {
    sendMessage(SExp(key(":typecheck-result"), toWF(notelist)))
  }
  /** Implicit conversions from compiler positions to their s-expression
   *  representation; undefined positions become nil.
   */
  object SExpConversion {

    implicit def posToSExp(pos: Position): SExp = {
      if (pos.isDefined) {
        SExp.propList((":file", pos.source.path), (":offset", pos.point))
      } else {
        'nil
      }
    }

    // Range positions additionally carry start/end offsets.
    implicit def posToSExp(pos: RangePosition): SExp = {
      if (pos.isDefined) {
        SExp.propList(
          (":file", pos.source.path),
          (":offset", pos.point),
          (":start", pos.start),
          (":end", pos.end))
      } else {
        'nil
      }
    }
  }
import SExpConversion._
  // -------------------------------------------------------------------------
  // Wire-format conversions: one toWF overload per model type. Each builds
  // the property-list s-expression the Swank client expects.
  // -------------------------------------------------------------------------

  def toWF(config: ProjectConfig): SExp = {
    SExp(
      key(":project-name"), config.name.map(StringAtom).getOrElse('nil),
      key(":source-roots"), SExp(config.sourceRoots.map { f => StringAtom(f.getPath) }))
  }

  def toWF(config: ReplConfig): SExp = {
    SExp.propList((":classpath", strToSExp(config.classpath)))
  }

  def toWF(config: DebugConfig): SExp = {
    SExp.propList(
      (":classpath", strToSExp(config.classpath)),
      (":sourcepath", strToSExp(config.sourcepath)))
  }

  def toWF(unit: DebugUnit): SExp = {
    SExp.propList(
      (":full-name", strToSExp(unit.classQualName)),
      (":package", strToSExp(unit.packageName)),
      (":start-line", intToSExp(unit.startLine)),
      (":end-line", intToSExp(unit.endLine)))
  }

  // Booleans map onto the Lisp convention: t / nil.
  def toWF(value: Boolean): SExp = {
    if (value) TruthAtom()
    else NilAtom()
  }

  def toWF(value: Null): SExp = {
    NilAtom()
  }

  def toWF(value: String): SExp = {
    StringAtom(value)
  }

  def toWF(value: DebugSourceLinePairs): SExp = {
    SExpList(value.pairs.map { p => SExp(p._1, p._2) })
  }

  def toWF(note: Note): SExp = {
    SExp(
      key(":severity"), note.friendlySeverity,
      key(":msg"), note.msg,
      key(":beg"), note.beg,
      key(":end"), note.end,
      key(":line"), note.line,
      key(":col"), note.col,
      key(":file"), note.file)
  }

  def toWF(notelist: NoteList): SExp = {
    val NoteList(isFull, notes) = notelist
    SExp(
      key(":is-full"),
      toWF(isFull),
      key(":notes"),
      SExpList(notes.map(toWF)))
  }

  // Assumes every element is in fact an SExp; anything else fails at runtime.
  def toWF(values: Iterable[WireFormat]): SExp = {
    SExpList(values.map(ea => ea.asInstanceOf[SExp]))
  }

  def toWF(value: SymbolInfoLight): SExp = {
    SExp.propList(
      (":name", value.name),
      (":type-sig", value.tpeSig),
      (":type-id", value.tpeId),
      (":is-callable", value.isCallable))
  }

  def toWF(value: PackageMemberInfoLight): SExp = {
    SExp(key(":name"), value.name)
  }

  def toWF(value: SymbolInfo): SExp = {
    SExp.propList(
      (":name", value.name),
      (":type", toWF(value.tpe)),
      (":decl-pos", value.declPos),
      (":is-callable", value.isCallable))
  }

  def toWF(value: FileRange): SExp = {
    SExp.propList(
      (":file", value.file),
      (":start", value.start),
      (":end", value.end))
  }

  def toWF(value: Position): SExp = {
    posToSExp(value)
  }

  def toWF(value: RangePosition): SExp = {
    posToSExp(value)
  }

  def toWF(value: NamedTypeMemberInfoLight): SExp = {
    SExp.propList(
      (":name", value.name),
      (":type-sig", value.tpeSig),
      (":type-id", value.tpeId),
      (":is-callable", value.isCallable))
  }

  def toWF(value: NamedTypeMemberInfo): SExp = {
    SExp.propList(
      (":name", value.name),
      (":type", toWF(value.tpe)),
      (":pos", value.pos),
      (":decl-as", value.declaredAs))
  }
  /** Dispatches on the runtime subtype of EntityInfo to the matching overload. */
  def toWF(value: EntityInfo): SExp = {
    value match {
      case value: PackageInfo => toWF(value)
      case value: TypeInfo => toWF(value)
      case value: NamedTypeMemberInfo => toWF(value)
      case value: NamedTypeMemberInfoLight => toWF(value)
      case value => throw new IllegalStateException("Unknown EntityInfo: " + value)
    }
  }

  def toWF(value: TypeInfo): SExp = {
    value match {
      // ArrowTypeInfo is matched first; the plain TypeInfo case below is the fallback.
      case value: ArrowTypeInfo =>
        {
          SExp.propList(
            (":name", value.name),
            (":type-id", value.id),
            (":arrow-type", true),
            (":result-type", toWF(value.resultType)),
            (":param-sections", SExp(value.paramSections.map(toWF))))
        }
      case value: TypeInfo =>
        {
          SExp.propList((":name", value.name),
            (":type-id", value.id),
            (":full-name", value.fullName),
            (":decl-as", value.declaredAs),
            (":type-args", SExp(value.args.map(toWF))),
            (":members", SExp(value.members.map(toWF))),
            (":pos", value.pos),
            (":outer-type-id", value.outerTypeId.map(intToSExp).getOrElse('nil)))
        }
      case value => throw new IllegalStateException("Unknown TypeInfo: " + value)
    }
  }

  def toWF(value: PackageInfo): SExp = {
    SExp.propList((":name", value.name),
      (":info-type", 'package),
      (":full-name", value.fullname),
      (":members", SExpList(value.members.map(toWF))))
  }

  def toWF(value: CallCompletionInfo): SExp = {
    SExp.propList(
      (":result-type", toWF(value.resultType)),
      (":param-sections", SExp(value.paramSections.map(toWF))))
  }

  def toWF(value: ParamSectionInfo): SExp = {
    SExp.propList(
      (":params", SExp(value.params.map {
        case (nm, tp) => SExp(nm, toWF(tp))
      })),
      (":is-implicit", value.isImplicit))
  }

  def toWF(value: InterfaceInfo): SExp = {
    SExp.propList(
      (":type", toWF(value.tpe)),
      (":via-view", value.viaView.map(strToSExp).getOrElse('nil)))
  }

  def toWF(value: TypeInspectInfo): SExp = {
    SExp.propList(
      (":type", toWF(value.tpe)),
      (":info-type", 'typeInspect),
      (":companion-id", value.companionId match {
        case Some(id) => id
        case None => 'nil
      }), (":interfaces", SExp(value.supers.map(toWF))))
  }

  def toWF(value: RefactorFailure): SExp = {
    SExp.propList(
      (":procedure-id", value.procedureId),
      (":status", 'failure),
      (":reason", value.message))
  }

  def toWF(value: RefactorEffect): SExp = {
    SExp.propList(
      (":procedure-id", value.procedureId),
      (":refactor-type", value.refactorType),
      (":status", 'success),
      (":changes", SExpList(value.changes.map(changeToWF))))
  }

  def toWF(value: RefactorResult): SExp = {
    SExp.propList(
      (":procedure-id", value.procedureId),
      (":refactor-type", value.refactorType),
      (":touched-files", SExpList(value.touched.map(f => strToSExp(f.getAbsolutePath)))))
  }

  def toWF(value: SymbolSearchResults): SExp = {
    SExpList(value.syms.map(toWF))
  }

  def toWF(value: ImportSuggestions): SExp = {
    SExpList(value.symLists.map { l => SExpList(l.map(toWF)) })
  }
  // Optional (file, offset) source position used by the search results below.
  private def toWF(pos: Option[(String, Int)]): SExp = {
    pos match {
      case Some((f, o)) => SExp.propList((":file", f), (":offset", o))
      case _ => 'nil
    }
  }

  def toWF(value: SymbolSearchResult): SExp = {
    value match {
      case value: TypeSearchResult => {
        SExp.propList(
          (":name", value.name),
          (":local-name", value.localName),
          (":decl-as", value.declaredAs),
          (":pos", toWF(value.pos)))
      }
      case value: MethodSearchResult => {
        SExp.propList(
          (":name", value.name),
          (":local-name", value.localName),
          (":decl-as", value.declaredAs),
          (":pos", toWF(value.pos)),
          (":owner-name", value.owner))
      }
      case value => throw new IllegalStateException("Unknown SymbolSearchResult: " + value)
    }
  }

  def toWF(value: Undo): SExp = {
    SExp.propList(
      (":id", value.id),
      (":changes", SExpList(value.changes.map(changeToWF))),
      (":summary", value.summary))
  }

  def toWF(value: UndoResult): SExp = {
    SExp.propList(
      (":id", value.id),
      (":touched-files", SExpList(value.touched.map(f => strToSExp(f.getAbsolutePath)))))
  }

  /** A single text edit produced by the refactoring library. */
  private def changeToWF(ch: Change): SExp = {
    SExp.propList(
      (":file", ch.file.path),
      (":text", ch.text),
      (":from", ch.from),
      (":to", ch.to))
  }
}
| non/ensime | src/main/scala/org/ensime/protocol/SwankProtocol.scala | Scala | gpl-3.0 | 21,857 |
package chess
import Pos._
/** Specs for queen movement, capture and threat detection. Board fixtures
 *  are parsed from the literal layouts below; in the expected-destination
 *  diagrams an `x` marks a square the queen at C4 may move to.
 */
class QueenTest extends ChessTest {

  "a queen" should {

    val queen = White - Queen

    "move in any direction until the edge of the board" in {
      pieceMoves(queen, D4) must bePoss(D5, D6, D7, D8, D3, D2, D1, E4, F4, G4, H4, C4, B4, A4, C3, B2, A1, E5, F6, G7, H8, C5, B6, A7, E3, F2, G1)
    }

    "move 1 position in any direction, even from the edges" in {
      pieceMoves(queen, H8) must bePoss(H7, H6, H5, H4, H3, H2, H1, G7, F6, E5, D4, C3, B2, A1, G8, F8, E8, D8, C8, B8, A8)
    }

    "not move to positions that are occupied by the same colour" in {
      val board = """
k B
N Q P
PPPPPPPP
NBQKBNR
"""
      board destsFrom C4 must bePoss(board, """
k B x
x x
x x x
xxx
NxQxxxxP
xxx
PPPPPPPP
NBQKBNR
""")
    }

    "capture opponent pieces" in {
      val board = """
k B
q
p
N QP P
PPPPPPPP
NBQKBNR
"""
      board destsFrom C4 must bePoss(board, """
k B
x x
x x x
xxx
NxQP P
xxx
PPPPPPPP
NBQKBNR
""")
    }

    "threaten" in {
      val board = """
k B
q q
p
n Q Pp
PPPPPPPP
NBQKBNR
"""
      "a reachable enemy - horizontal" in {
        board actorAt C4 map (_ threatens A4) must beSome(true)
      }
      "a reachable enemy - diagonal" in {
        board actorAt C4 map (_ threatens A6) must beSome(true)
      }
      "an unreachable enemy" in {
        board actorAt C4 map (_ threatens H4) must beSome(false)
      }
      "a reachable friend" in {
        board actorAt C4 map (_ threatens C2) must beSome(false)
      }
      "nothing" in {
        board actorAt C4 map (_ threatens B6) must beSome(false)
      }
    }
  }
}
| cxd4/scalachess | src/test/scala/QueenTest.scala | Scala | mit | 1,632 |
package chandu0101.scalajs.react.components.reactselect
import japgolly.scalajs.react._
import scala.scalajs.js
/** Props facade for the react-select `Value` component (Scala.js).
 *  `@js.native` facade: members are implemented by the underlying
 *  JavaScript object and must not be given bodies here.
 */
@js.native
trait ValueProps[T] extends js.Object {

  /** `disabled` prop passed through from ReactSelect. */
  def disabled: js.UndefOr[Boolean]

  /** Handler invoked when the value label is clicked. */
  def onClick: js.UndefOr[(ValueOption[T], ReactEvent) => Callback]

  /** Handler invoked to remove this value. */
  def onRemove: js.UndefOr[ValueOption[T] => Callback]

  /** The option object rendered by this value. */
  def value: ValueOption[T]
}
| rleibman/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/reactselect/ValueProps.scala | Scala | apache-2.0 | 532 |
package jsmessages.api
import play.api.i18n._
import play.api.Application
import cms.ContentManager
import cms.dto.EntryType
import cms.dto.Entry
object JsMessages {

  /**
   * Generates a JavaScript function able to compute localized messages.
   *
   * For example:
   *
   * {{{
   *   def jsMessages = Action { implicit request =>
   *     Ok(JsMessages(Some("window.MyMessages"))).as(JAVASCRIPT)
   *   }
   * }}}
   *
   * And you can use it in your JavaScript code as follows:
   * {{{
   *   alert(MyMessages('greeting', 'World'));
   * }}}
   *
   * Provided you have the following message in your conf/messages file:
   * {{{
   * greeting=Hello {0}!
   * }}}
   *
   * Note: This implementation does not handle quotes escaping in patterns
   * (see http://docs.oracle.com/javase/7/docs/api/java/text/MessageFormat.html)
   *
   * @param namespace Optional JavaScript namespace receiving the function
   * definition. If not set this generates a bare function expression;
   * otherwise the function is assigned to the given namespace. Note: you can
   * pass something like `Some("var Messages")` to use a fresh variable.
   */
  def apply(namespace: Option[String] = None)(implicit app: Application, lang: Lang): String = apply(namespace, allMessages)

  /**
   * Generates a JavaScript function able to compute localized messages for a
   * given keys subset.
   *
   * Example:
   *
   * {{{
   *   JsMessages.subset(Some("window.MyMessages"))(
   *     "error.required",
   *     "error.number"
   *   )
   * }}}
   */
  def subset(namespace: Option[String] = None)(keys: String*)(implicit app: Application, lang: Lang): String = {
    val messages = (for {
      key <- keys
      message <- allMessages.get(key)
    } yield (key, message)).toMap
    apply(namespace, messages)
  }

  /**
   * Renders the JavaScript lookup function over an explicit message map.
   * Keys and messages are ECMAScript-escaped; MessageFormat's escaped quote
   * (`''`) is collapsed back to `'` before escaping. Unknown keys resolve to
   * the key itself; `{0}`, `{1}`, ... are substituted positionally.
   */
  def apply(namespace: Option[String], messages: Map[String, String]): String = {
    import org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript
    """%s(function(){var ms={%s}; return function(k){var m=ms[k]||k;for(var i=1;i<arguments.length;i++){m=m.replace('{'+(i-1)+'}',arguments[i])} return m}})();""".format(
      namespace.map { _ + "=" }.getOrElse(""),
      (for ((key, msg) <- messages) yield {
        "'%s':'%s'".format(escapeEcmaScript(key), escapeEcmaScript(msg.replace("''", "'")))
      }).mkString(",")
    )
  }

  /**
   * All messages served to the client, keyed by message key; the CMS is the
   * source of truth. A previous revision also computed the Play i18n map
   * (default ++ language-specific) here but discarded the result — that dead
   * computation has been removed. If Play messages should ever be merged in,
   * do so explicitly.
   */
  private def allMessages(implicit app: Application, lang: Lang) = {
    val seq: Seq[Entry] = ContentManager.filtered(EntryType.Message)
    seq.map { e => (e.key, helpers.CmsMessages.getMessage(e.key)) }.toMap
  }
}
| lukaszbudnik/hackaton-portal | app/jsmessages/api/JsMessages.scala | Scala | apache-2.0 | 2,715 |
package org.jetbrains.plugins.scala
package lang.xml
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.codeInspection.xml.ScalaXmlUnmatchedTagInspection
/**
* User: Dmitry Naydanov
* Date: 4/13/12
*/
/**
 * Checks the quick fixes offered by [[ScalaXmlUnmatchedTagInspection]]:
 * renaming the opening tag or the closing tag of a mismatched XML pair.
 *
 * User: Dmitry Naydanov
 * Date: 4/13/12
 */
class XmlUnmatchedTagQuickFixesTest extends ScalaLightCodeInsightFixtureTestAdapter {
  val renameOpeningQuickFixHint = ScalaBundle.message("xml.rename.opening.tag")
  val renameClosingQuickFixHint = ScalaBundle.message("xml.rename.closing.tag")
  val deleteUnmatchedTagHint = ScalaBundle.message("xml.delete.unmatched.tag")

  // Applies the quick fix identified by `hint` to `text` and compares the
  // result against `assumedStub`; the replace normalizes the fixtures
  // before comparison.
  private def check(text: String, assumedStub: String, hint: String) {
    testQuickFix(text.replace("\\r", ""), assumedStub.replace("\\r", ""), hint, classOf[ScalaXmlUnmatchedTagInspection])
  }

  def testSimple() {
    val text = "val xml = <a>blah</b>"
    val assumedStub = "val xml = <a>blah</a>"
    check(text, assumedStub, renameClosingQuickFixHint)
  }

  def testWithAttributes() {
    val text = """val xml = <aaa attr1="1" attr2="attr2">blah blah</bbb>"""
    val assumedStub = """val xml = <bbb attr1="1" attr2="attr2">blah blah</bbb>"""
    check(text, assumedStub, renameOpeningQuickFixHint)
  }

  def testNested() {
    val text =
      """
val xml = <aaa attr1="1">
<bbb>blah blah</bbb>
<ccc>
<bbb attrB="A" attrC="d">
{i + j + k}
</lll>
</ccc>
</aaa>
"""
    val assumedStub =
      """
val xml = <aaa attr1="1">
<bbb>blah blah</bbb>
<ccc>
<lll attrB="A" attrC="d">
{i + j + k}
</lll>
</ccc>
</aaa>
"""
    check(text, assumedStub, renameOpeningQuickFixHint)
  }

  def testInsideCase() {
    val text =
      """
<aa></aa> match {
case <aaa><bbb>{1 + 2 + i}</ccc></aaa> =>
case _ =>
}
"""
    val assumedStub =
      """
<aa></aa> match {
case <aaa><bbb>{1 + 2 + i}</bbb></aaa> =>
case _ =>
}
"""
    check(text, assumedStub, renameClosingQuickFixHint)
  }
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/lang/xml/XmlUnmatchedTagQuickFixesTest.scala | Scala | apache-2.0 | 2,283 |
// Copyright (c) 2013-2020 Rob Norris and Contributors
// This software is licensed under the MIT License (MIT).
// For more information see LICENSE or https://opensource.org/licenses/MIT
package doobie.issue
import cats.effect.IO
import doobie._, doobie.implicits._
/** Regression test for issue 262: `getColumnJdbcMeta` must tolerate a JDBC
 *  driver returning null statement metadata, yielding an empty column list.
 */
class `262` extends munit.FunSuite {

  import cats.effect.unsafe.implicits.global

  // an interpreter that returns null when we ask for statement metadata
  object Interp extends KleisliInterpreter[IO] {
    val asyncM = WeakAsync[IO]
    override lazy val PreparedStatementInterpreter =
      new PreparedStatementInterpreter {
        override def getMetaData = primitive(_ => null)
      }
  }

  // In-memory H2 database kept alive for the whole JVM (DB_CLOSE_DELAY=-1).
  val baseXa = Transactor.fromDriverManager[IO](
    "org.h2.Driver",
    "jdbc:h2:mem:queryspec;DB_CLOSE_DELAY=-1",
    "sa", ""
  )

  // A transactor that uses our interpreter above
  val xa: Transactor[IO] =
    Transactor.interpret.set(baseXa, Interp.ConnectionInterpreter)

  test("getColumnJdbcMeta should handle null metadata") {
    val prog = HC.prepareStatement("select 1")(HPS.getColumnJdbcMeta)
    assertEquals(prog.transact(xa).unsafeRunSync(), Nil)
  }
}
| tpolecat/doobie | modules/core/src/test/scala/doobie/issue/262.scala | Scala | mit | 1,144 |
/*
* Copyright (c) 2013 David Soergel <dev@davidsoergel.com>
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package worldmake.executionstrategy
import com.typesafe.scalalogging.slf4j.Logging
import worldmake._
import scalax.file.Path
import scala.concurrent.{ExecutionContext, Future}
import scala.collection.GenMap
import worldmake.WorldMakeConfig._
import scalax.io.Resource
import java.io.File
import scala.Some
import scala.sys.process.{ProcessLogger, Process}
import ExecutionContext.Implicits.global
import scala.reflect.runtime.universe._
import worldmake.storage.{Identifier, ManagedPathArtifact, Storage}
import scalax.file.defaultfs.DefaultPath
/**
* @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
*/
/**
 * Executes a derivation by running its reified script with /bin/sh in a
 * temporary working directory. Dependency outputs are exposed to the script
 * through environment variables, the output location through `$out`, and the
 * combined stdout/stderr is captured as the build log.
 *
 * @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
 */
object LocalExecutionStrategy extends SystemExecutionStrategy with Logging {

  /** Waits for the script and all dependencies to be reified, then runs the script. */
  def apply(pr: BlockedProvenance[ManagedPath], reifiedScriptF: Future[Successful[String]], reifiedDependenciesF: Future[Iterable[(String, Successful[Any])]]): Future[Successful[ManagedPath]] = {
    for (reifiedScript <- reifiedScriptF;
         reifiedDependencies <- reifiedDependenciesF
    ) yield {
      val reifiedDependenciesM = reifiedDependencies.toMap
      systemExecuteWithArgs(pr.pending(Set(reifiedScript), reifiedDependenciesM), reifiedScript, reifiedDependenciesM)
    }
  }

  /** Runs the script synchronously; deletes the output and throws on a
   *  non-zero exit code, otherwise records and returns the completed
   *  provenance. The working directory is retained only on failure or when
   *  debugWorkingDirectories is set.
   */
  private def systemExecuteWithArgs(pp: PendingProvenance[ManagedPath], reifiedScript: Successful[String], reifiedDependencies: GenMap[String, Successful[_]]): Successful[ManagedPath] = {
    // this path does not yet exist.
    // the derivation may write a single file to it, or create a directory there.
    val outputId: Identifier[ManagedPath] = Storage.fileStore.newId
    val outputPath: Path = Storage.fileStore.getOrCreate(outputId)
    val workingDir: DefaultPath = Path.createTempDirectory(dir = WorldMakeConfig.localTempDir, deleteOnExit = !WorldMakeConfig.debugWorkingDirectories)
    //val log: File = (outputPath / "worldmake.log").fileOption.getOrElse(throw new Error("can't create log: " + outputPath / "worldmake.log"))
    //val logWriter = Resource.fromFile(log)
    val logWriter = new LocalWriteableStringOrManagedFile(Storage.logStore)
    // Script environment: global config, one variable per dependency, and $out.
    val dependenciesEnvironment: GenMap[String, String] = reifiedDependencies.mapValues(_.output.environmentString)
    val environment: GenMap[String, String] = WorldMakeConfig.globalEnvironment ++ dependenciesEnvironment ++ Map("out" -> outputPath.toAbsolute.path) //, "PATH" -> WorldMakeConfig.globalPath)
    // Materialize the script and an environment dump into the working dir.
    val runner = Resource.fromFile((workingDir / "worldmake.runner").toRealPath().jfile)
    runner.write(reifiedScript.output.value)
    val envlog = Resource.fromFile((workingDir / "worldmake.environment").toRealPath().jfile)
    envlog.write(environment.map({
      case (k, v) => k + " = " + v
    }).mkString("\\n"))
    val pb = Process(Seq("/bin/sh", "./worldmake.runner"), workingDir.jfile, environment.toArray: _*)
    val prs = pp.running(new MemoryLocalRunningInfo(workingDir)) // process ID not available
    // any successful output should be written to a file in the output directory, so anything on stdout or stderr is
    // logging output and should be combined for easier debugging
    val pbLogger = ProcessLogger(
      (o: String) => logWriter.write(o),
      (e: String) => logWriter.write(e))
    // Blocks until the subprocess exits.
    val exitCode = pb ! pbLogger
    // todo: detect retained dependencies like Nix
    /*
    val requestedType = {
      classManifest[T].toString //match { case TypeRef(pre, sym, args) => args }
    }
    val result = TypedPathArtifact[T](TypedPathMapper.map(requestedType, outputPath)) //TypedPathArtifact(outputPath)
    */
    val result = ManagedPathArtifact(ManagedPath(outputId))
    if (exitCode != 0) {
      logger.warn("Deleting output: " + outputPath)
      outputPath.deleteRecursively()
      logger.warn("Retaining working directory: " + workingDir)
      val f = prs.failed(exitCode, Some(logWriter), Map.empty)
      throw FailedRecipeException(logWriter.getString, f)
    }
    if (WorldMakeConfig.debugWorkingDirectories) {
      logger.warn("Retaining working directory: " + workingDir)
    } else {
      workingDir.deleteRecursively()
    }
    prs.completed(exitCode, Some(logWriter), Map.empty, result)
  }
}
| davidsoergel/worldmake | src/main/scala/worldmake/executionstrategy/LocalExecutionStrategy.scala | Scala | apache-2.0 | 4,304 |
// Generated/evolved ScalaCollider synth graph: a bank of chaotic oscillators
// (GbmanL, FBSineN/L, StandardN, LatoocarfianL), noise and filters is mixed
// down to mono, DC-leaked, guarded against NaN/Inf via CheckBadValues+Gate,
// limited, panned to stereo and scaled by the "amp" control (default 0.1).
val x = play {
  // RandSeed.ir(trig = 1, seed = 56789.0)
  val bRF_0 = BRF.ar(695.37335, freq = -0.0029116, rq = 419.73846)
  val gbmanL_0 = GbmanL.ar(freq = 419.73846, xi = 0.00788784, yi = -2726.2134)
  val unaryOpUGen = gbmanL_0.log
  val fBSineN = FBSineN.ar(freq = 12.325766, im = 637.2363, fb = -0.0029116, a = -2029.8915, c = 582.82227, xi = 23.868387, yi = 0.262003)
  val xpos = LeastChange.ar(a = 419.73846, b = 12.325766)
  val xi_0 = Pan4.ar(fBSineN, xpos = xpos, ypos = 254.25714, level = 582.82227)
  val gbmanL_1 = GbmanL.ar(freq = 0.262003, xi = xi_0, yi = 22.71261)
  val in_0 = FBSineN.ar(freq = 419.73846, im = 0.262003, fb = -0.0029116, a = -2029.8915, c = 637.2363, xi = 23.868387, yi = 582.82227)
  val freq_0 = Ramp.ar(in_0, dur = 0.1)
  val im_0 = SetResetFF.ar(trig = 12.325766, reset = 12.325766)
  val formFreq = FBSineL.ar(freq = 419.73846, im = im_0, fb = 419.73846, a = 1.1, c = 0.020259222, xi = fBSineN, yi = 2.3985734)
  val a_0 = Formant.ar(fundFreq = 419.73846, formFreq = formFreq, bw = 254.25714)
  val bBandStop_0 = BBandStop.ar(0.00788784, freq = 0.262003, bw = 637.2363)
  val standardN = StandardN.ar(freq = 107.30127, k = 9.444879E-4, xi = bBandStop_0, yi = -962.5887)
  val bBandStop_1 = BBandStop.ar(637.2363, freq = 0.262003, bw = 0.00788784)
  val bRF_1 = BRF.ar(bBandStop_1, freq = bBandStop_0, rq = 0.00788784)
  val latoocarfianL = LatoocarfianL.ar(freq = freq_0, a = a_0, b = 12.325766, c = 2.3985734, d = 107.30127, xi = bBandStop_1, yi = -466.74478)
  val lFClipNoise = LFClipNoise.ar(-466.74478)
  // Sum all voices, then sanitize: Gate passes audio only while bad == 0.
  val mix = Mix(Seq[GE](lFClipNoise, latoocarfianL, bRF_1, standardN, gbmanL_1, unaryOpUGen, bRF_0))
  val mono = Mix.Mono(mix)
  val leak = LeakDC.ar(mono)
  val bad = CheckBadValues.ar(leak, post = 0)
  val gate = Gate.ar(leak, bad sig_== 0)
  val lim = Pan2.ar(Limiter.ar(gate)) * "amp".kr(0.1) // * DelayN.ar(Line.ar(0, 1, 1), 0.2, 0.2)
  Out.ar(0, lim)
}
| Sciss/Grenzwerte | individual_sounds/1630_242.scala | Scala | gpl-3.0 | 2,031 |
package ohnosequences.scarph.test.titan
import ohnosequences.scarph._
/** Scarph vertex implementation backed by a Titan vertex. Provides implicit
 *  machinery for reading properties and for retrieving in/out edges with the
 *  arities encoded in the edge type (OneOut/ManyOut, OneIn/ManyIn).
 */
trait AnyTVertex extends AnyVertex { tvertex =>

  type Rep = com.thinkaurelius.titan.core.TitanVertex

  /* Reading any property from a TitanVertex */
  import AnyProperty._
  implicit def readFromTitanVertex(vr: TaggedRep) =
    new ReadFrom[TaggedRep](vr) {
      def apply[P <: AnyProperty](p: P): p.Rep = vr.getProperty[p.Rep](p.label)
    }

  /* Getting a property from any TitanVertex */
  import SmthHasProperty._
  implicit def unsafeGetProperty[P <: AnyProperty: PropertyOf[this.Tpe]#is](p: P) =
    new GetProperty[P](p) {
      def apply(rep: TaggedRep): p.Rep = rep.getProperty[p.Rep](p.label)
    }

  // TODO: provide ReadFrom for %:

  /* Retrieving edges */
  import com.tinkerpop.blueprints.Direction
  import scala.collection.JavaConversions._

  // TODO: when we get all edges with the given label, they can come from vertices with the wrong type

  /* OUT */
  // One-out edges yield an Option (the first edge, if any)...
  implicit def unsafeRetrieveOneOutEdge[
    E <: Singleton with AnyEdge { type Tpe <: From[tvertex.Tpe] with OneOut }
  ](e: E): RetrieveOutEdge[E] = new RetrieveOutEdge[E](e) {
    def apply(rep: tvertex.TaggedRep): e.tpe.Out[e.TaggedRep] = {
      val it = rep.getEdges(Direction.OUT, e.tpe.label).asInstanceOf[java.lang.Iterable[e.TaggedRep]]
      it.headOption: Option[e.TaggedRep]
    }
  }

  // ...while many-out edges yield the whole list.
  implicit def unsafeRetrieveManyOutEdge[
    E <: Singleton with AnyEdge { type Tpe <: From[tvertex.Tpe] with ManyOut }
  ](e: E): RetrieveOutEdge[E] = new RetrieveOutEdge[E](e) {
    def apply(rep: tvertex.TaggedRep): e.tpe.Out[e.TaggedRep] = {
      val it = rep.getEdges(Direction.OUT, e.tpe.label).asInstanceOf[java.lang.Iterable[e.TaggedRep]]
      it.toList: List[e.TaggedRep]
    }
  }

  /* IN */
  implicit def unsafeRetrieveOneInEdge[
    E <: Singleton with AnyEdge { type Tpe <: To[tvertex.Tpe] with OneIn }
  ](e: E): RetrieveInEdge[E] = new RetrieveInEdge[E](e) {
    def apply(rep: tvertex.TaggedRep): e.tpe.In[e.TaggedRep] = {
      val it = rep.getEdges(Direction.IN, e.tpe.label).asInstanceOf[java.lang.Iterable[e.TaggedRep]]
      it.headOption: Option[e.TaggedRep]
    }
  }

  implicit def unsafeRetrieveManyInEdge[
    E <: Singleton with AnyEdge { type Tpe <: To[tvertex.Tpe] with ManyIn }
  ](e: E): RetrieveInEdge[E] = new RetrieveInEdge[E](e) {
    def apply(rep: tvertex.TaggedRep): e.tpe.In[e.TaggedRep] = {
      val it = rep.getEdges(Direction.IN, e.tpe.label).asInstanceOf[java.lang.Iterable[e.TaggedRep]]
      it.toList: List[e.TaggedRep]
    }
  }
}
/** Denotation of a Titan vertex of type `VT`; fixes the member type `Tpe`. */
class TVertex[VT <: AnyVertexType](val tpe: VT)
    extends AnyTVertex { type Tpe = VT }
| alberskib/scarph | src/test/scala/ohnosequences/scarph/titan/TVertex.scala | Scala | agpl-3.0 | 2,682 |
/*
* Copyright 2014-15 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eventstreams
import akka.actor.ActorRef
/** A pub/sub communication message addressed to a subscription subject of type T. */
trait ServiceSubscriptionMessage[T] extends CommMessage {
  val subj: T
}
/** Marker for messages whose payload may be served from a cache. */
trait CacheableMessage {
  val canBeCached: Boolean
}
/** Marker trait for subscription subjects. */
trait Subj
/** Identifies a single topic within a component. */
case class TopicKey(key: String)
/**
 * Extractor splitting a topic key of the form "prefix:remainder" at the
 * first ':' into (prefix, remainder); keys without a ':' do not match.
 */
object TopicWithPrefix {
  def unapply(t: TopicKey): Option[(String, String)] = {
    val splitAt = t.key.indexOf(':')
    if (splitAt < 0) None
    else Some((t.key.take(splitAt), t.key.drop(splitAt + 1)))
  }
}
case class ComponentKey(key: String) {
  /** Child key: appends a path segment separated by '/'. */
  def /(s: String) = ComponentKey(s"$key/$s")
  /** Form usable as an actor name: every non-word character becomes '_',
    * then doubled underscores are collapsed (single replacement pass). */
  def toActorId = {
    val sanitized = key.replaceAll("""[\W]""", "_")
    sanitized.replaceAll("__", "_")
  }
}
/** Subject addressing a topic within a component on the local node. */
case class LocalSubj(component: ComponentKey, topic: TopicKey) extends Subj {
  override def toString: String = component.key + "#" + topic.key
}
/** A local subject qualified with the address of a remote node. */
case class RemoteAddrSubj(address: String, localSubj: LocalSubj) extends Subj {
  override def toString: String = localSubj + "@" + address
}
// Subscription lifecycle and data-flow messages. `subj` is typed Any so
// both local and remote subject forms can travel through the same channel.
case class Subscribe(sourceRef: ActorRef, subj: Any) extends ServiceSubscriptionMessage[Any]
case class Unsubscribe(sourceRef: ActorRef, subj: Any) extends ServiceSubscriptionMessage[Any]
case class Command(subj: Any, replyToSubj: Option[Any], data: Option[String] = None) extends ServiceSubscriptionMessage[Any]
case class Update(subj: Any, data: String, override val canBeCached: Boolean = true) extends ServiceSubscriptionMessage[Any] with CacheableMessage
case class CommandOk(subj: Any, data: String) extends ServiceSubscriptionMessage[Any]
case class CommandErr(subj: Any, data: String) extends ServiceSubscriptionMessage[Any]
/** Marks a subject's data as stale; always eligible for caching. */
case class Stale(subj: Any) extends ServiceSubscriptionMessage[Any] with CacheableMessage {
  override val canBeCached: Boolean = true
}
case class RegisterComponent(component: ComponentKey, ref: ActorRef) | intelix/eventstreams | es-core/es-api/src/main/scala/eventstreams/ServiceSubscriptionMessage.scala | Scala | apache-2.0 | 2,349 |
package picasso.frontend.basic
object Symbol {
  // Monotonically increasing id source. Kept public (a plain var) for
  // compatibility with existing callers.
  var idCounter = 0
  /** Returns the next unique symbol id.
    *
    * Synchronized because the read-modify-write on `idCounter` is not
    * atomic: unsynchronized concurrent calls could hand out duplicate ids,
    * which would break the id-based equality/hashing of [[Symbol]].
    */
  def freshId = synchronized {
    idCounter += 1
    idCounter
  }
  /** Creates a term symbol with the given name (and a fresh id). */
  def apply(name: String) = TermSymbol(name)
}
/** A named entity with a globally unique numeric identity. */
sealed abstract class Symbol extends Typed {
  // Unique per-instance identity; equality below is based on it.
  val id = Symbol.freshId
  //TODO equality on id
  // NOTE(review): TermSymbol is a case class, so the compiler synthesizes
  // its own equals/hashCode from `name`, overriding the id-based versions
  // defined here — TermSymbol equality may therefore ignore `id`. Confirm
  // which semantics is intended.
  override def equals(x: Any) = x match {
    case x: Symbol => x.id == id
    case _ => false
  }
  override def hashCode() = id.hashCode()
}
/** Sentinel for "no symbol assigned yet" (see trait Sym). */
case object NoSymbol extends Symbol {}
/** Sentinel for a symbol that failed resolution. */
case object ErrorSymbol extends Symbol {}
/** A named term symbol; printed as name#id for disambiguation. */
case class TermSymbol(name: String) extends Symbol {
  //TODO
  override def toString = name + "#" + id
}
/** Mixin for AST nodes that carry a (mutable) resolved symbol.
  * Starts as NoSymbol until symbol resolution assigns one. */
trait Sym {
  var symbol: Symbol = NoSymbol
  // Returns `this` so the setter can be chained fluently during construction.
  def setSymbol(s: Symbol): this.type = {
    symbol = s
    this
  }
}
| dzufferey/picasso | frontend/basic/src/main/scala/picasso/frontend/basic/Symbol.scala | Scala | bsd-2-clause | 722 |
import leon.lang._
/**
 * Verification testcase for the Leon verifier (note the Leon-specific
 * `invariant (...)` clause attached to the while loop and the `ensuring`
 * postconditions; this is not plain compilable Scala).
 *
 * `isqrt` computes the integer square root bit-by-bit and is expected to
 * verify; `buggyIsqrt` is an intentionally faulty copy (missing
 * parentheses, see inline comment) that the verifier should reject.
 * Do NOT "fix" buggyIsqrt — the defect is the point of the testcase.
 */
object Sqrt {
  // Correct version: expected to satisfy its postcondition.
  def isqrt(num0: Int): Int = {
    require(num0 >= 0)
    var num = num0
    var res: Int = 0
    var bit: Int = 1 << 30 // The second-to-top bit is set: 1 << 30 for 32 bits
    // "bit" starts at the highest power of four <= the argument.
    while (bit > num) {
      bit >>= 2
    }
    (while (bit != 0) {
      if (num >= res + bit) {
        num = num - (res + bit)
        res = (res >> 1) + bit
      } else {
        res >>= 1
      }
      bit >>= 2
    }) invariant (res * res <= num0)
    res
  } ensuring (res => res * res <= num0 && (res + 1)*(res + 1) > num0)
  // Deliberately broken version: the verifier should report the
  // postcondition violation introduced by the missing parentheses below.
  def buggyIsqrt(num0: Int): Int = {
    require(num0 >= 0)
    var num = num0
    var res: Int = 0
    var bit: Int = 1 << 30 // The second-to-top bit is set: 1 << 30 for 32 bits
    // "bit" starts at the highest power of four <= the argument.
    while (bit > num) {
      bit >>= 2
    }
    (while (bit != 0) {
      if (num >= res + bit) {
        num = num - res + bit //bug here, missing parenthesis
        res = (res >> 1) + bit
      } else {
        res >>= 1
      }
      bit >>= 2
    }) invariant (res * res <= num0)
    res
  } ensuring (res => res * res <= num0 && (res + 1)*(res + 1) > num0)
}
| regb/leon | testcases/verification/xlang/Sqrt.scala | Scala | gpl-3.0 | 1,251 |
package reswing
import scala.swing.{Color, Dimension, Font, Table}
import scala.swing.Table.{AutoResizeMode, ElementMode, IntervalMode}
import scala.swing.event.{
TableChanged, TableColumnsSelected, TableRowsAdded, TableRowsRemoved, TableRowsSelected, TableStructureChanged,
TableUpdated
}
/**
 * Reactive wrapper for `scala.swing.Table`: exposes the table's model data
 * (`rowData`, `columnNames`, `editable`), rendering options and selection
 * state as `ReSwingValue`s, and model/selection changes as `ReSwingEvent`s.
 *
 * Fix: the fallback readers for foreign (non-`ReTableModel`) models iterated
 * with `0 to get{Row,Column}Count()`, reading one index past the end
 * (`TableModel` indices are zero-based, valid range `0 until count`); these
 * now use `until`.
 */
class ReTable[A <: AnyRef](
    val rowData: ReSwingValue[Seq[Seq[A]]] = ReSwingNoValue[Seq[Seq[A]]](),
    val columnNames: ReSwingValue[Seq[String]] = (),
    val editable: ReSwingValue[ReTable.Editable] = (),
    val rowHeight: ReSwingValue[Int] = (),
    val autoResizeMode: ReSwingValue[AutoResizeMode.Value] = (),
    val gridColor: ReSwingValue[Color] = (),
    val showHorizontalLines: ReSwingValue[Boolean] = (),
    val showVerticalLines: ReSwingValue[Boolean] = (),
    val fillsViewportHeight: ReSwingValue[Boolean] = (),
    val selectionForeground: ReSwingValue[Color] = (),
    val selectionBackground: ReSwingValue[Color] = (),
    selectColumnInterval: ReSwingEvent[(Int, Int)] = (),
    selectRowInterval: ReSwingEvent[(Int, Int)] = (),
    selectAll: ReSwingEvent[Unit] = (),
    clearSelection: ReSwingEvent[Unit] = (),
    `selection.intervalMode`: ReSwingValue[IntervalMode.Value] = (),
    `selection.elementMode`: ReSwingValue[ElementMode.Value] = (),
    background: ReSwingValue[Color] = (),
    foreground: ReSwingValue[Color] = (),
    font: ReSwingValue[Font] = (),
    enabled: ReSwingValue[Boolean] = (),
    minimumSize: ReSwingValue[Dimension] = (),
    maximumSize: ReSwingValue[Dimension] = (),
    preferredSize: ReSwingValue[Dimension] = ()
) extends ReComponent(background, foreground, font, enabled, minimumSize, maximumSize, preferredSize) {
  override protected lazy val peer = new Table with ComponentMixin
  // Model currently carrying `modelListener`; tracked so the listener can be
  // moved when the peer's model is replaced.
  private var model: javax.swing.table.TableModel = _
  // Translates javax.swing TableModelEvents into scala.swing table events
  // published on the peer.
  val modelListener = new javax.swing.event.TableModelListener {
    def tableChanged(e: javax.swing.event.TableModelEvent) =
      peer publish (
        e.getType match {
          case javax.swing.event.TableModelEvent.UPDATE =>
            if (
              e.getFirstRow == 0 &&
              e.getLastRow == Int.MaxValue &&
              e.getColumn == javax.swing.event.TableModelEvent.ALL_COLUMNS
            )
              TableChanged(peer)
            else if (e.getFirstRow == javax.swing.event.TableModelEvent.HEADER_ROW)
              TableStructureChanged(peer)
            else
              TableUpdated(peer, e.getFirstRow to e.getLastRow, e.getColumn)
          case javax.swing.event.TableModelEvent.INSERT =>
            TableRowsAdded(peer, e.getFirstRow to e.getLastRow)
          case javax.swing.event.TableModelEvent.DELETE =>
            TableRowsRemoved(peer, e.getFirstRow to e.getLastRow)
        }
      )
  }
  /** Re-attaches `modelListener` after the peer's model has been replaced. */
  def modelChanged(): Unit = {
    if (model != null)
      model removeTableModelListener modelListener
    if (peer.peer.getModel != null)
      peer.peer.getModel addTableModelListener modelListener
    model = peer.peer.getModel
  }
  peer.peer setModel new ReTable.ReTableModel[A]
  modelChanged()
  rowData.using(
    { () =>
      peer.peer.getModel match {
        case model: ReTable.ReTableModel[A @unchecked] =>
          model.getRowData
        case model =>
          // Generic fallback: copy the data out of a foreign model.
          // Fixed off-by-one: indices run 0 until count, not 0 to count.
          for (r <- 0 until model.getRowCount())
            yield {
              for (c <- 0 until model.getColumnCount())
                yield (model.getValueAt(r, c)).asInstanceOf[A]
            }
      }
    },
    { rowData =>
      (peer.peer.getModel match {
        case model: ReTable.ReTableModel[A @unchecked] => model
        case _ =>
          val model = new ReTable.ReTableModel[A]
          peer.peer setModel model
          modelChanged()
          model
      })() = Left(rowData)
    },
    classOf[TableChanged],
    classOf[TableRowsRemoved],
    classOf[TableRowsAdded],
    classOf[TableStructureChanged],
    classOf[TableUpdated]
  )
  columnNames.using(
    { () =>
      peer.peer.getModel match {
        case model: ReTable.ReTableModel[_] =>
          model.getColumnNames
        case model =>
          // Fixed off-by-one: column indices run 0 until columnCount.
          for (c <- 0 until model.getColumnCount())
            yield model getColumnName c
      }
    },
    { columnNames =>
      (peer.peer.getModel match {
        case model: ReTable.ReTableModel[_] => model
        case _ =>
          val model = new ReTable.ReTableModel[A]
          peer.peer setModel model
          modelChanged()
          model
      })() = Right(columnNames)
    },
    classOf[TableStructureChanged]
  )
  editable.using(
    { () =>
      peer.peer.getModel match {
        case model: ReTable.ReTableModel[_] =>
          model.getCellEditable
        case model =>
          (row, column) => model.isCellEditable(row, column)
      }
    },
    { editable =>
      (peer.peer.getModel match {
        case model: ReTable.ReTableModel[_] => model
        case _ =>
          val model = new ReTable.ReTableModel[A]
          peer.peer setModel model
          modelChanged()
          model
      }) setCellEditable editable
    },
    classOf[TableStructureChanged]
  )
  rowHeight.using({ () => peer.rowHeight }, peer.rowHeight = _, "rowHeight")
  autoResizeMode.using({ () => peer.autoResizeMode }, peer.autoResizeMode = _, "autoResizeMode")
  showHorizontalLines.using(
    () => peer.peer.getShowHorizontalLines(),
    { peer.peer.setShowHorizontalLines(_) },
    "showHorizontalLines"
  )
  showVerticalLines.using(
    () => peer.peer.getShowVerticalLines(),
    { peer.peer.setShowVerticalLines(_) },
    "showVerticalLines"
  )
  gridColor.using({ () => peer.gridColor }, peer.gridColor = _, "gridColor")
  fillsViewportHeight.using(
    () => peer.peer.getFillsViewportHeight(),
    { peer.peer.setFillsViewportHeight(_) },
    "fillsViewportHeight"
  )
  selectionForeground.using(() => peer.selectionForeground, peer.selectionForeground = _, "selectionForeground")
  selectionBackground.using({ () => peer.selectionBackground }, peer.selectionBackground = _, "selectionBackground")
  // A (-1, x) or (x, -1) interval is interpreted as "clear the selection".
  selectColumnInterval using { range =>
    if (range._1 == -1 || range._2 == -1)
      peer.peer.clearSelection
    else
      peer.peer.setColumnSelectionInterval(range._1, range._2)
  }
  selectRowInterval using { range =>
    if (range._1 == -1 || range._2 == -1)
      peer.peer.clearSelection
    else
      peer.peer.setRowSelectionInterval(range._1, range._2)
  }
  selectAll.using(() => peer.peer.selectAll())
  clearSelection.using(() => peer.peer.clearSelection())
  // Model-change events re-published as reactive events.
  val changed = ReSwingEvent using classOf[TableChanged]
  val structureChanged = ReSwingEvent using classOf[TableStructureChanged]
  val updated = ReSwingEvent using classOf[TableUpdated]
  val rowsAdded = ReSwingEvent using classOf[TableRowsAdded]
  val rowsRemoved = ReSwingEvent using classOf[TableRowsRemoved]
  /** Reactive view over the table's selection model. */
  class ReSelection(
      val intervalMode: ReSwingValue[IntervalMode.Value],
      val elementMode: ReSwingValue[ElementMode.Value]
  ) {
    protected[ReTable] val peer = ReTable.this.peer.selection
    val columnLeadIndex = ReSwingValue.using(
      { () => peer.columns.leadIndex },
      (peer, classOf[TableColumnsSelected])
    )
    val columnAnchorIndex = ReSwingValue.using(
      { () => peer.columns.anchorIndex },
      (peer, classOf[TableColumnsSelected])
    )
    val rowLeadIndex = ReSwingValue.using(
      { () => peer.rows.leadIndex },
      (peer, classOf[TableRowsSelected])
    )
    val rowAnchorIndex = ReSwingValue.using(
      { () => peer.rows.anchorIndex },
      (peer, classOf[TableRowsSelected])
    )
    val columns = ReSwingValue.using(
      { () => peer.columns.toSet },
      (peer, classOf[TableColumnsSelected])
    )
    val rows = ReSwingValue.using(
      { () => peer.rows.toSet },
      (peer, classOf[TableRowsSelected])
    )
    // Selected cells depend on both row and column selection events.
    val cells = ReSwingValue.using(
      { () => peer.cells.toSet },
      (peer, classOf[TableColumnsSelected]),
      (peer, classOf[TableRowsSelected])
    )
    intervalMode.using({ () => peer.intervalMode }, peer.intervalMode_= _)
    elementMode.using(
      { () => peer.elementMode },
      peer.elementMode = _,
      "columnSelectionAllowed",
      "rowSelectionAllowed",
      "cellSelectionEnabled"
    )
    val columnsSelected = ReSwingEvent.using(peer, classOf[TableColumnsSelected])
    val rowsSelected = ReSwingEvent.using(peer, classOf[TableRowsSelected])
  }
  object selection
      extends ReSelection(
        `selection.intervalMode`,
        `selection.elementMode`
      )
}
object ReTable {
  /** Implicit view so a ReTable can be used wherever a scala.swing.Table is expected. */
  implicit def toTable[A <: AnyRef](component: ReTable[A]): Table = component.peer
  /** Per-cell editability predicate: (row, column) => editable. */
  type Editable = (Int, Int) => Boolean
  object Editable {
    val All: Editable = (_, _) => true
    val None: Editable = (_, _) => false
  }
  /** Table model backed by immutable row data; used as the default model
    * installed by ReTable, updated via `update` with either new rows or
    * new column names. */
  class ReTableModel[A <: AnyRef] extends javax.swing.table.AbstractTableModel {
    private var rowData = Seq.empty[Seq[A]]
    private var columnNames = Seq.empty[String]
    private var editable: Editable = _
    // Left = replace the row data, Right = replace the column names;
    // fires the matching javax.swing model event in each case.
    def update(values: Either[Seq[Seq[A]], Seq[String]]): Unit = {
      values match {
        case Left(data) =>
          rowData = data
          fireTableDataChanged
        case Right(names) =>
          columnNames = names
          fireTableStructureChanged
      }
    }
    def setCellEditable(cellEditable: Editable): Unit = {
      editable = cellEditable
    }
    def getRowData = rowData
    def getColumnNames = columnNames
    def getCellEditable = editable
    def getRowCount = rowData.length
    def getColumnCount = columnNames.length
    // Out-of-range coordinates yield null instead of throwing, since rows
    // may be shorter than the declared column count.
    def getValueAt(row: Int, col: Int) = {
      if (rowData.isDefinedAt(row)) {
        val data = rowData(row)
        if (data.isDefinedAt(col))
          data(col)
        else
          null
      } else
        null
    }
    override def getColumnName(column: Int) = columnNames(column).toString
    // No predicate installed (null) means nothing is editable.
    override def isCellEditable(row: Int, column: Int) =
      if (editable != null)
        editable(row, column)
      else
        false
  }
}
| guidosalva/REScala | Code/Extensions/RESwing/src/main/scala/reswing/ReTable.scala | Scala | apache-2.0 | 9,978 |
/**
* Created on: Mar 6, 2013
*/
package com.tubros.constraints.api
/**
* The '''solver''' `package` defines CSP types which reify abstractions
* involved in providing CSP solver functionality to Smocs.
*
* @author svickers
*
*/
package object solver
{
  /// Class Types
  // A constraint over the full sequence of candidate answers at once;
  // partial, so it may be undefined for some answer combinations.
  type GlobalConstraint[A] = PartialFunction[Seq[Answer[A]], Boolean]
}
| osxhacker/smocs | smocs-api/src/main/scala/com/tubros/constraints/api/solver/package.scala | Scala | bsd-3-clause | 351 |
package io.buoyant.linkerd
import com.fasterxml.jackson.annotation.JsonIgnore
import com.twitter.finagle.{ServiceFactory, Stack, Stackable}
import io.buoyant.router.ForwardedHeaderLabeler
/**
 * Configuration of the `by` and `for` elements of the Forwarded header;
 * either may be omitted, in which case the labeler's default is used.
 */
case class AddForwardedHeaderConfig(
  by: Option[LabelerConfig],
  `for`: Option[LabelerConfig]
) {
  /** Appends AddForwardedHeader params to the given params. */
  @JsonIgnore
  def ++:(params: Stack.Params): Stack.Params = {
    val enabled = ForwardedHeaderLabeler.Enabled(true)
    params + enabled + byParam(params) + forParam(params)
  }
  /** Labeler for the `by` element, defaulting when unconfigured. */
  @JsonIgnore
  private[this] def byParam(params: Stack.Params): ForwardedHeaderLabeler.By =
    by.fold(ForwardedHeaderLabeler.By.default)(c => ForwardedHeaderLabeler.By(c.mk(params)))
  /** Labeler for the `for` element, defaulting when unconfigured. */
  @JsonIgnore
  private[this] def forParam(params: Stack.Params): ForwardedHeaderLabeler.For =
    `for`.fold(ForwardedHeaderLabeler.For.default)(c => ForwardedHeaderLabeler.For(c.mk(params)))
}
/**
* Translates AddForwardedHeaderConfig.Param into AddForwardedHeader
* configuration parameters.
*/
object AddForwardedHeaderConfig {
  // Optional config carried in the stack params; None leaves the stack untouched.
  case class Param(config: Option[AddForwardedHeaderConfig])
  implicit object Param extends Stack.Param[Param] {
    val default = Param(None)
  }
  /**
   * Configures parameters for `AddForwardedHeader`.
   *
   * Because `AddForwardedHeaderConfig` types may depend on stack
   * parameters (for instance, `Server.RouterLabel`)
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module[ServiceFactory[Req, Rep]] {
      val role = Stack.Role("ConfigureAddForwardedHeader")
      val description = "Adds params to configure an AddForwardedHeader module"
      val parameters = Seq(implicitly[Stack.Param[Param]])
      private type Stk = Stack[ServiceFactory[Req, Rep]]
      // With no config the stack passes through unchanged; with a config,
      // the inner stack is rebuilt with the config's params appended.
      def make(params: Stack.Params, stk: Stk) = params[Param] match {
        case Param(None) => stk
        case Param(Some(config)) =>
          // Wrap the underlying stack, applying the ForwardedHeaderConfig
          val mkNext: (Stack.Params, Stk) => Stk =
            (prms, next) => Stack.leaf(this, next.make(prms ++: config))
          Stack.node(this, mkNext, stk)
      }
    }
}
| linkerd/linkerd | linkerd/core/src/main/scala/io/buoyant/linkerd/AddForwardedHeaderConfig.scala | Scala | apache-2.0 | 2,237 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.collection
import generic._
import Seq.fill
import TraversableView.NoBuilder
/** A template trait for non-strict views of sequences.
* $seqViewInfo
*
* @define seqViewInfo
* $viewInfo
* All views for sequences are defined by re-interpreting the `length` and `apply` methods.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
* @tparam A the element type of the view
* @tparam Coll the type of the underlying collection containing the elements.
* @tparam This the type of the view itself
*/
trait SeqViewLike[+A,
                  +Coll,
                  +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
  extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
{ self =>
  // Common interface of all transformed views: each one is fully defined
  // by re-interpreting `length` and `apply` over the underlying view.
  trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
    override def length: Int
    override def apply(idx: Int): B
  }
  // View over an already-materialized sequence: delegates directly.
  trait Forced[B] extends Transformed[B] with super.Forced[B] {
    override def length = forced.length
    override def apply(idx: Int) = forced.apply(idx)
  }
  trait Sliced extends Transformed[A] with super.Sliced {
    // Clamp to the underlying length; an empty or inverted slice yields 0.
    override def length = ((until min self.length) - from) max 0
    override def apply(idx: Int): A =
      if (idx + from < until) self.apply(idx + from)
      else throw new IndexOutOfBoundsException(idx.toString)
  }
  trait Mapped[B] extends Transformed[B] with super.Mapped[B] {
    override def length = self.length
    override def apply(idx: Int): B = mapping(self apply idx)
  }
  trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B] {
    // Prefix sums of the mapped sizes: index(i) is the flat offset at which
    // the elements produced from self(i) begin. Built lazily on first access.
    protected[this] lazy val index = {
      val index = new Array[Int](self.length + 1)
      index(0) = 0
      for (i <- 0 until self.length)
        index(i + 1) = index(i) + mapping(self(i)).size
      index
    }
    // Binary search over the prefix sums for the row containing flat index idx.
    protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
      val mid = (lo + hi) / 2
      if (idx < index(mid)) findRow(idx, lo, mid - 1)
      else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
      else mid
    }
    override def length = index(self.length)
    override def apply(idx: Int) = {
      val row = findRow(idx, 0, self.length - 1)
      mapping(self(row)).toSeq(idx - index(row))
    }
  }
  trait Appended[B >: A] extends Transformed[B] with super.Appended[B] {
    // The appended collection is materialized once so it can be indexed.
    protected[this] lazy val restSeq = rest.toSeq
    override def length = self.length + restSeq.length
    override def apply(idx: Int) =
      if (idx < self.length) self(idx) else restSeq(idx - self.length)
  }
  trait Filtered extends Transformed[A] with super.Filtered {
    // Indices of the elements satisfying the predicate, computed lazily in
    // one pass and trimmed to the number of matches.
    protected[this] lazy val index = {
      var len = 0
      val arr = new Array[Int](self.length)
      for (i <- 0 until self.length)
        if (pred(self(i))) {
          arr(len) = i
          len += 1
        }
      arr take len
    }
    override def length = index.length
    override def apply(idx: Int) = self(index(idx))
  }
  trait TakenWhile extends Transformed[A] with super.TakenWhile {
    protected[this] lazy val len = self prefixLength pred
    override def length = len
    override def apply(idx: Int) =
      if (idx < len) self(idx)
      else throw new IndexOutOfBoundsException(idx.toString)
  }
  trait DroppedWhile extends Transformed[A] with super.DroppedWhile {
    protected[this] lazy val start = self prefixLength pred
    override def length = self.length - start
    override def apply(idx: Int) =
      if (idx >= 0) self(idx + start)
      else throw new IndexOutOfBoundsException(idx.toString)
  }
  trait Zipped[B] extends Transformed[(A, B)] with super.Zipped[B] {
    protected[this] lazy val thatSeq = other.toSeq
    /* Have to be careful here - other may be an infinite sequence. */
    override def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
    override def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
  }
  trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] with super.ZippedAll[A1, B] {
    protected[this] lazy val thatSeq = other.toSeq
    // Length of the longer side; the shorter side is padded with the
    // corresponding default element below.
    override def length: Int = self.length max thatSeq.length
    override def apply(idx: Int) =
      (if (idx < self.length) self.apply(idx) else thisElem,
       if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
  }
  trait Reversed extends Transformed[A] {
    override def iterator: Iterator[A] = createReversedIterator
    override def length: Int = self.length
    override def apply(idx: Int): A = self.apply(length - 1 - idx)
    override def stringPrefix = self.stringPrefix+"R"
    // Materializes the whole view by prepending into a List, which
    // naturally reverses the traversal order.
    private def createReversedIterator = {
      var lst = List[A]()
      for (elem <- self) lst ::= elem
      lst.iterator
    }
  }
  trait Patched[B >: A] extends Transformed[B] {
    protected[this] val from: Int
    protected[this] val patch: Seq[B]
    protected[this] val replaced: Int
    private lazy val plen = patch.length
    override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
    override def length: Int = self.length + plen - replaced
    // Route the index to the prefix, the patch window, or the shifted suffix.
    override def apply(idx: Int): B =
      if (idx < from) self.apply(idx)
      else if (idx < from + plen) patch.apply(idx - from)
      else self.apply(idx - plen + replaced)
    override def stringPrefix = self.stringPrefix+"P"
  }
  trait Prepended[B >: A] extends Transformed[B] {
    protected[this] val fst: B
    override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
    override def length: Int = 1 + self.length
    override def apply(idx: Int): B =
      if (idx == 0) fst
      else self.apply(idx - 1)
    override def stringPrefix = self.stringPrefix+"A"
  }
  /** Boilerplate method, to override in each subclass
   *  This method could be eliminated if Scala had virtual classes
   */
  protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
  protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
  protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
  protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
  protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
  protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
  protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
  protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
  protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
  protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
  protected def newReversed: Transformed[A] = new Reversed { }
  protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
  protected def newPrepended[B >: A](elem: B): Transformed[B] = new Prepended[B] { protected[this] val fst = elem }
  override def reverse: This = newReversed.asInstanceOf[This]
  override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
    newPatched(from, patch, replaced).asInstanceOf[That]
// was:    val b = bf(repr)
//    if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That]
//    else super.patch[B, That](from, patch, replaced)(bf)
  }
  override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
    patch(length, fill(len - length)(elem), 0)
  override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That =
    reverse.map(f)
  override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = {
    require(0 <= index && index < length)
    patch(index, List(elem), 1)(bf)
  }
  override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
    newPrepended(elem).asInstanceOf[That]
  override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
    ++(Iterator.single(elem))(bf)
  // Set-like operations are implemented by forcing the underlying sequence.
  override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
    newForced(thisSeq union that).asInstanceOf[That]
  override def diff[B >: A](that: Seq[B]): This =
    newForced(thisSeq diff that).asInstanceOf[This]
  override def intersect[B >: A](that: Seq[B]): This =
    newForced(thisSeq intersect that).asInstanceOf[This]
  override def sorted[B >: A](implicit ord: Ordering[B]): This =
    newForced(thisSeq sorted ord).asInstanceOf[This]
  override def stringPrefix = "SeqView"
}
| cran/rkafkajars | java/scala/collection/SeqViewLike.scala | Scala | apache-2.0 | 9,628 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import kafka.server.ConfigType
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.requests.{StopReplicaResponse, AbstractRequestResponse}
import collection.mutable
import collection.JavaConverters._
import kafka.utils.{ShutdownableThread, Logging, ZkUtils}
import kafka.utils.CoreUtils._
import collection.Set
import kafka.common.TopicAndPartition
import java.util.concurrent.locks.ReentrantLock
import java.util.concurrent.atomic.AtomicBoolean
/**
* This manages the state machine for topic deletion.
* 1. TopicCommand issues topic deletion by creating a new admin path /admin/delete_topics/<topic>
* 2. The controller listens for child changes on /admin/delete_topic and starts topic deletion for the respective topics
* 3. The controller has a background thread that handles topic deletion. The purpose of having this background thread
* is to accommodate the TTL feature, when we have it. This thread is signaled whenever deletion for a topic needs to
* be started or resumed. Currently, a topic's deletion can be started only by the onPartitionDeletion callback on the
* controller. In the future, it can be triggered based on the configured TTL for the topic. A topic will be ineligible
* for deletion in the following scenarios -
* 3.1 broker hosting one of the replicas for that topic goes down
* 3.2 partition reassignment for partitions of that topic is in progress
* 3.3 preferred replica election for partitions of that topic is in progress
* (though this is not strictly required since it holds the controller lock for the entire duration from start to end)
* 4. Topic deletion is resumed when -
* 4.1 broker hosting one of the replicas for that topic is started
* 4.2 preferred replica election for partitions of that topic completes
* 4.3 partition reassignment for partitions of that topic completes
* 5. Every replica for a topic being deleted is in either of the 3 states -
* 5.1 TopicDeletionStarted (Replica enters TopicDeletionStarted phase when the onPartitionDeletion callback is invoked.
* This happens when the child change watch for /admin/delete_topics fires on the controller. As part of this state
* change, the controller sends StopReplicaRequests to all replicas. It registers a callback for the
* StopReplicaResponse when deletePartition=true thereby invoking a callback when a response for delete replica
* is received from every replica)
* 5.2 TopicDeletionSuccessful (deleteTopicStopReplicaCallback() moves replicas from
* TopicDeletionStarted->TopicDeletionSuccessful depending on the error codes in StopReplicaResponse)
* 5.3 TopicDeletionFailed. (deleteTopicStopReplicaCallback() moves replicas from
* TopicDeletionStarted->TopicDeletionFailed depending on the error codes in StopReplicaResponse.
* In general, if a broker dies and if it hosted replicas for topics being deleted, the controller marks the
* respective replicas in TopicDeletionFailed state in the onBrokerFailure callback. The reason is that if a
* broker fails before the request is sent and after the replica is in TopicDeletionStarted state,
* it is possible that the replica will mistakenly remain in TopicDeletionStarted state and topic deletion
* will not be retried when the broker comes back up.)
* 6. The delete topic thread marks a topic successfully deleted only if all replicas are in TopicDeletionSuccessful
* state and it starts the topic deletion teardown mode where it deletes all topic state from the controllerContext
* as well as from zookeeper. This is the only time the /brokers/topics/<topic> path gets deleted. On the other hand,
* if no replica is in TopicDeletionStarted state and at least one replica is in TopicDeletionFailed state, then
* it marks the topic for deletion retry.
* @param controller
* @param initialTopicsToBeDeleted The topics that are queued up for deletion in zookeeper at the time of controller failover
* @param initialTopicsIneligibleForDeletion The topics ineligible for deletion due to any of the conditions mentioned in #3 above
*/
class TopicDeletionManager(controller: KafkaController,
initialTopicsToBeDeleted: Set[String] = Set.empty,
initialTopicsIneligibleForDeletion: Set[String] = Set.empty) extends Logging {
  this.logIdent = "[Topic Deletion Manager " + controller.config.brokerId + "], "
  val controllerContext = controller.controllerContext
  val partitionStateMachine = controller.partitionStateMachine
  val replicaStateMachine = controller.replicaStateMachine
  // Topics currently queued for deletion (seeded from zookeeper at failover).
  val topicsToBeDeleted: mutable.Set[String] = mutable.Set.empty[String] ++ initialTopicsToBeDeleted
  // All partitions belonging to the queued topics.
  val partitionsToBeDeleted: mutable.Set[TopicAndPartition] = topicsToBeDeleted.flatMap(controllerContext.partitionsForTopic)
  // Lock/condition pair used to park and wake the delete-topics thread.
  val deleteLock = new ReentrantLock()
  // Intersected with the deletion queue: only queued topics can be ineligible.
  val topicsIneligibleForDeletion: mutable.Set[String] = mutable.Set.empty[String] ++
    (initialTopicsIneligibleForDeletion & initialTopicsToBeDeleted)
  val deleteTopicsCond = deleteLock.newCondition()
  // Set whenever deletion work may have become available, to wake the thread.
  val deleteTopicStateChanged: AtomicBoolean = new AtomicBoolean(false)
  var deleteTopicsThread: DeleteTopicsThread = null
  val isDeleteTopicEnabled = controller.config.deleteTopicEnable
  /**
   * Invoked at the end of new controller initiation
   */
  def start() {
    if (isDeleteTopicEnabled) {
      deleteTopicsThread = new DeleteTopicsThread()
      // Signal immediately if deletions were already queued in zookeeper
      // at the time of controller failover.
      if (topicsToBeDeleted.size > 0)
        deleteTopicStateChanged.set(true)
      deleteTopicsThread.start()
    }
  }
  /**
   * Invoked when the current controller resigns. At this time, all state for topic deletion should be cleared.
   */
  def shutdown() {
    // Only allow one shutdown to go through
    if (isDeleteTopicEnabled && deleteTopicsThread.initiateShutdown()) {
      // Resume the topic deletion so it doesn't block on the condition
      resumeTopicDeletionThread()
      // Await delete topic thread to exit
      deleteTopicsThread.awaitShutdown()
      // Clear queued state only after the thread has fully stopped, so it
      // cannot observe a partially-cleared queue.
      topicsToBeDeleted.clear()
      partitionsToBeDeleted.clear()
      topicsIneligibleForDeletion.clear()
    }
  }
/**
* Invoked by the child change listener on /admin/delete_topics to queue up the topics for deletion. The topic gets added
* to the topicsToBeDeleted list and only gets removed from the list when the topic deletion has completed successfully
* i.e. all replicas of all partitions of that topic are deleted successfully.
* @param topics Topics that should be deleted
*/
def enqueueTopicsForDeletion(topics: Set[String]) {
if(isDeleteTopicEnabled) {
topicsToBeDeleted ++= topics
partitionsToBeDeleted ++= topics.flatMap(controllerContext.partitionsForTopic)
resumeTopicDeletionThread()
}
}
/**
* Invoked when any event that can possibly resume topic deletion occurs. These events include -
* 1. New broker starts up. Any replicas belonging to topics queued up for deletion can be deleted since the broker is up
* 2. Partition reassignment completes. Any partitions belonging to topics queued up for deletion finished reassignment
* 3. Preferred replica election completes. Any partitions belonging to topics queued up for deletion finished
* preferred replica election
* @param topics Topics for which deletion can be resumed
*/
def resumeDeletionForTopics(topics: Set[String] = Set.empty) {
if(isDeleteTopicEnabled) {
val topicsToResumeDeletion = topics & topicsToBeDeleted
if(topicsToResumeDeletion.size > 0) {
topicsIneligibleForDeletion --= topicsToResumeDeletion
resumeTopicDeletionThread()
}
}
}
/**
* Invoked when a broker that hosts replicas for topics to be deleted goes down. Also invoked when the callback for
* StopReplicaResponse receives an error code for the replicas of a topic to be deleted. As part of this, the replicas
* are moved from ReplicaDeletionStarted to ReplicaDeletionIneligible state. Also, the topic is added to the list of topics
* ineligible for deletion until further notice. The delete topic thread is notified so it can retry topic deletion
* if it has received a response for all replicas of a topic to be deleted
* @param replicas Replicas for which deletion has failed
*/
def failReplicaDeletion(replicas: Set[PartitionAndReplica]) {
if(isDeleteTopicEnabled) {
val replicasThatFailedToDelete = replicas.filter(r => isTopicQueuedUpForDeletion(r.topic))
if(replicasThatFailedToDelete.size > 0) {
val topics = replicasThatFailedToDelete.map(_.topic)
debug("Deletion failed for replicas %s. Halting deletion for topics %s"
.format(replicasThatFailedToDelete.mkString(","), topics))
controller.replicaStateMachine.handleStateChanges(replicasThatFailedToDelete, ReplicaDeletionIneligible)
markTopicIneligibleForDeletion(topics)
resumeTopicDeletionThread()
}
}
}
/**
* Halt delete topic if -
* 1. replicas being down
* 2. partition reassignment in progress for some partitions of the topic
* 3. preferred replica election in progress for some partitions of the topic
* @param topics Topics that should be marked ineligible for deletion. No op if the topic is was not previously queued up for deletion
*/
def markTopicIneligibleForDeletion(topics: Set[String]) {
if(isDeleteTopicEnabled) {
val newTopicsToHaltDeletion = topicsToBeDeleted & topics
topicsIneligibleForDeletion ++= newTopicsToHaltDeletion
if(newTopicsToHaltDeletion.size > 0)
info("Halted deletion of topics %s".format(newTopicsToHaltDeletion.mkString(",")))
}
}
def isTopicIneligibleForDeletion(topic: String): Boolean = {
if(isDeleteTopicEnabled) {
topicsIneligibleForDeletion.contains(topic)
} else
true
}
def isTopicDeletionInProgress(topic: String): Boolean = {
if(isDeleteTopicEnabled) {
controller.replicaStateMachine.isAtLeastOneReplicaInDeletionStartedState(topic)
} else
false
}
def isPartitionToBeDeleted(topicAndPartition: TopicAndPartition) = {
if(isDeleteTopicEnabled) {
partitionsToBeDeleted.contains(topicAndPartition)
} else
false
}
def isTopicQueuedUpForDeletion(topic: String): Boolean = {
if(isDeleteTopicEnabled) {
topicsToBeDeleted.contains(topic)
} else
false
}
  /**
   * Invoked by the delete-topic-thread to wait until events that either trigger, restart or halt topic deletion occur.
   * controllerLock should be acquired before invoking this API
   */
  private def awaitTopicDeletionNotification() {
    inLock(deleteLock) {
      // The CAS atomically consumes a pending notification: the loop exits either because
      // the thread is shutting down or because this thread claimed the flag. Re-checking
      // in a while loop also guards against spurious wakeups of Condition.await().
      while(deleteTopicsThread.isRunning.get() && !deleteTopicStateChanged.compareAndSet(true, false)) {
        debug("Waiting for signal to start or continue topic deletion")
        deleteTopicsCond.await()
      }
    }
  }
  /**
   * Signals the delete-topic-thread to process topic deletion
   */
  private def resumeTopicDeletionThread() {
    // The flag is set before taking the lock and signalling: a waiter either observes the
    // flag through its CAS or is woken by the signal below, so notifications are not lost.
    deleteTopicStateChanged.set(true)
    inLock(deleteLock) {
      deleteTopicsCond.signal()
    }
  }
/**
* Invoked by the StopReplicaResponse callback when it receives no error code for a replica of a topic to be deleted.
* As part of this, the replicas are moved from ReplicaDeletionStarted to ReplicaDeletionSuccessful state. The delete
* topic thread is notified so it can tear down the topic if all replicas of a topic have been successfully deleted
* @param replicas Replicas that were successfully deleted by the broker
*/
private def completeReplicaDeletion(replicas: Set[PartitionAndReplica]) {
val successfullyDeletedReplicas = replicas.filter(r => isTopicQueuedUpForDeletion(r.topic))
debug("Deletion successfully completed for replicas %s".format(successfullyDeletedReplicas.mkString(",")))
controller.replicaStateMachine.handleStateChanges(successfullyDeletedReplicas, ReplicaDeletionSuccessful)
resumeTopicDeletionThread()
}
/**
* Topic deletion can be retried if -
* 1. Topic deletion is not already complete
* 2. Topic deletion is currently not in progress for that topic
* 3. Topic is currently marked ineligible for deletion
* @param topic Topic
* @return Whether or not deletion can be retried for the topic
*/
private def isTopicEligibleForDeletion(topic: String): Boolean = {
topicsToBeDeleted.contains(topic) && (!isTopicDeletionInProgress(topic) && !isTopicIneligibleForDeletion(topic))
}
/**
* If the topic is queued for deletion but deletion is not currently under progress, then deletion is retried for that topic
* To ensure a successful retry, reset states for respective replicas from ReplicaDeletionIneligible to OfflineReplica state
*@param topic Topic for which deletion should be retried
*/
private def markTopicForDeletionRetry(topic: String) {
// reset replica states from ReplicaDeletionIneligible to OfflineReplica
val failedReplicas = controller.replicaStateMachine.replicasInState(topic, ReplicaDeletionIneligible)
info("Retrying delete topic for topic %s since replicas %s were not successfully deleted"
.format(topic, failedReplicas.mkString(",")))
controller.replicaStateMachine.handleStateChanges(failedReplicas, OfflineReplica)
}
  /**
   * Final stage of topic deletion, run once every replica of the topic is in
   * ReplicaDeletionSuccessful state. Removes all controller-side state for the topic and
   * deletes its zookeeper paths. The /admin/delete_topics marker is removed last, after
   * the topic and config paths, and only then is the topic dropped from the controller
   * context.
   */
  private def completeDeleteTopic(topic: String) {
    // deregister partition change listener on the deleted topic. This is to prevent the partition change listener
    // firing before the new topic listener when a deleted topic gets auto created
    partitionStateMachine.deregisterPartitionChangeListener(topic)
    val replicasForDeletedTopic = controller.replicaStateMachine.replicasInState(topic, ReplicaDeletionSuccessful)
    // controller will remove this replica from the state machine as well as its partition assignment cache
    replicaStateMachine.handleStateChanges(replicasForDeletedTopic, NonExistentReplica)
    val partitionsForDeletedTopic = controllerContext.partitionsForTopic(topic)
    // move respective partition to OfflinePartition and NonExistentPartition state
    partitionStateMachine.handleStateChanges(partitionsForDeletedTopic, OfflinePartition)
    partitionStateMachine.handleStateChanges(partitionsForDeletedTopic, NonExistentPartition)
    topicsToBeDeleted -= topic
    partitionsToBeDeleted.retain(_.topic != topic)
    controllerContext.zkClient.deleteRecursive(ZkUtils.getTopicPath(topic))
    controllerContext.zkClient.deleteRecursive(ZkUtils.getEntityConfigPath(ConfigType.Topic, topic))
    controllerContext.zkClient.delete(ZkUtils.getDeleteTopicPath(topic))
    controllerContext.removeTopic(topic)
  }
  /**
   * This callback is invoked by the DeleteTopics thread with the list of topics to be deleted
   * It invokes the delete partition callback for all partitions of a topic.
   * The updateMetadataRequest is also going to set the leader for the topics being deleted to
   * {@link LeaderAndIsr#LeaderDuringDelete}. This lets each broker know that this topic is being deleted and can be
   * removed from their caches.
   */
  private def onTopicDeletion(topics: Set[String]) {
    info("Topic deletion callback for %s".format(topics.mkString(",")))
    // send update metadata so that brokers stop serving data for topics to be deleted
    val partitions = topics.flatMap(controllerContext.partitionsForTopic)
    controller.sendUpdateMetadataRequest(controllerContext.liveOrShuttingDownBrokerIds.toSeq, partitions)
    val partitionReplicaAssignmentByTopic = controllerContext.partitionReplicaAssignment.groupBy(p => p._1.topic)
    topics.foreach { topic =>
      // NOTE(review): the direct apply on the grouped map throws NoSuchElementException if
      // a queued topic has no entry in partitionReplicaAssignment — confirm this cannot
      // happen for topics reaching this callback.
      onPartitionDeletion(partitionReplicaAssignmentByTopic(topic).map(_._1).toSet)
    }
  }
/**
* Invoked by the onPartitionDeletion callback. It is the 2nd step of topic deletion, the first being sending
* UpdateMetadata requests to all brokers to start rejecting requests for deleted topics. As part of starting deletion,
* the topics are added to the in progress list. As long as a topic is in the in progress list, deletion for that topic
* is never retried. A topic is removed from the in progress list when
* 1. Either the topic is successfully deleted OR
* 2. No replica for the topic is in ReplicaDeletionStarted state and at least one replica is in ReplicaDeletionIneligible state
* If the topic is queued for deletion but deletion is not currently under progress, then deletion is retried for that topic
* As part of starting deletion, all replicas are moved to the ReplicaDeletionStarted state where the controller sends
* the replicas a StopReplicaRequest (delete=true)
* This callback does the following things -
* 1. Move all dead replicas directly to ReplicaDeletionIneligible state. Also mark the respective topics ineligible
* for deletion if some replicas are dead since it won't complete successfully anyway
* 2. Move all alive replicas to ReplicaDeletionStarted state so they can be deleted successfully
*@param replicasForTopicsToBeDeleted
*/
private def startReplicaDeletion(replicasForTopicsToBeDeleted: Set[PartitionAndReplica]) {
replicasForTopicsToBeDeleted.groupBy(_.topic).foreach { case(topic, replicas) =>
var aliveReplicasForTopic = controllerContext.allLiveReplicas().filter(p => p.topic.equals(topic))
val deadReplicasForTopic = replicasForTopicsToBeDeleted -- aliveReplicasForTopic
val successfullyDeletedReplicas = controller.replicaStateMachine.replicasInState(topic, ReplicaDeletionSuccessful)
val replicasForDeletionRetry = aliveReplicasForTopic -- successfullyDeletedReplicas
// move dead replicas directly to failed state
replicaStateMachine.handleStateChanges(deadReplicasForTopic, ReplicaDeletionIneligible)
// send stop replica to all followers that are not in the OfflineReplica state so they stop sending fetch requests to the leader
replicaStateMachine.handleStateChanges(replicasForDeletionRetry, OfflineReplica)
debug("Deletion started for replicas %s".format(replicasForDeletionRetry.mkString(",")))
controller.replicaStateMachine.handleStateChanges(replicasForDeletionRetry, ReplicaDeletionStarted,
new Callbacks.CallbackBuilder().stopReplicaCallback(deleteTopicStopReplicaCallback).build)
if(deadReplicasForTopic.size > 0) {
debug("Dead Replicas (%s) found for topic %s".format(deadReplicasForTopic.mkString(","), topic))
markTopicIneligibleForDeletion(Set(topic))
}
}
}
/**
* This callback is invoked by the delete topic callback with the list of partitions for topics to be deleted
* It does the following -
* 1. Send UpdateMetadataRequest to all live brokers (that are not shutting down) for partitions that are being
* deleted. The brokers start rejecting all client requests with UnknownTopicOrPartitionException
* 2. Move all replicas for the partitions to OfflineReplica state. This will send StopReplicaRequest to the replicas
* and LeaderAndIsrRequest to the leader with the shrunk ISR. When the leader replica itself is moved to OfflineReplica state,
* it will skip sending the LeaderAndIsrRequest since the leader will be updated to -1
* 3. Move all replicas to ReplicaDeletionStarted state. This will send StopReplicaRequest with deletePartition=true. And
* will delete all persistent data from all replicas of the respective partitions
*/
private def onPartitionDeletion(partitionsToBeDeleted: Set[TopicAndPartition]) {
info("Partition deletion callback for %s".format(partitionsToBeDeleted.mkString(",")))
val replicasPerPartition = controllerContext.replicasForPartition(partitionsToBeDeleted)
startReplicaDeletion(replicasPerPartition)
}
  /**
   * Callback for StopReplicaResponses produced during topic deletion. Partitions whose
   * response (or the whole request) carries a non-NONE error are funneled into
   * failReplicaDeletion; the remainder are reported as successfully deleted. All state
   * transitions happen under the controller lock.
   */
  private def deleteTopicStopReplicaCallback(stopReplicaResponseObj: AbstractRequestResponse, replicaId: Int) {
    val stopReplicaResponse = stopReplicaResponseObj.asInstanceOf[StopReplicaResponse]
    debug("Delete topic callback invoked for %s".format(stopReplicaResponse))
    val responseMap = stopReplicaResponse.responses.asScala
    // A top-level error fails every partition in the response; otherwise only the
    // partitions with per-partition errors are considered failed.
    val partitionsInError =
      if (stopReplicaResponse.errorCode != Errors.NONE.code) responseMap.keySet
      else responseMap.filter { case (_, error) => error != Errors.NONE.code }.map(_._1).toSet
    val replicasInError = partitionsInError.map(p => PartitionAndReplica(p.topic, p.partition, replicaId))
    inLock(controllerContext.controllerLock) {
      // move all the failed replicas to ReplicaDeletionIneligible
      failReplicaDeletion(replicasInError)
      if (replicasInError.size != responseMap.size) {
        // some replicas could have been successfully deleted
        val deletedReplicas = responseMap.keySet -- partitionsInError
        completeReplicaDeletion(deletedReplicas.map(p => PartitionAndReplica(p.topic, p.partition, replicaId)))
      }
    }
  }
  /**
   * Worker thread that repeatedly waits for a deletion notification and then, under the
   * controller lock, walks every queued topic through the deletion state machine:
   * completing topics whose replicas are all deleted, leaving in-flight topics alone,
   * marking failed topics for retry, and (re)starting deletion for eligible topics.
   */
  class DeleteTopicsThread() extends ShutdownableThread(name = "delete-topics-thread-" + controller.config.brokerId, isInterruptible = false) {
    val zkClient = controllerContext.zkClient
    override def doWork() {
      // Blocks until deleteTopicStateChanged is raised or the thread is shut down.
      awaitTopicDeletionNotification()
      if (!isRunning.get)
        return
      inLock(controllerContext.controllerLock) {
        // Work on an immutable snapshot so mutations during iteration are safe.
        val topicsQueuedForDeletion = Set.empty[String] ++ topicsToBeDeleted
        if(!topicsQueuedForDeletion.isEmpty)
          info("Handling deletion for topics " + topicsQueuedForDeletion.mkString(","))
        topicsQueuedForDeletion.foreach { topic =>
          // if all replicas are marked as deleted successfully, then topic deletion is done
          if(controller.replicaStateMachine.areAllReplicasForTopicDeleted(topic)) {
            // clear up all state for this topic from controller cache and zookeeper
            completeDeleteTopic(topic)
            info("Deletion of topic %s successfully completed".format(topic))
          } else {
            if(controller.replicaStateMachine.isAtLeastOneReplicaInDeletionStartedState(topic)) {
              // ignore since topic deletion is in progress
              val replicasInDeletionStartedState = controller.replicaStateMachine.replicasInState(topic, ReplicaDeletionStarted)
              val replicaIds = replicasInDeletionStartedState.map(_.replica)
              val partitions = replicasInDeletionStartedState.map(r => TopicAndPartition(r.topic, r.partition))
              info("Deletion for replicas %s for partition %s of topic %s in progress".format(replicaIds.mkString(","),
                partitions.mkString(","), topic))
            } else {
              // if you come here, then no replica is in TopicDeletionStarted and all replicas are not in
              // TopicDeletionSuccessful. That means, that either given topic haven't initiated deletion
              // or there is at least one failed replica (which means topic deletion should be retried).
              if(controller.replicaStateMachine.isAnyReplicaInState(topic, ReplicaDeletionIneligible)) {
                // mark topic for deletion retry
                markTopicForDeletionRetry(topic)
              }
            }
          }
          // Try delete topic if it is eligible for deletion.
          if(isTopicEligibleForDeletion(topic)) {
            info("Deletion of topic %s (re)started".format(topic))
            // topic deletion will be kicked off
            onTopicDeletion(Set(topic))
          } else if(isTopicIneligibleForDeletion(topic)) {
            info("Not retrying deletion of topic %s at this time since it is marked ineligible for deletion".format(topic))
          }
        }
      }
    }
  }
}
| usakey/kafka | core/src/main/scala/kafka/controller/TopicDeletionManager.scala | Scala | apache-2.0 | 24,504 |
/*
* Copyright (C) 17/05/13 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package fr.geocites.simpuzzle.neighbourhood
/**
 * Strategy for computing the neighbourhood of a cell in a 2D grid. Concrete
 * implementations decide which surrounding cells belong to the neighbourhood
 * (e.g. Moore vs. von Neumann — not determined by this trait).
 */
trait MatrixNeighbourhood {
  /**
   * Returns the neighbours of the cell at (i, j).
   *
   * @param cells accessor returning the cell content at a (row, column) coordinate
   * @param i row index of the centre cell
   * @param j column index of the centre cell
   * @param neighborhoodSize extent of the neighbourhood (exact semantics defined by the
   *                         implementation — TODO confirm radius vs. diameter)
   */
  def neighbors[T](
    cells: (Int, Int) => T,
    i: Int,
    j: Int,
    neighborhoodSize: Int): Seq[T]
}
| ISCPIF/PSEExperiments | simpuzzle-src/simpuzzle/src/main/scala/fr/geocites/simpuzzle/neighbourhood/MatrixNeighbourhood.scala | Scala | agpl-3.0 | 895 |
package app.components.mui
import japgolly.scalajs.react
import japgolly.scalajs.react.Children
import japgolly.scalajs.react.vdom.VdomNode
import scala.scalajs.js
object DialogContent {
  /** The material-ui DialogContent, wrapped as a scalajs-react JS component. */
  val component = react.JsComponent[js.Object, Children.Varargs, Null](MuiComponents.DialogContent)

  /** Renders a DialogContent around the given children; no props are forwarded. */
  def apply(children: VdomNode*) = {
    val emptyProps = js.Dynamic.literal()
    component(emptyProps)(children: _*)
  }
}
| Igorocky/lesn | client/src/main/scala/app/components/mui/DialogContent.scala | Scala | mit | 406 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.ctrl.tasks
import akka.actor.ActorSelection
import akka.util.Timeout
import cmwell.ctrl.checkers.{CassandraDown, CassandraOk, ElasticsearchDown, GreenStatus}
import cmwell.ctrl.commands._
import cmwell.ctrl.controllers.CassandraController
import cmwell.ctrl.hc._
import cmwell.ctrl.server.CommandActor
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid
import scala.concurrent.{Future, Promise}
import scala.concurrent.duration._
import akka.pattern.ask
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Task that clears a cluster node: repeatedly asks the node to stop Cassandra and
 * Elasticsearch, polls the health actor until both report down (or are absent), and only
 * then stops the CM-WELL components on that node.
 *
 * Created by michael on 3/10/16.
 */
case class ClearNode(node: String) extends Task with LazyLogging {
  implicit val timeout = Timeout(15.seconds)

  /**
   * Sends the stop commands for Elasticsearch, then polls the cluster status every 60s,
   * re-issuing the stop until the node reports ElasticsearchDown (or has no ES status),
   * at which point `prom` is completed.
   */
  private def stopElasticsearch(cmd: ActorSelection, prom: Promise[Unit]): Unit = {
    logger.info(s"Stopping Elasticsearch on node $node")
    cmd ! StopElasticsearch
    cmd ! StopElasticsearchMaster
    Grid.system.scheduler.scheduleOnce(60.seconds) {
      (HealthActor.ref ? GetClusterDetailedStatus).mapTo[ClusterStatusDetailed].map { f =>
        val ocs = f.esStat.get(node)
        //todo: Remove this log info.
        logger.info(s"ES OCS: $ocs")
        ocs match {
          // trySuccess instead of success: the promise may already be completed (e.g. by
          // the timeout in exec), and the original `success(Unit)` passed the Unit
          // companion object instead of the unit value `()`.
          case Some(ElasticsearchDown(_, _)) => prom.trySuccess(())
          case Some(_)                       => stopElasticsearch(cmd, prom)
          case None                          => prom.trySuccess(())
        }
      }
    }
  }

  /**
   * Same polling loop for Cassandra: completes `prom` once the node reports
   * CassandraDown, CassandraOk with an empty member map, or no status at all.
   */
  private def stopCassandra(cmd: ActorSelection, prom: Promise[Unit]): Unit = {
    logger.info(s"Stopping Cassandra on node $node")
    cmd ! StopCassandra
    CassandraController.removeCassandraDownNodes
    Grid.system.scheduler.scheduleOnce(60.seconds) {
      (HealthActor.ref ? GetClusterDetailedStatus).mapTo[ClusterStatusDetailed].map { f =>
        val ocs = f.casStat.get(node)
        //todo: Remove this log info.
        logger.info(s"Cass OCS: $ocs")
        ocs match {
          case Some(co @ CassandraOk(_, _, _)) if co.m.isEmpty => prom.trySuccess(())
          case Some(CassandraDown(_))                          => prom.trySuccess(())
          case Some(_)                                         => stopCassandra(cmd, prom)
          case None                                            => prom.trySuccess(())
        }
      }
    }
  }

  /**
   * Kicks off both stop loops, bounds them with a 1 hour timeout, and once both storage
   * services are down stops the CM-WELL components. Yields TaskSuccessful on success and
   * TaskFailed on any failure (including the timeout).
   */
  override def exec: Future[TaskResult] = {
    val cmd = CommandActor.select(node)
    val esPromise = Promise[Unit]
    val casPromise = Promise[Unit]
    stopCassandra(cmd, casPromise)
    stopElasticsearch(cmd, esPromise)
    // todo: kill CM-WELL processes before CAS & ES
    // Fail the promises if the services never report down within the allotted time.
    cancel(esPromise, 1.hours)
    cancel(casPromise, 1.hours)
    val stopped = for {
      _ <- esPromise.future
      _ <- casPromise.future
    } yield {
      logger.info("Stopping CM-WELL components")
      cmd ! StopKafka
      cmd ! StopWebserver
      cmd ! StopBg
      cmd ! StopCw
      cmd ! StopDc
    }
    stopped
      .map { _ =>
        logger.info("Task status: TaskSuccessful")
        TaskSuccessful
      }
      .recover {
        case err: Throwable =>
          logger.info("Task status: TaskFailed")
          TaskFailed
      }
  }
}
| hochgi/CM-Well | server/cmwell-controller/src/main/scala/cmwell/ctrl/tasks/ClearNode.scala | Scala | apache-2.0 | 3,908 |
// Positive compiler test (pos/prune-poly-bound): must simply compile. It exercises
// implicit search where the implicit's second type parameter is constrained both by an
// upper bound (U3 <: Base[T3]) and a <:< evidence parameter. The distinct type-parameter
// names (T0..T4, U2..U4) and the deliberately odd shapes are the point of the test —
// do not "simplify" or refactor.
class Base[T0]
class Derived[T1] extends Base[T1]
class Foo[T2, U2]
object Foo {
  implicit def mkFoo[T3, U3 <: Base[T3]](implicit ev: U3 <:< Base[T3]) : Foo[U3, Base[T3]] = ???
}
object Test {
  def foo[T4, U4](t: T4)(implicit ftu: Foo[T4, U4]): U4 = ???
  // Resolving `foo` here must infer U4 = Base[Int] via mkFoo, pruning the poly bound.
  val bi: Base[Int] = foo(null.asInstanceOf[Derived[Int]])
}
| lrytz/scala | test/files/pos/prune-poly-bound.scala | Scala | apache-2.0 | 320 |
/*
* Copyright (C) 2015 Vladimir Konstantinov, Yuriy Gintsyak
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.cafebabe.util.protocol.jsonrpc
import org.json4s._
/**
 * JSON-RPC error response carrying a numeric error code, a human-readable message, and
 * the id of the request that failed. Serialises to
 * {"error": {"code": ..., "message": ...}, "id": ...}.
 *
 * @author Vladimir Konstantinov
 */
case class JsonRpcError(code: Int, message: String, id: Int) extends JsonRpcMessage {
  override val toJson = {
    val errorBody = JObject(
      ("code", JInt(code)),
      ("message", JString(message))
    )
    JObject(("error", errorBody), ("id", JInt(id)))
  }
}
/**
 * Factories for the standard JSON-RPC 2.0 error codes, plus a parser extracting a
 * [[JsonRpcError]] from its JSON representation.
 *
 * @author Vladimir Konstantinov
 */
object JsonRpcError {
  // Standard JSON-RPC 2.0 error codes (-32700 .. -32603).
  def parseError(message: String, id: Int): JsonRpcError = apply(-32700, message, id)
  def invalidRequest(message: String, id: Int): JsonRpcError = apply(-32600, message, id)
  def methodNotFound(message: String, id: Int): JsonRpcError = apply(-32601, message, id)
  def invalidParams(message: String, id: Int): JsonRpcError = apply(-32602, message, id)
  def internalError(message: String, id: Int): JsonRpcError = apply(-32603, message, id)
  // Returns Some(error) when the JSON object contains an integer "id" and an "error"
  // object with integer "code" and string "message"; field order is irrelevant because
  // each generator filters the field list. Returns None for any other JSON value.
  def from(json: JValue): Option[JsonRpcError] = json match {
    case JObject(fields) =>
      (for {
        ("id", JInt(id)) <- fields
        ("error", JObject(error)) <- fields
        ("code", JInt(code)) <- error
        ("message", JString(message)) <- error
      } yield JsonRpcError(code.toInt, message, id.toInt)).headOption
    case _ => None
  }
}
| oxy-development/util | src/main/scala/io/cafebabe/util/protocol/jsonrpc/JsonRpcError.scala | Scala | lgpl-3.0 | 2,015 |
package fail.sauce.commas
// Compilation test for trailing-comma support: the comma after the last list element
// ("D",) is deliberate and must not be removed — compiling this file is the test.
object ArgumentCommas extends App {
  val foo: List[String] = List(
    "A",
    "B",
    "C",
    "D",
  )
  println(foo)
}
| andyscott/scala-commas | src/test/scala/fail/sauce/commas/arguments.scala | Scala | mit | 155 |
package io.questions.testdata
import io.questions.model.questionnaire.FieldName.FieldNameStringSyntax
import io.questions.model.questionnaire.QuestionText.QuestionTextSyntax
import io.questions.model.questionnaire.nodekey.NodeKey
import io.questions.model.questionnaire.{ Element, QuestionnaireId, QuestionnaireNode, QuestionnaireNodePredicate }
import io.questions.testdata.samples.questions.PersonalQuestions
import io.questions.testdata.samples.samplequestionnaire.QuestionTypes._
import io.questions.testdata.samples.enumerations.{ Title, YesNo }
/**
 * Sample "Officer" questionnaire: an information page (a read-only name sourced from
 * OpenCorporates plus an editable full-name question) followed by an approval page.
 */
object OfficerQuestionnaire {
  // Fresh id per JVM run; the questionnaire is rebuilt with a new identity each time.
  val id: QuestionnaireId = QuestionnaireId.random
  // Read-only: editability is pinned to the constant-false predicate.
  val officerNameFromOpenCorporates: QuestionnaireNode =
    stringQuestion("officerName".fieldName, "Officer Name (OpenCorporates)".text).copy(editability = QuestionnaireNodePredicate.False)
  val officerName: QuestionnaireNode =
    PersonalQuestions.currentFullName
  // val favouritePet: QuestionnaireNode =
  //   stringQuestion("pet".fieldName, "Favourite Pet".text)
  val informationSection: QuestionnaireNode = pageQuestion(
    "information".fieldName,
    "Officer Information".text,
    officerNameFromOpenCorporates,
    officerName
    // ,favouritePet
  )
  val approvalQuestion: QuestionnaireNode =
    ExampleComponents.approvalQuestion("applicationApproved".fieldName, "Do you approve this officer?".text)
  val approvalPage: QuestionnaireNode = pageQuestion(
    "approvalPage".fieldName,
    "Approval".text,
    approvalQuestion
  )
  // Root node: both pages under a non-repeating parent, with the enumerations the
  // questions reference registered alongside.
  val questionnaire: QuestionnaireNode = ExampleComponents.standard(
    NodeKey.random,
    "officer".fieldName,
    "Officer".text,
    Element.NonRepeatingParent(
      informationSection,
      approvalPage
    ),
    enums = Map(
      YesNo.name → YesNo.values,
      Title.name → Title.values
    ),
    questionnaireId = id
  )
}
| channingwalton/qanda | questionnaire/src/test/scala/io/questions/testdata/OfficerQuestionnaire.scala | Scala | mit | 1,833 |
package org.jetbrains.plugins.scala
package lang
package findUsages
import com.intellij.psi.util.PsiTreeUtil.{getParentOfType, isAncestor}
import com.intellij.psi.{PsiClass, PsiElement}
import com.intellij.usages.impl.rules.UsageType._
import com.intellij.usages.impl.rules.{UsageType, UsageTypeProviderEx}
import com.intellij.usages.{PsiElementUsageTarget, UsageTarget}
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil.MethodValue
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSelfTypeElement, ScTypeArgs, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScAccessModifier, ScAnnotation, ScAnnotationExpr, ScReference}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScTemplateBody, ScTemplateParents}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScEarlyDefinitions, ScPackaging}
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScInterpolatedExpressionPrefix
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.util.ImplicitUtil._
import org.jetbrains.plugins.scala.util.SAMUtil._
import scala.language.implicitConversions
/**
 * Classifies Scala usages for the IDE "find usages" view. Special cases (constructor
 * patterns, SAM implementations, implicit usages) are matched against the usage targets
 * first; otherwise the element's parents are scanned for the first applicable type.
 */
final class ScalaUsageTypeProvider extends UsageTypeProviderEx {

  import ScalaUsageTypeProvider._

  override def getUsageType(element: PsiElement): UsageType =
    getUsageType(element, UsageTarget.EMPTY_ARRAY)

  // TODO more of these, including Scala specific: case class/object, pattern match, type ascription, ...
  override def getUsageType(element: PsiElement, targets: Array[UsageTarget]): UsageType =
    element.containingScalaFile.flatMap { _ =>
      (element, targets) match {
        case (referenceElement: ScReference, Array(only: PsiElementUsageTarget))
          if isConstructorPatternReference(referenceElement) && !referenceElement.isReferenceTo(only.getElement) =>
          Some(ParameterInPattern)
        case (SAMTypeImplementation(_), _) if isSAMTypeUsageTarget(targets) =>
          Option(SAMImplementation)
        case (_: UnresolvedImplicitFakePsiElement, _) => Option(UnresolvedImplicit)
        case (e, Array(target: PsiElementUsageTarget))
          if isImplicitUsageTarget(target) && isReferencedImplicitlyIn(target.getElement, e) =>
          Some(ImplicitConversionOrParam)
        // Fix: a second, identical constructor-pattern case followed here in the original;
        // it could never match (the first occurrence always won) and has been removed.
        case _ =>
          element.withParentsInFile
            .flatMap(usageType(_, element))
            .headOption
      }
    }.orNull
}
object ScalaUsageTypeProvider {
private def isSAMTypeUsageTarget(t: Array[UsageTarget]): Boolean =
t.collect { case psiUsageTarget: PsiElementUsageTarget => psiUsageTarget.getElement }
.exists {
case cls: PsiClass => cls.isSAMable
case _ => false
}
  // True when the usage target's element is recognised by the ImplicitSearchTarget extractor.
  private def isImplicitUsageTarget(target: PsiElementUsageTarget): Boolean = target.getElement match {
    case ImplicitSearchTarget(_) => true
    case _ => false
  }
  // True when `target` is referenced from `e` specifically through an ImplicitReference
  // (an implicit conversion or implicit parameter), not an ordinary reference.
  private def isReferencedImplicitlyIn(target: PsiElement, e: PsiElement): Boolean =
    target.refOrImplicitRefIn(e) match {
      case Some(_: ImplicitReference) => true
      case _ => false
    }
  // Classifies a reference expression: a resolved `apply` method yields MethodApply; a
  // reference back into the enclosing function definition yields RECURSION; anything
  // else yields null (no special usage type).
  def referenceExpressionUsageType(expression: ScReferenceExpression): UsageType = {
    // Unwraps an inner resolve result (e.g. through apply-sugar) when present.
    def resolvedElement(result: ScalaResolveResult) =
      result.innerResolveResult
        .getOrElse(result).element
    expression.bind()
      .map(resolvedElement)
      .collect {
        case function: ScFunction if function.isApplyMethod => MethodApply
        case definition: ScFunctionDefinition if isAncestor(definition, expression, false) => RECURSION
      }.orNull
  }
  // Classifies a pattern: patterns inside a catch block's case clauses are catch-clause
  // parameters; typed patterns whose type element encloses the original are typed
  // patterns; constructor/infix patterns are extractor usages; otherwise null.
  def patternUsageType(pattern: ScPattern): UsageType = {
    def isPatternAncestor(element: PsiElement) = isAncestor(element, pattern, false)
    // All patterns of the case clauses of the nearest enclosing catch block (if any).
    val patterns = pattern.parentOfType(classOf[ScCatchBlock]).toSeq.collect {
      case ScCatchBlock(clauses) => clauses
    }.flatMap(_.caseClauses)
      .flatMap(_.pattern)
    if (patterns.exists(isPatternAncestor)) CLASS_CATCH_CLAUSE_PARAMETER_DECLARATION
    else pattern match {
      case ScTypedPattern(typeElement) if isPatternAncestor(typeElement) => ClassTypedPattern
      case _: ScConstructorPattern | _: ScInfixPattern => Extractor
      case _ => null
    }
  }
  // Lets the bundle-message strings below be used directly where a UsageType is expected.
  implicit def stringToUsageType(@Nls name: String): UsageType = new UsageType(() => name)
  // Scala-specific usage categories, localised through ScalaBundle.
  val Extractor: UsageType = ScalaBundle.message("usage.extractor")
  val ClassTypedPattern: UsageType = ScalaBundle.message("usage.typed.pattern")
  val TypedExpression: UsageType = ScalaBundle.message("usage.typed.statement")
  val MethodApply: UsageType = ScalaBundle.message("usage.method.apply")
  val ThisReference: UsageType = ScalaBundle.message("usage.this.reference")
  val AccessModifier: UsageType = ScalaBundle.message("usage.access.modifier")
  val PackageClause: UsageType = ScalaBundle.message("usage.package.clause")
  val FunctionExpression: UsageType = ScalaBundle.message("usage.function.expression")
  val NamedParameter: UsageType = ScalaBundle.message("usage.named.parameter")
  val PrefixInterpolatedString: UsageType = ScalaBundle.message("usage.interpolated.string.prefix")
  val ParameterInPattern: UsageType = ScalaBundle.message("usage.parameter.in.pattern")
  val SelfType: UsageType = ScalaBundle.message("usage.self.type")
  val TypeBound: UsageType = ScalaBundle.message("usage.type.bound")
  val TypeAlias: UsageType = ScalaBundle.message("usage.type.alias")
  val SecondaryConstructor: UsageType = ScalaBundle.message("usage.secondary.constructor")
  val ImplicitConversionOrParam: UsageType = ScalaBundle.message("usage.implicit.conversion.parameter")
  val UnresolvedImplicit: UsageType = ScalaBundle.message("usage.unresolved.implicit.conversion.parameter")
  val SAMImplementation: UsageType = ScalaBundle.message("usage.sam.interface.implementation")
  // Option-wrapper around nullableUsageType: None when no usage type applies.
  private def usageType(element: PsiElement, original: PsiElement): Option[UsageType] =
    Option(nullableUsageType(element, original))
  // True when the reference resolves to a binding introduced inside a constructor or
  // infix pattern (i.e. a name bound by pattern matching rather than a declaration).
  private def isConstructorPatternReference(element: ScReference): Boolean = element.resolve() match {
    case pattern: ScBindingPattern => getParentOfType(pattern, classOf[ScConstructorPattern], classOf[ScInfixPattern]) != null
    case _ => false
  }
  // Maps a single ancestor of `original` to a usage type, or null when this ancestor
  // contributes nothing (the caller then tries the next ancestor). The guards using
  // isAppropriate ensure the original element actually sits inside the relevant child
  // of the ancestor, not merely inside the ancestor itself.
  private[this] def nullableUsageType(element: PsiElement, original: PsiElement): UsageType = {
    def isAppropriate(parent: PsiElement): Boolean = isAncestor(parent, original, false)
    def existsAppropriate(maybeParent: Option[PsiElement]): Boolean = maybeParent.exists(isAppropriate)
    element match {
      case _: ScImportExpr => CLASS_IMPORT
      case typeArgs: ScTypeArgs => typeArgsUsageType(typeArgs)
      case templateParents: ScTemplateParents => templateParentsUsageType(templateParents)
      case _: ScParameter => CLASS_METHOD_PARAMETER_DECLARATION
      case pattern: ScPattern => patternUsageType(pattern)
      case typeElement: ScTypeElement => typeUsageType(typeElement)
      case _: ScInterpolatedExpressionPrefix => PrefixInterpolatedString
      case expression: ScReferenceExpression => referenceExpressionUsageType(expression)
      case expression: ScAnnotationExpr if existsAppropriate(expression.constructorInvocation.reference) => ANNOTATION
      case reference: ScThisReference if existsAppropriate(reference.reference) => ThisReference
      case reference: ScSuperReference if existsAppropriate(reference.reference) => DELEGATE_TO_SUPER
      case _: ScAccessModifier => AccessModifier
      case packaging: ScPackaging if existsAppropriate(packaging.reference) => PackageClause
      // Only a usage on the left-hand side of `=` counts; named arguments are reported
      // separately from plain writes.
      case assignment: ScAssignment if isAppropriate(assignment.leftExpression) =>
        if (assignment.isNamedParameter) NamedParameter else WRITE
      case MethodValue(_) => FunctionExpression
      case _: ScBlock | _: ScTemplateBody | _: ScEarlyDefinitions => READ
      case invocation: ScSelfInvocation if !isAppropriate(invocation.args.orNull) => SecondaryConstructor
      case _ => null
    }
  }
/** Usage type for a reference inside a type-argument list.
  * `isInstanceOf[T]` / `asInstanceOf[T]` / `classOf[T]` calls with a single
  * type argument get dedicated usage types; everything else is a plain
  * type-parameter usage. */
private[this] def typeArgsUsageType(typeArguments: ScTypeArgs): UsageType = {
  val singleArgCallName = Option(typeArguments.getParent).collect {
    case ScGenericCall(reference, Seq(_)) => reference.refName
  }
  singleArgCallName match {
    case Some("isInstanceOf") => CLASS_INSTANCE_OF
    case Some("asInstanceOf") => CLASS_CAST_TO
    case Some("classOf") => CLASS_CLASS_OBJECT_ACCESS
    case _ => TYPE_PARAMETER
  }
}
// Distinguishes `new` expressions (plain vs. anonymous class) from the
// extends/implements clause of a named template definition.
// ScNewTemplateDefinition must be matched before the general ScTemplateDefinition case.
// Returns null for annotations and any other parent.
private[this] def templateParentsUsageType(tp: ScTemplateParents): UsageType =
  getParentOfType(tp, classOf[ScTemplateDefinition], classOf[ScAnnotation]) match {
    case templateDefinition: ScNewTemplateDefinition =>
      if (templateDefinition.extendsBlock.isAnonymousClass) CLASS_ANONYMOUS_NEW_OPERATOR else CLASS_NEW_OPERATOR
    case _: ScTemplateDefinition => CLASS_EXTENDS_IMPLEMENTS_LIST
    case _ => null
  }
// Classifies a reference occurring inside a type element by inspecting the
// element's direct parent (function return type, val/var type ascription,
// class parameter, typed expression, self type, type bound, alias body).
// Returns null when no specific usage type applies (caller wraps in Option).
private[this] def typeUsageType(typeElement: ScTypeElement): UsageType = {
  // True when the parent's declared type element is exactly this one.
  def isAppropriate(maybeTypeElement: Option[ScTypeElement]) = maybeTypeElement.contains(typeElement)
  typeElement.getParent match {
    case function: ScFunction if isAppropriate(function.returnTypeElement) =>
      CLASS_METHOD_RETURN_TYPE
    case valueOrVariable: ScValueOrVariable if isAppropriate(valueOrVariable.typeElement) =>
      if (valueOrVariable.isLocal) CLASS_LOCAL_VAR_DECLARATION else CLASS_FIELD_DECLARATION
    case classParameter: ScClassParameter if isAppropriate(classParameter.typeElement) && classParameter.isClassMember =>
      CLASS_FIELD_DECLARATION
    case typedExpr: ScTypedExpression if isAppropriate(typedExpr.typeElement) =>
      TypedExpression
    case _: ScSelfTypeElement => SelfType
    case _: ScTypeAliasDeclaration | _: ScTypeParam => TypeBound
    case _: ScTypeAliasDefinition => TypeAlias
    case _ => null
  }
}
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/findUsages/ScalaUsageTypeProvider.scala | Scala | apache-2.0 | 10,868 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.hyperbus.model.headers
import com.hypertino.hyperbus.model.Headers
/** Converts between raw HTTP header pairs and Hyperbus [[Headers]].
  *
  * `fromHttp` builds Hyperbus headers from the (name, value) pairs received
  * over HTTP; `toHttp` renders Hyperbus headers back into (name, value) pairs.
  */
trait HeadersConverter {
  def fromHttp(headers: Seq[(String, String)]): Headers
  def toHttp(headers: Headers): Seq[(String, String)]
}
| hypertino/hyperbus | hyperbus/src/main/scala/com/hypertino/hyperbus/model/headers/HeadersConverter.scala | Scala | mpl-2.0 | 496 |
/*
* Stratio Meta
*
* Copyright (c) 2014, Stratio, All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3.0 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library.
*/
package com.stratio.meta.common.ask
/**
* Types of supported API operations.
*/
/**
 * Types of supported API operations.
 */
object APICommand extends Enumeration {
  type APICommand = Value

  // Names are derived from the val identifiers via Enumeration's reflective
  // naming, yielding exactly the same LIST_* names (and ids 0, 1, 2) as
  // spelling each name out in Value("...").
  val LIST_CATALOGS, LIST_TABLES, LIST_COLUMNS = Value
}
| dhiguero/stratio-meta | meta-common/src/main/scala/com/stratio/meta/common/ask/APICommand.scala | Scala | gpl-3.0 | 990 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.expressions.SubExprUtils._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._
/**
* Throws user facing errors when passed invalid queries that fail to analyze.
*/
trait CheckAnalysis extends PredicateHelper {
/**
 * Override to provide additional checks for correct analysis.
 * These rules will be evaluated after our built-in check rules,
 * at the end of [[checkAnalysis]], once every built-in check has passed.
 */
val extendedCheckRules: Seq[LogicalPlan => Unit] = Nil
/** Aborts analysis by raising an [[AnalysisException]] with the given user-facing message. */
protected def failAnalysis(msg: String): Nothing = {
  throw new AnalysisException(msg)
}
/**
 * Returns true when more than one [[Generator]] expression appears among
 * `exprs` (including generators nested inside other expressions).
 *
 * Generators are counted lazily via an iterator and the scan stops as soon
 * as a second one is found, instead of materializing every generator first
 * only to compare the total count against 1.
 */
protected def containsMultipleGenerators(exprs: Seq[Expression]): Boolean = {
  exprs.iterator.flatMap(_.collect {
    case e: Generator => e
  }).take(2).size > 1
}
/** True when `dt` is a [[MapType]] or contains one anywhere in its nested structure. */
protected def hasMapType(dt: DataType): Boolean =
  dt.existsRecursively {
    case _: MapType => true
    case _ => false
  }
// Finds the first output attribute (or deduplication key) whose type contains
// a map, for operators where map-typed columns are unsupported: set
// operations (Intersect/Except/Distinct) and Deduplicate.
// Returns None for every other operator.
protected def mapColumnInSetOperation(plan: LogicalPlan): Option[Attribute] = plan match {
  case _: Intersect | _: Except | _: Distinct =>
    plan.output.find(a => hasMapType(a.dataType))
  case d: Deduplicate =>
    d.keys.find(a => hasMapType(a.dataType))
  case _ => None
}
/** Validates a LIMIT expression: it must be a foldable, non-negative integer
  * constant. Fails analysis with a user-facing message otherwise. */
private def checkLimitClause(limitExpr: Expression): Unit = {
  if (!limitExpr.foldable) {
    failAnalysis(
      "The limit expression must evaluate to a constant value, but got " +
        limitExpr.sql)
  } else if (limitExpr.dataType != IntegerType) {
    failAnalysis(
      "The limit expression must be integer type, but got " +
        limitExpr.dataType.simpleString)
  } else if (limitExpr.eval().asInstanceOf[Int] < 0) {
    failAnalysis(
      "The limit expression must be equal to or greater than 0, but got " +
        limitExpr.eval().asInstanceOf[Int])
  }
}
// Entry point: walks the plan bottom-up and throws AnalysisException on the
// first problem found. Three passes run per operator: expression-level checks,
// operator-level checks, then structural/cross-operator checks.
def checkAnalysis(plan: LogicalPlan): Unit = {
  // We transform up and order the rules so as to catch the first possible failure instead
  // of the result of cascading resolution failures.
  plan.foreachUp {

    case p if p.analyzed => // Skip already analyzed sub-plans

    case u: UnresolvedRelation =>
      u.failAnalysis(s"Table or view not found: ${u.tableIdentifier}")

    case operator: LogicalPlan =>
      // Pass 1: expression-level checks (unresolved attributes, type
      // mismatches, window-function and subquery misuse).
      operator transformExpressionsUp {
        case a: Attribute if !a.resolved =>
          val from = operator.inputSet.map(_.name).mkString(", ")
          a.failAnalysis(s"cannot resolve '${a.sql}' given input columns: [$from]")

        case e: Expression if e.checkInputDataTypes().isFailure =>
          e.checkInputDataTypes() match {
            case TypeCheckResult.TypeCheckFailure(message) =>
              e.failAnalysis(
                s"cannot resolve '${e.sql}' due to data type mismatch: $message")
          }

        case c: Cast if !c.resolved =>
          failAnalysis(
            s"invalid cast from ${c.child.dataType.simpleString} to ${c.dataType.simpleString}")

        case g: Grouping =>
          failAnalysis("grouping() can only be used with GroupingSets/Cube/Rollup")
        case g: GroupingID =>
          failAnalysis("grouping_id() can only be used with GroupingSets/Cube/Rollup")

        case w @ WindowExpression(AggregateExpression(_, _, true, _), _) =>
          failAnalysis(s"Distinct window functions are not supported: $w")

        case w @ WindowExpression(_: OffsetWindowFunction, WindowSpecDefinition(_, order,
             SpecifiedWindowFrame(frame,
               FrameBoundary(l),
               FrameBoundary(h))))
           if order.isEmpty || frame != RowFrame || l != h =>
          failAnalysis("An offset window function can only be evaluated in an ordered " +
            s"row-based window frame with a single offset: $w")

        case w @ WindowExpression(e, s) =>
          // Only allow window functions with an aggregate expression or an offset window
          // function.
          e match {
            case _: AggregateExpression | _: OffsetWindowFunction | _: AggregateWindowFunction =>
            case _ =>
              failAnalysis(s"Expression '$e' not supported within a window function.")
          }
          // Make sure the window specification is valid.
          s.validate match {
            case Some(m) =>
              failAnalysis(s"Window specification $s is not valid because $m")
            case None => w
          }

        case s @ ScalarSubquery(query, conditions, _) =>
          // Recursively validate the subquery plan itself.
          checkAnalysis(query)

          // If no correlation, the output must be exactly one column
          if (conditions.isEmpty && query.output.size != 1) {
            failAnalysis(
              s"Scalar subquery must return only one column, but got ${query.output.size}")
          } else if (conditions.nonEmpty) {
            def checkAggregate(agg: Aggregate): Unit = {
              // Make sure correlated scalar subqueries contain one row for every outer row by
              // enforcing that they are aggregates containing exactly one aggregate expression.
              // The analyzer has already checked that subquery contained only one output column,
              // and added all the grouping expressions to the aggregate.
              val aggregates = agg.expressions.flatMap(_.collect {
                case a: AggregateExpression => a
              })
              if (aggregates.isEmpty) {
                failAnalysis("The output of a correlated scalar subquery must be aggregated")
              }

              // SPARK-18504/SPARK-18814: Block cases where GROUP BY columns
              // are not part of the correlated columns.
              val groupByCols = AttributeSet(agg.groupingExpressions.flatMap(_.references))
              // Collect the local references from the correlated predicate in the subquery.
              val subqueryColumns = getCorrelatedPredicates(query).flatMap(_.references)
                .filterNot(conditions.flatMap(_.references).contains)
              val correlatedCols = AttributeSet(subqueryColumns)
              val invalidCols = groupByCols -- correlatedCols
              // GROUP BY columns must be a subset of columns in the predicates
              if (invalidCols.nonEmpty) {
                failAnalysis(
                  "A GROUP BY clause in a scalar correlated subquery " +
                    "cannot contain non-correlated columns: " +
                    invalidCols.mkString(","))
              }
            }

            // Skip subquery aliases added by the Analyzer.
            // For projects, do the necessary mapping and skip to its child.
            def cleanQuery(p: LogicalPlan): LogicalPlan = p match {
              case s: SubqueryAlias => cleanQuery(s.child)
              case p: Project => cleanQuery(p.child)
              case child => child
            }

            cleanQuery(query) match {
              case a: Aggregate => checkAggregate(a)
              case Filter(_, a: Aggregate) => checkAggregate(a)
              case fail => failAnalysis(s"Correlated scalar subqueries must be Aggregated: $fail")
            }
          }
          s

        case s: SubqueryExpression =>
          checkAnalysis(s.plan)
          s
      }

      // Pass 2: operator-level checks (watermarks, filters, joins,
      // aggregates, sorts, limits, subquery placement, set operations).
      operator match {
        case etw: EventTimeWatermark =>
          etw.eventTime.dataType match {
            case s: StructType
              if s.find(_.name == "end").map(_.dataType) == Some(TimestampType) =>
            case _: TimestampType =>
            case _ =>
              failAnalysis(
                s"Event time must be defined on a window or a timestamp, but " +
                  s"${etw.eventTime.name} is of type ${etw.eventTime.dataType.simpleString}")
          }

        case f: Filter if f.condition.dataType != BooleanType =>
          failAnalysis(
            s"filter expression '${f.condition.sql}' " +
              s"of type ${f.condition.dataType.simpleString} is not a boolean.")

        case Filter(condition, _) if hasNullAwarePredicateWithinNot(condition) =>
          failAnalysis("Null-aware predicate sub-queries cannot be used in nested " +
            s"conditions: $condition")

        case j @ Join(_, _, _, Some(condition)) if condition.dataType != BooleanType =>
          failAnalysis(
            s"join condition '${condition.sql}' " +
              s"of type ${condition.dataType.simpleString} is not a boolean.")

        case Aggregate(groupingExprs, aggregateExprs, child) =>
          // Each aggregate output expression must be an aggregate function
          // application, a grouping expression, or built from those.
          def checkValidAggregateExpression(expr: Expression): Unit = expr match {
            case aggExpr: AggregateExpression =>
              aggExpr.aggregateFunction.children.foreach { child =>
                child.foreach {
                  case agg: AggregateExpression =>
                    failAnalysis(
                      s"It is not allowed to use an aggregate function in the argument of " +
                        s"another aggregate function. Please use the inner aggregate function " +
                        s"in a sub-query.")
                  case other => // OK
                }

                if (!child.deterministic) {
                  failAnalysis(
                    s"nondeterministic expression ${expr.sql} should not " +
                      s"appear in the arguments of an aggregate function.")
                }
              }
            case e: Attribute if groupingExprs.isEmpty =>
              // Collect all [[AggregateExpressions]]s.
              val aggExprs = aggregateExprs.filter(_.collect {
                case a: AggregateExpression => a
              }.nonEmpty)
              failAnalysis(
                s"grouping expressions sequence is empty, " +
                  s"and '${e.sql}' is not an aggregate function. " +
                  s"Wrap '${aggExprs.map(_.sql).mkString("(", ", ", ")")}' in windowing " +
                  s"function(s) or wrap '${e.sql}' in first() (or first_value) " +
                  s"if you don't care which value you get."
              )
            case e: Attribute if !groupingExprs.exists(_.semanticEquals(e)) =>
              failAnalysis(
                s"expression '${e.sql}' is neither present in the group by, " +
                  s"nor is it an aggregate function. " +
                  "Add to group by or wrap in first() (or first_value) if you don't care " +
                  "which value you get.")
            case e if groupingExprs.exists(_.semanticEquals(e)) => // OK
            case e => e.children.foreach(checkValidAggregateExpression)
          }

          // Grouping expressions must be aggregate-free, orderable and deterministic.
          def checkValidGroupingExprs(expr: Expression): Unit = {
            if (expr.find(_.isInstanceOf[AggregateExpression]).isDefined) {
              failAnalysis(
                "aggregate functions are not allowed in GROUP BY, but found " + expr.sql)
            }

            // Check if the data type of expr is orderable.
            if (!RowOrdering.isOrderable(expr.dataType)) {
              failAnalysis(
                s"expression ${expr.sql} cannot be used as a grouping expression " +
                  s"because its data type ${expr.dataType.simpleString} is not an orderable " +
                  s"data type.")
            }

            if (!expr.deterministic) {
              // This is just a sanity check, our analysis rule PullOutNondeterministic should
              // already pull out those nondeterministic expressions and evaluate them in
              // a Project node.
              failAnalysis(s"nondeterministic expression ${expr.sql} should not " +
                s"appear in grouping expression.")
            }
          }

          groupingExprs.foreach(checkValidGroupingExprs)
          aggregateExprs.foreach(checkValidAggregateExpression)

        case Sort(orders, _, _) =>
          orders.foreach { order =>
            if (!RowOrdering.isOrderable(order.dataType)) {
              failAnalysis(
                s"sorting is not supported for columns of type ${order.dataType.simpleString}")
            }
          }

        case GlobalLimit(limitExpr, _) => checkLimitClause(limitExpr)

        case LocalLimit(limitExpr, _) => checkLimitClause(limitExpr)

        case p if p.expressions.exists(ScalarSubquery.hasCorrelatedScalarSubquery) =>
          p match {
            case _: Filter | _: Aggregate | _: Project => // Ok
            case other => failAnalysis(
              s"Correlated scalar sub-queries can only be used in a Filter/Aggregate/Project: $p")
          }

        case p if p.expressions.exists(SubqueryExpression.hasInOrExistsSubquery) =>
          p match {
            case _: Filter => // Ok
            case _ => failAnalysis(s"Predicate sub-queries can only be used in a Filter: $p")
          }

        case _: Union | _: SetOperation if operator.children.length > 1 =>
          def dataTypes(plan: LogicalPlan): Seq[DataType] = plan.output.map(_.dataType)
          def ordinalNumber(i: Int): String = i match {
            case 0 => "first"
            case 1 => "second"
            case i => s"${i}th"
          }
          val ref = dataTypes(operator.children.head)
          operator.children.tail.zipWithIndex.foreach { case (child, ti) =>
            // Check the number of columns
            if (child.output.length != ref.length) {
              failAnalysis(
                s"""
                  |${operator.nodeName} can only be performed on tables with the same number
                  |of columns, but the first table has ${ref.length} columns and
                  |the ${ordinalNumber(ti + 1)} table has ${child.output.length} columns
                """.stripMargin.replace("\\n", " ").trim())
            }
            // Check if the data types match.
            dataTypes(child).zip(ref).zipWithIndex.foreach { case ((dt1, dt2), ci) =>
              // SPARK-18058: we shall not care about the nullability of columns
              if (TypeCoercion.findWiderTypeForTwo(dt1.asNullable, dt2.asNullable).isEmpty) {
                failAnalysis(
                  s"""
                    |${operator.nodeName} can only be performed on tables with the compatible
                    |column types. ${dt1.catalogString} <> ${dt2.catalogString} at the
                    |${ordinalNumber(ci)} column of the ${ordinalNumber(ti + 1)} table
                  """.stripMargin.replace("\\n", " ").trim())
              }
            }
          }

        case _ => // Fallbacks to the following checks
      }

      // Pass 3: structural checks spanning the whole operator (missing or
      // conflicting attributes, generator count, determinism, leftover hints).
      operator match {
        case o if o.children.nonEmpty && o.missingInput.nonEmpty =>
          val missingAttributes = o.missingInput.mkString(",")
          val input = o.inputSet.mkString(",")

          failAnalysis(
            s"resolved attribute(s) $missingAttributes missing from $input " +
              s"in operator ${operator.simpleString}")

        case p @ Project(exprs, _) if containsMultipleGenerators(exprs) =>
          failAnalysis(
            s"""Only a single table generating function is allowed in a SELECT clause, found:
               | ${exprs.map(_.sql).mkString(",")}""".stripMargin)

        case j: Join if !j.duplicateResolved =>
          val conflictingAttributes = j.left.outputSet.intersect(j.right.outputSet)
          failAnalysis(
            s"""
              |Failure when resolving conflicting references in Join:
              |$plan
              |Conflicting attributes: ${conflictingAttributes.mkString(",")}
              |""".stripMargin)

        case i: Intersect if !i.duplicateResolved =>
          val conflictingAttributes = i.left.outputSet.intersect(i.right.outputSet)
          failAnalysis(
            s"""
              |Failure when resolving conflicting references in Intersect:
              |$plan
              |Conflicting attributes: ${conflictingAttributes.mkString(",")}
            """.stripMargin)

        case e: Except if !e.duplicateResolved =>
          val conflictingAttributes = e.left.outputSet.intersect(e.right.outputSet)
          failAnalysis(
            s"""
              |Failure when resolving conflicting references in Except:
              |$plan
              |Conflicting attributes: ${conflictingAttributes.mkString(",")}
            """.stripMargin)

        // TODO: although map type is not orderable, technically map type should be able to be
        // used in equality comparison, remove this type check once we support it.
        case o if mapColumnInSetOperation(o).isDefined =>
          val mapCol = mapColumnInSetOperation(o).get
          failAnalysis("Cannot have map type columns in DataFrame which calls " +
            s"set operations(intersect, except, etc.), but the type of column ${mapCol.name} " +
            "is " + mapCol.dataType.simpleString)

        case o if o.expressions.exists(!_.deterministic) &&
          !o.isInstanceOf[Project] && !o.isInstanceOf[Filter] &&
          !o.isInstanceOf[Aggregate] && !o.isInstanceOf[Window] =>
          // The rule above is used to check Aggregate operator.
          failAnalysis(
            s"""nondeterministic expressions are only allowed in
               |Project, Filter, Aggregate or Window, found:
               | ${o.expressions.map(_.sql).mkString(",")}
               |in operator ${operator.simpleString}
             """.stripMargin)

        case _: Hint =>
          throw new IllegalStateException(
            "Internal error: logical hint operator should have been removed during analysis")

        case _ => // Analysis successful!
      }
  }
  // User-supplied checks run only after every built-in check has passed.
  extendedCheckRules.foreach(_(plan))
  // Anything still unresolved at this point is an analysis failure.
  plan.foreachUp {
    case o if !o.resolved => failAnalysis(s"unresolved operator ${o.simpleString}")
    case _ =>
  }

  // Mark every node so later checkAnalysis calls can skip already-verified sub-plans.
  plan.foreach(_.setAnalyzed())
}
}
| jrshust/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | Scala | apache-2.0 | 19,198 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.httpclient
import java.net.URI
import org.scalatest.{FlatSpecLike, Matchers}
import org.squbs.resolver.ResolverRegistry
import org.squbs.testkit.CustomTestKit
import org.scalatest.OptionValues._
// Verifies that DefaultHttpEndpointResolver, registered in the ResolverRegistry,
// resolves http/https URI strings to HttpEndpoint and rejects malformed URIs
// and unsupported schemes.
class DefaultHttpEndpointResolverSpec extends CustomTestKit with FlatSpecLike with Matchers {
  ResolverRegistry(system).register(new DefaultHttpEndpointResolver)

  // Resolves `uri` via the registry; `.value` (OptionValues) fails the test when unresolved.
  private def resolve(uri: String) = ResolverRegistry(system).resolve[HttpEndpoint](uri).value

  it should "resolve valid http uri string to an HttpEndpoint" in {
    resolve("http://akka.io:80") shouldBe HttpEndpoint(URI.create("http://akka.io:80"))
    resolve("http://akka.io") shouldBe HttpEndpoint(URI.create("http://akka.io"))
  }

  it should "resolve valid https uri string to an HttpEndpoint" in {
    resolve("https://akka.io:443") shouldBe HttpEndpoint(URI.create("https://akka.io:443"))
    resolve("https://akka.io") shouldBe HttpEndpoint(URI.create("https://akka.io"))
  }

  it should "not resolve invalid http uri string to an HttpEndpoint" in {
    ResolverRegistry(system).resolve[HttpEndpoint]("invalidUri:") shouldBe empty
    ResolverRegistry(system).resolve[HttpEndpoint]("ftp://akka.io") shouldBe empty
  }

  it should "set the resolver name to the class name" in {
    (new DefaultHttpEndpointResolver).name shouldEqual "org.squbs.httpclient.DefaultHttpEndpointResolver"
  }
}
| Harikiranvuyyuru/squbs | squbs-httpclient/src/test/scala/org/squbs/httpclient/DefaultHttpEndpointResolverSpec.scala | Scala | apache-2.0 | 1,971 |
package sangria.marshalling
import scala.annotation.implicitNotFound
import scala.collection.immutable.VectorBuilder
/**
 * Abstracts construction and rendering of execution results in some target
 * format (e.g. a JSON AST). Implementations define the concrete node type
 * and how maps, arrays, scalars, enums and nulls are represented.
 */
trait ResultMarshaller {
  /** Concrete node type of the target format. */
  type Node
  /** Intermediate builder used to assemble map-like nodes incrementally. */
  type MapBuilder

  /** Creates an empty map builder; `keys` lists the keys that will be added. */
  def emptyMapNode(keys: Seq[String]): MapBuilder
  /** Adds one key/value entry; `optional` marks values that may be absent. */
  def addMapNodeElem(builder: MapBuilder, key: String, value: Node, optional: Boolean): MapBuilder
  /** Finalizes a map builder into a node. */
  def mapNode(builder: MapBuilder): Node
  /** Builds a map node directly from key/value pairs. */
  def mapNode(keyValues: Seq[(String, Node)]): Node

  /** Builds an array node from already-marshaled elements. */
  def arrayNode(values: Vector[Node]): Node
  /** Marshals a possibly-missing array element. */
  def optionalArrayNodeValue(value: Option[Node]): Node

  /** Marshals a coerced scalar value
    *
    * Following scala types must be supported:
    *
    * - String
    * - Boolean
    * - Int
    * - Long
    * - Float
    * - Double
    * - scala.BigInt
    * - scala.BigDecimal
    *
    * Implementation may also support additional scala types if underlying data format supports them
    * (like Dates, or BLOBs).
    *
    * @param value
    *   coerced scalar value
    * @return
    *   marshaled node
    */
  def scalarNode(value: Any, typeName: String, info: Set[ScalarValueInfo]): Node

  /** Marshals an enum value by its name. */
  def enumNode(value: String, typeName: String): Node

  /** The target format's representation of `null`. */
  def nullNode: Node

  def renderCompact(node: Node): String
  def renderPretty(node: Node): String

  /** Marshals each element of `seq` with `fn` and wraps the results in an
    * array node. (Idiomatic map-to-Vector instead of a manual VectorBuilder loop.) */
  def mapAndMarshal[T](seq: Seq[T], fn: T => Node): Node =
    arrayNode(seq.iterator.map(fn).toVector)

  /** Optional capabilities advertised by this marshaller; none by default. */
  def capabilities: Set[MarshallerCapability] = Set.empty
}
object ResultMarshaller {
  // Default marshaller used when none is supplied explicitly; produces
  // plain Scala values via `scalaMarshalling`.
  implicit val defaultResultMarshaller: ScalaResultMarshaller =
    scalaMarshalling.scalaResultMarshaller
}
/** Alters the behaviour of the executor and marshals raw (in-scala coerced representation) or
* scalar values and enums.
*/
trait RawResultMarshaller extends ResultMarshaller {
  /** Marshals an already-coerced ("raw") scalar value directly. */
  def rawScalarNode(rawValue: Any): Node

  // Coerced scalar/enum marshaling is intentionally unsupported here:
  // callers of a RawResultMarshaller must go through `rawScalarNode`.
  private def onlyRawValuesExpected =
    throw new IllegalArgumentException("Only raw values expected in `RawResultMarshaller`!")

  final def scalarNode(value: Any, typeName: String, info: Set[ScalarValueInfo]) =
    onlyRawValuesExpected

  final def enumNode(value: String, typeName: String) = onlyRawValuesExpected
}
// Type class tying a result type T to the marshaller that produces it; the
// @implicitNotFound message guides users toward the correct marshaling import.
@implicitNotFound(
  "Type ${T} cannot be marshaled. Please consider defining an implicit instance of `ResultMarshallerForType` for it or import appropriate marshaling from `sangria.marshalling`.")
trait ResultMarshallerForType[+T] {
  // The marshaller capable of producing results of type T.
  def marshaller: ResultMarshaller
}
| sangria-graphql/sangria-marshalling-api | src/main/scala/sangria/marshalling/ResultMarshaller.scala | Scala | apache-2.0 | 2,430 |
package mesosphere.marathon
package core.task.update.impl.steps
import akka.actor.ActorRef
import com.google.inject.Provider
import mesosphere.UnitTest
import mesosphere.marathon.MarathonSchedulerActor.ScaleRunSpec
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.event.MarathonEvent
import mesosphere.marathon.core.instance.update.InstanceUpdated
import mesosphere.marathon.core.instance.{Goal, Instance, TestInstanceBuilder}
import mesosphere.marathon.state.{AbsolutePathId, Timestamp}
// Unit tests for ScaleAppUpdateStepImpl: a ScaleRunSpec request must be emitted
// exactly when an instance transitions from a non-scaling-worthy condition into
// a scaling-worthy one, and must not be repeated while the condition stays
// scaling-worthy.
class ScaleAppUpdateStepImplTest extends UnitTest {

  // used pattern matching because of compiler checks, when additional case objects are added to Condition
  def scalingWorthy: Condition => Boolean = {
    case Condition.Scheduled | Condition.Provisioned | Condition.Killing | Condition.Running | Condition.Staging | Condition.Starting |
        Condition.Unreachable =>
      false
    case Condition.Error | Condition.Failed | Condition.Finished | Condition.Killed | Condition.UnreachableInactive | Condition.Gone |
        Condition.Dropped | Condition.Unknown =>
      true
  }

  val allConditions = Seq(
    Condition.Provisioned,
    Condition.Scheduled,
    Condition.Error,
    Condition.Failed,
    Condition.Finished,
    Condition.Killed,
    Condition.Killing,
    Condition.Running,
    Condition.Staging,
    Condition.Starting,
    Condition.Unreachable,
    Condition.UnreachableInactive,
    Condition.Gone,
    Condition.Dropped,
    Condition.Unknown
  )

  // Partition of all conditions used to generate the test cases below.
  val scalingWorthyConditions = allConditions.filter(scalingWorthy)
  val notScalingWorthyConditions = allConditions.filterNot(scalingWorthy)

  "ScaleAppUpdateStep" when {
    "receiving multiple failed tasks" should {
      val f = new Fixture

      val instance = TestInstanceBuilder
        .newBuilder(AbsolutePathId("/app"))
        .addTaskUnreachable(containerName = Some("unreachable1"))
        .getInstance()

      "send a scale request to the scheduler actor" in {
        val failedUpdate1 = f.makeFailedUpdateOp(instance, Some(Condition.Running), Condition.Failed)
        f.step.calcScaleEvent(failedUpdate1) should be(Some(ScaleRunSpec(instance.runSpecId)))
      }

      "not send a scale request again" in {
        val failedUpdate2 = f.makeFailedUpdateOp(instance, Some(Condition.Failed), Condition.Failed)
        f.step.calcScaleEvent(failedUpdate2) should be(None)
      }
    }

    notScalingWorthyConditions.foreach { newStatus =>
      s"receiving a not scaling worthy status update '$newStatus' on a previously scaling worthy condition" should {
        val f = new Fixture
        val instance = TestInstanceBuilder
          .newBuilder(AbsolutePathId("/app"))
          .addTaskUnreachable(containerName = Some("unreachable1"))
          .getInstance()
        val update = f.makeFailedUpdateOp(instance, Some(Condition.Failed), newStatus)

        "send no requests" in {
          f.step.calcScaleEvent(update) should be(None)
        }
      }
    }

    scalingWorthyConditions.foreach { newStatus =>
      s"receiving a scaling worthy status update '$newStatus' on a previously scaling worthy condition" should {
        val f = new Fixture
        val instance = TestInstanceBuilder
          .newBuilder(AbsolutePathId("/app"))
          .addTaskFailed(containerName = Some("failed1"))
          .getInstance()
        val update = f.makeFailedUpdateOp(instance, Some(Condition.Failed), newStatus)

        "send no requests" in {
          f.step.calcScaleEvent(update) should be(None)
        }
      }
    }

    scalingWorthyConditions.foreach { newStatus =>
      s"receiving a scaling worthy status update '$newStatus' on a previously non scaling worthy condition" should {
        val f = new Fixture
        val instance = TestInstanceBuilder
          .newBuilder(AbsolutePathId("/app"))
          .addTaskRunning(containerName = Some("running1"))
          .getInstance()
        val update = f.makeFailedUpdateOp(instance, Some(Condition.Running), newStatus)

        "send ScaleRunSpec requests" in {
          f.step.calcScaleEvent(update) should be(Some(ScaleRunSpec(instance.runSpecId)))
        }
      }
    }

    "receiving a task failed without lastState" should {
      val f = new Fixture

      val instance = TestInstanceBuilder
        .newBuilder(AbsolutePathId("/app"))
        .addTaskUnreachable(containerName = Some("unreachable1"))
        .getInstance()

      "send a scale request to the scheduler actor" in {
        val update = f.makeFailedUpdateOp(instance, None, Condition.Failed)
        f.step.calcScaleEvent(update) should be(Some(ScaleRunSpec(instance.runSpecId)))
      }

      "send no more requests" in {
        val update = f.makeFailedUpdateOp(instance, Some(Condition.Failed), Condition.Failed)
        f.step.calcScaleEvent(update) should be(None)
      }
    }
  }

  // Test fixture: builds InstanceUpdated events with a controllable
  // (lastCondition, newCondition) pair and provides the step under test.
  class Fixture {
    private[this] val schedulerActorProvider = mock[Provider[ActorRef]]

    def makeFailedUpdateOp(instance: Instance, lastCondition: Option[Condition], newCondition: Condition) =
      InstanceUpdated(
        instance.copy(state = instance.state.copy(condition = newCondition)),
        lastCondition.map(state => Instance.InstanceState(state, Timestamp.now(), Some(Timestamp.now()), Some(true), Goal.Running)),
        Seq.empty[MarathonEvent]
      )

    val step = new ScaleAppUpdateStepImpl(schedulerActorProvider)
  }
}
| mesosphere/marathon | src/test/scala/mesosphere/marathon/core/task/update/impl/steps/ScaleAppUpdateStepImplTest.scala | Scala | apache-2.0 | 5,417 |
package org.airpnp.actor
import java.io.File
import org.airpnp.dlna.DLNAPublisher
import org.airpnp.Logging
trait TestMode extends Logging {
/** Publishes sample photo/video content through the given publisher, but only
  * when the AirPnp test-mode marker file is present. */
private[actor] def maybeAddTestContent(publisher: DLNAPublisher): Unit = {
  if (isTestMode) {
    debug("AirPnp test mode detected, publishing test content.")
    val photoUrl = publisher.publishPhoto("photo1", () => getClass.getResourceAsStream("/org/airpnp/lena.jpg"), 27172)
    debug("-- photo URL is: {}", photoUrl)
    val videoUrl = publisher.publishMovie("video1", "http://www.cybertechmedia.com/samples/hunterdouglas.mov")
    debug("-- video URL is: {}", videoUrl)
  }
}
private def isTestMode() = new File(System.getProperty("java.io.tmpdir"), "airpnp.test").exists
} | provegard/ScAirPnp | src/main/scala/org/airpnp/actor/TestMode.scala | Scala | mit | 739 |
import sbt._, Keys._
import skinny.scalate.ScalatePlugin._, ScalateKeys._
import skinny.servlet._, ServletPlugin._, ServletKeys._
import org.sbtidea.SbtIdeaPlugin._
import scala.language.postfixOps
object SkinnyAppBuild extends Build {
// -------------------------------------------------------
// Common Settings
// -------------------------------------------------------
// Application coordinates and pinned framework/toolchain versions used throughout the build.
val appOrganization = "org.skinny-framework"
val appName = "skinny-blank-app"
val appVersion = "0.1.0-SNAPSHOT"

val skinnyVersion = "2.0.0"
val theScalaVersion = "2.11.7"
val jettyVersion = "9.2.14.v20151106"

// Applies project-wide exclusions to a dependency. commons-dbcp is excluded
// everywhere — presumably to avoid clashing with the connection pooling
// brought in by the Skinny stack; confirm before removing.
def applyDefaultExclusions(m: ModuleID) = m.excludeAll(
  ExclusionRule("commons-dbcp", "commons-dbcp")
)
// Settings shared by every sub-project: coordinates, pinned transitive
// versions, framework/test/container dependencies, resolvers and
// compiler/test options.
lazy val baseSettings = servletSettings ++ Seq(
  organization := appOrganization,
  name := appName,
  version := appVersion,
  scalaVersion := theScalaVersion,
  // Pin commonly-conflicting transitive dependencies to single versions.
  dependencyOverrides := Set(
    "org.scala-lang" % "scala-library" % scalaVersion.value,
    "org.scala-lang" % "scala-reflect" % scalaVersion.value,
    "org.scala-lang" % "scala-compiler" % scalaVersion.value,
    "org.scala-lang.modules" %% "scala-xml" % "1.0.5",
    "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.4",
    "org.slf4j" % "slf4j-api" % "1.7.13"
  ),
  libraryDependencies ++= Seq(
    "org.skinny-framework" %% "skinny-framework" % skinnyVersion,
    "org.skinny-framework" %% "skinny-assets" % skinnyVersion,
    "org.skinny-framework" %% "skinny-task" % skinnyVersion,
    "org.skinny-framework" % "skinny-logback" % "1.0.6",
    "com.h2database" % "h2" % "1.4.190", // your own JDBC driver
    "org.skinny-framework" %% "skinny-factory-girl" % skinnyVersion % "test",
    "org.skinny-framework" %% "skinny-test" % skinnyVersion % "test",
    // for Skinny 1.x tests compatibility
    // "org.scalatra" %% "scalatra-scalatest" % "2.3.1" % "test"
    "org.eclipse.jetty" % "jetty-webapp" % jettyVersion % "container",
    "org.eclipse.jetty" % "jetty-plus" % jettyVersion % "container",
    "javax.servlet" % "javax.servlet-api" % "3.1.0" % "container;provided;test"
  ).map(applyDefaultExclusions),
  resolvers ++= Seq(
    "sonatype releases" at "https://oss.sonatype.org/content/repositories/releases"
    //, "sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
  ),
  // Faster "./skinny idea"
  transitiveClassifiers in Global := Seq(Artifact.SourceClassifier),
  // the name-hashing algorithm for the incremental compiler.
  incOptions := incOptions.value.withNameHashing(true),
  updateOptions := updateOptions.value.withCachedResolution(true),
  logBuffered in Test := false,
  // Tests run in a forked JVM with the "test" Skinny environment selected.
  javaOptions in Test ++= Seq("-Dskinny.env=test"),
  fork in Test := true,
  scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature"),
  ideaExcludeFolders := Seq(".idea", ".idea_modules", "db", "target", "task/target", "build", "standalone-build", "node_modules")
)
lazy val scalatePrecompileSettings = scalateSettings ++ Seq(
scalateTemplateConfig in Compile <<= (sourceDirectory in Compile){ base =>
Seq( TemplateConfig(file(".") / "src" / "main" / "webapp" / "WEB-INF",
// These imports should be same as src/main/scala/templates/ScalatePackage.scala
Seq("import controller._", "import model._"),
Seq(Binding("context", "_root_.skinny.micro.contrib.scalate.SkinnyScalateRenderContext", importMembers = true, isImplicit = true)),
Some("templates")))
}
)
lazy val jettyOrbitHack = Seq(ivyXML := <dependencies><exclude org="org.eclipse.jetty.orbit" /></dependencies>)
// -------------------------------------------------------
// Development
// -------------------------------------------------------
lazy val devBaseSettings = baseSettings ++ Seq(
unmanagedClasspath in Test <+= (baseDirectory) map { bd => Attributed.blank(bd / "src/main/webapp") },
// Integration tests become slower when multiple controller tests are loaded in the same time
parallelExecution in Test := false,
port in container.Configuration := 8080
)
lazy val dev = Project(id = "dev", base = file("."),
settings = devBaseSettings ++ Seq(
name := appName + "-dev",
target := baseDirectory.value / "target" / "dev"
)
)
lazy val precompileDev = Project(id = "precompileDev", base = file("."),
settings = devBaseSettings ++ scalatePrecompileSettings ++ Seq(
name := appName + "-precompile-dev",
target := baseDirectory.value / "target" / "precompile-dev",
ideaIgnoreModule := true
)
)
// -------------------------------------------------------
// Task Runner
// -------------------------------------------------------
lazy val task = Project(id = "task", base = file("task"),
settings = baseSettings ++ Seq(
mainClass := Some("TaskRunner"),
name := appName + "-task"
)
) dependsOn(dev)
// -------------------------------------------------------
// Packaging
// -------------------------------------------------------
lazy val packagingBaseSettings = baseSettings ++ scalatePrecompileSettings ++ Seq(
sources in doc in Compile := List(),
publishTo <<= version { (v: String) =>
val base = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT")) Some("snapshots" at base + "content/repositories/snapshots")
else Some("releases" at base + "service/local/staging/deploy/maven2")
}
)
lazy val build = Project(id = "build", base = file("build"),
settings = packagingBaseSettings ++ Seq(
name := appName,
ideaIgnoreModule := true
)
)
lazy val standaloneBuild = Project(id = "standalone-build", base = file("standalone-build"),
settings = packagingBaseSettings ++ Seq(
name := appName + "-standalone",
libraryDependencies += "org.skinny-framework" %% "skinny-standalone" % skinnyVersion,
ideaIgnoreModule := true
) ++ jettyOrbitHack
)
}
| ijufumi/demo-scala | skinny-blank-app/project/Build.scala | Scala | mit | 6,275 |
package com.outr.stripe.bank
import scala.scalajs.js
/**
 * Scala.js facade for the JavaScript bank-account response object.
 * All bodies are `js.native` stubs; member names mirror the JS property
 * names exactly (hence snake_case `bank_name` and the backtick-quoted
 * `object`, which is a Scala keyword).
 */
@js.native
trait StripeBankResponse extends js.Object {
  def country: String = js.native
  def bank_name: String = js.native
  def last4: String = js.native
  def validated: Boolean = js.native
  def `object`: String = js.native
}
| outr/scala-stripe | core/js/src/main/scala/com/outr/stripe/bank/StripeBankResponse.scala | Scala | mit | 287 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.responders.v2
import akka.testkit.{ImplicitSender, TestActorRef}
import org.knora.webapi.messages.store.triplestoremessages.RdfDataObject
import org.knora.webapi.messages.v2.responder.listsmessages.{ListGetRequestV2, ListGetResponseV2, NodeGetRequestV2, NodeGetResponseV2}
import org.knora.webapi.util.StringFormatter
import org.knora.webapi.{CoreSpec, SharedTestDataADM}
import scala.concurrent.duration._
/** Companion holding shared test fixtures for the spec below. */
object ListsResponderV2Spec {
    // The user on whose behalf the list/node requests are made.
    private val userProfile = SharedTestDataADM.anythingUser2
}
/**
* Tests [[ListsResponderV2]].
*/
class ListsResponderV2Spec extends CoreSpec() with ImplicitSender {
    import ListsResponderV2Spec._
    private implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
    // Expected responses for the assertions below come from this fixture class.
    private val listsResponderV2SpecFullData = new ListsResponderV2SpecFullData
    // Triplestore data loaded before the tests run.
    override lazy val rdfDataObjects = List(
        RdfDataObject(path = "_test_data/all_data/incunabula-data.ttl", name = "http://www.knora.org/data/0803/incunabula"),
        RdfDataObject(path = "_test_data/demo_data/images-demo-data.ttl", name = "http://www.knora.org/data/00FF/images"),
        RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/0001/anything")
    )
    // The default timeout for receiving reply messages from actors.
    private val timeout = 10.seconds
    "The lists responder v2" should {
        "return a list" in {
            responderManager ! ListGetRequestV2("http://rdfh.ch/lists/0001/treeList", userProfile)
            expectMsgPF(timeout) {
                case response: ListGetResponseV2 =>
                    assert(response == listsResponderV2SpecFullData.treeList)
            }
        }
        "return a node" in {
            responderManager ! NodeGetRequestV2("http://rdfh.ch/lists/0001/treeList11", userProfile)
            expectMsgPF(timeout) {
                case response: NodeGetResponseV2 =>
                    assert(response == listsResponderV2SpecFullData.treeNode)
            }
        }
    }
} | musicEnfanthen/Knora | webapi/src/test/scala/org/knora/webapi/responders/v2/ListsResponderV2Spec.scala | Scala | agpl-3.0 | 2,828 |
package org.littlewings.infinispan.query
import scala.collection.JavaConverters._
import java.text.SimpleDateFormat
import org.infinispan.Cache
import org.infinispan.manager.DefaultCacheManager
import org.infinispan.query.Search
import org.scalatest.FunSpec
import org.scalatest.Matchers._
import org.littlewings.infinispan.query.entity.Book
// Exercises Infinispan's Hibernate-Search-backed query API (keyword, phrase,
// range and bool queries) against a cache of Book fixtures, including a
// clustered run. The `toString` assertions pin the exact Lucene query text
// produced for the tokenized Japanese input.
class InfinispanQuerySpec extends FunSpec {
  // Parses "yyyy/MM/dd" strings into java.util.Date for the fixtures below.
  val toDate = (dateString: String) => new SimpleDateFormat("yyyy/MM/dd").parse(dateString)
  val luceneBook: Book =
    Book("978-4774127804",
      "Apache Lucene 入門 ~Java・オープンソース・全文検索システムの構築",
      "Luceneは全文検索システムを構築するためのJavaのライブラリです。",
      3360,
      toDate("2006/05/17"))
  val solrBook: Book =
    Book("978-4774161631",
      "[改訂新版] Apache Solr入門 オープンソース全文検索エンジン",
      "最新版Apaceh Solr Ver.4.5.1に対応するため大幅な書き直しと原稿の追加を行い、現在の開発環境に合わせて完全にアップデートしました。Apache Solrは多様なプログラミング言語に対応した全文検索エンジンです。",
      3780,
      toDate("2013/11/29"))
  val collectiveIntelligenceInActionBook: Book =
    Book("978-4797352009",
      "集合知イン・アクション",
      "レコメンデーションエンジンをつくるには?ブログやSNSのテキスト分析、ユーザー嗜好の予測モデル、レコメンデーションエンジン……Web 2.0の鍵「集合知」をJavaで実装しよう!",
      3990,
      toDate("2009/03/27"))
  val books: Array[Book] = Array(luceneBook, solrBook, collectiveIntelligenceInActionBook)
  describe("infinispan query spec") {
    it("keyword query") {
      withCache { cache =>
        books.foreach(book => cache.put(book.isbn, book))
        val searchManager = Search.getSearchManager(cache)
        val queryBuilder = searchManager.buildQueryBuilderForClass(classOf[Book]).get
        val luceneQuery =
          queryBuilder
            .keyword
            .onField("title")
            .andField("summary")
            .matching("オープンソース 全文検索システムの構築")
            .createQuery
        luceneQuery.toString should be ("(title:オープン title:ソース title:全文 title:検索 title:システム title:構築) (summary:オープン summary:ソース summary:全文 summary:検索 summary:システム summary:構築)")
        val query = searchManager.getQuery(luceneQuery, classOf[Book])
        val result = query.list
        result should have size 2
        result.get(0) should be (luceneBook)
        result.get(1) should be (solrBook)
      }
    }
    it("phrase query") {
      withCache { cache =>
        books.foreach(book => cache.put(book.isbn, book))
        val searchManager = Search.getSearchManager(cache)
        val queryBuilder = searchManager.buildQueryBuilderForClass(classOf[Book]).get
        val luceneQuery =
          queryBuilder
            .phrase
            .onField("title")
            .andField("summary")
            .sentence("オープンソース 全文検索システムの構築")
            .createQuery
        luceneQuery.toString should be ("title:\\"オープン ソース 全文 検索 システム ? 構築\\" summary:\\"オープン ソース 全文 検索 システム ? 構築\\"")
        val query = searchManager.getQuery(luceneQuery, classOf[Book])
        val result = query.list
        // Only the Lucene book contains the terms as a contiguous phrase.
        result should have size 1
        result.get(0) should be (luceneBook)
      }
    }
    it("range query") {
      withCache { cache =>
        books.foreach(book => cache.put(book.isbn, book))
        val searchManager = Search.getSearchManager(cache)
        val queryBuilder = searchManager.buildQueryBuilderForClass(classOf[Book]).get
        // Inclusive price range 3500..4000.
        val luceneQuery =
          queryBuilder
            .range
            .onField("price")
            .from(3500)
            .to(4000)
            .createQuery
        luceneQuery.toString should be ("price:[3500 TO 4000]")
        val query = searchManager.getQuery(luceneQuery, classOf[Book])
        val result = query.list
        result should have size 2
        result.get(0) should be (solrBook)
        result.get(1) should be (collectiveIntelligenceInActionBook)
      }
    }
    it("bool query") {
      withCache { cache =>
        books.foreach(book => cache.put(book.isbn, book))
        val searchManager = Search.getSearchManager(cache)
        val queryBuilder = searchManager.buildQueryBuilderForClass(classOf[Book]).get
        // Two `should` clauses: title matches OR summary matches.
        val luceneQuery =
          queryBuilder
            .bool
            .should {
              queryBuilder
                .keyword
                .onField("title")
                .matching("全文検索")
                .createQuery
            }.should {
              queryBuilder
                .keyword
                .onField("summary")
                .matching("java")
                .createQuery
            }.createQuery
        luceneQuery.toString should be ("(title:全文 title:検索) summary:java")
        val query = searchManager.getQuery(luceneQuery, classOf[Book])
        val result = query.list
        result should have size 3
        result.get(0) should be (luceneBook)
        result.get(1) should be (solrBook)
        result.get(2) should be (collectiveIntelligenceInActionBook)
      }
    }
    it("clustered spec") {
      // Outer withCache keeps a second cluster member alive while the inner
      // members write and then query the same data.
      withCache { _ =>
        withCache { cache =>
          books.foreach(book => cache.put(book.isbn, book))
        }
        withCache { cache =>
          val searchManager = Search.getSearchManager(cache)
          val queryBuilder = searchManager.buildQueryBuilderForClass(classOf[Book]).get
          val luceneQuery =
            queryBuilder
              .keyword
              .onField("title")
              .andField("summary")
              .matching("オープンソース 全文検索システムの構築")
              .createQuery
          luceneQuery.toString should be ("(title:オープン title:ソース title:全文 title:検索 title:システム title:構築) (summary:オープン summary:ソース summary:全文 summary:検索 summary:システム summary:構築)")
          val query = searchManager.getQuery(luceneQuery, classOf[Book])
          val result = query.list
          result should have size 2
          result.get(0) should be (luceneBook)
          result.get(1) should be (solrBook)
          // The index itself is stored in these internal caches.
          cache.getCacheManager.getCacheNames should contain theSameElementsAs(Array("bookCache",
                                                                                     "My-LuceneIndexesLocking",
                                                                                     "My-LuceneIndexesData",
                                                                                     "My-LuceneIndexesMetadata",
                                                                                     "__cluster_registry_cache__"))
        }
      }
    }
  }
  // Runs `fun` against a fresh "bookCache" from infinispan.xml, always
  // stopping the cache and the cache manager afterwards.
  def withCache(fun: Cache[String, Book] => Unit): Unit = {
    val manager = new DefaultCacheManager("infinispan.xml")
    try {
      val cache = manager.getCache[String, Book]("bookCache")
      try {
        fun(cache)
      } finally {
        cache.stop()
      }
    } finally {
      manager.stop()
    }
  }
}
| kazuhira-r/infinispan-examples | infinispan-query-config-analyzerdef/src/test/scala/org/littlewings/infinispan/query/InfinispanQuerySpec.scala | Scala | mit | 7,524 |
/*
Copyright 2012-2015, University of Geneva.
This file is part of Great Balls of Fire (GBF).
Great Balls of Fire (GBF) is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Great Balls of Fire (GBF) is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with Great Balls of Fire (GBF). If not, see
<http://www.gnu.org/licenses/>.
*/
package ch.unige.gbf
package terrain
import scala.io.Source
import terrain._
import util.arrays._
case class VolcanoDEM( terrain: DEM, vent: SafeArray[Double] )
object VolcanoDEM {
  import org.streum.configrity._
  import util.conf._
  // Reads the vent position from the "E", "N" and "altitude" config keys
  // and packs it into a 3-element safe array.
  lazy val ventReader = for {
    e <- read[Double]( "E" )
    n <- read[Double]( "N" )
    a <- read[Double]( "altitude" )
  } yield safe( Array( e, n, a ) )
  // Full reader: loads the DEM from the file named by "demFile" and reads
  // the vent from the nested "vent" config section (via `detaching`).
  lazy val reader = for {
    demFile <- read[String]( "demFile" )
    vent <- ventReader.detaching( "vent" )
  } yield {
    val dem = DEM.read( demFile )
    VolcanoDEM( dem, vent )
  }
  // Hard-coded instance (dem/dem_10m.txt plus a fixed vent) — presumably
  // the fallback when no configuration is supplied; confirm at call sites.
  lazy val default = VolcanoDEM(
    DEM.read( "dem/dem_10m.txt" ),
    safe( Array( 496682.0, 4250641.0, 400.0 ) )
  )
}
| unigeSPC/gbf | simulator/src/terrain/VolcanoDEM.scala | Scala | gpl-3.0 | 1,482 |
package scala.pickling
package runtime
import scala.reflect.{runtime => reflectRuntime}
import internal._
// TODO - Move all these into the "PicklerRegistry"
// TODO - Move all these into the "PicklerRegistry"
object CustomRuntime {
  // Builds a runtime pickler/unpickler pair for any Traversable collection.
  // Elements are (un)pickled with the supplied element pickler; when the
  // element type is effectively primitive, the element tag is elided and
  // the hints pinned for the whole collection. Unpickling materializes the
  // elements into a java array of `elemClass`.
  // NOTE: `C <% Traversable[_]` is a (deprecated) view bound.
  def mkRuntimeTravPickler[C <% Traversable[_]](elemClass: Class[_], elemTag: FastTypeTag[_], collTag: FastTypeTag[_],
                                                elemPickler0: Pickler[_], elemUnpickler0: Unpickler[_]):
    Pickler[C] with Unpickler[C] = new Pickler[C] with Unpickler[C] {
    val elemPickler = elemPickler0.asInstanceOf[Pickler[AnyRef]]
    val elemUnpickler = elemUnpickler0.asInstanceOf[Unpickler[AnyRef]]
    val isPrimitive = elemTag.tpe.isEffectivelyPrimitive
    def tag: FastTypeTag[C] = collTag.asInstanceOf[FastTypeTag[C]]
    def pickle(coll: C, builder: PBuilder): Unit = {
      builder.beginEntry(coll, tag)
      builder.beginCollection(coll.size)
      // push/pin/pop hint calls must stay in exactly this order.
      builder.pushHints()
      if (isPrimitive) {
        builder.hintElidedType(elemTag)
        builder.pinHints()
      }
      (coll: Traversable[_]).asInstanceOf[Traversable[AnyRef]].foreach { (elem: AnyRef) =>
        builder putElement { b =>
          elemPickler.pickle(elem, b)
        }
      }
      builder.popHints()
      builder.endCollection()
      builder.endEntry()
    }
    def unpickle(tag: String, preader: PReader): Any = {
      val reader = preader.beginCollection()
      // NOTE(review): hints are pushed/popped on `preader` but hinted/pinned
      // on the collection `reader` — looks intentional, but verify against
      // the PReader hint contract before touching.
      preader.pushHints()
      if (isPrimitive) {
        reader.hintElidedType(elemTag)
        reader.pinHints()
      }
      val length = reader.readLength()
      val newArray = java.lang.reflect.Array.newInstance(elemClass, length).asInstanceOf[Array[AnyRef]]
      var i = 0
      while (i < length) {
        try {
          val r = reader.readElement()
          val elem = elemUnpickler.unpickleEntry(r)
          newArray(i) = elem.asInstanceOf[AnyRef]
          i = i + 1
        } catch {
          // Both handlers re-throw with the collection/element tags added
          // for diagnosis; nothing is swallowed.
          case PicklingException(msg, cause) =>
            throw PicklingException(s"""error in unpickle of 'mkRuntimeTravPickler':
                                       |collTag: '${collTag.key}'
                                       |elemTag: '${elemTag.key}'
                                       |message:
                                       |$msg""".stripMargin, cause)
          case e: Exception =>
            e.printStackTrace()
            throw PicklingException(s"""exception in unpickle of 'mkRuntimeTravPickler':
                                       |collTag: '${collTag.key}'
                                       |elemTag: '${elemTag.key}'""".stripMargin, Some(e))
        }
      }
      preader.popHints()
      preader.endCollection()
      newArray
    }
  }
}
/**
 * Runtime unpickler for `Tuple2` when the element unpicklers are already
 * known. Fields are read in order under the names "_1" and "_2"; primitive
 * element tags are elided to match the corresponding pickler output.
 */
class Tuple2RTKnownTagUnpickler[L, R](lhs: Unpickler[L], rhs: Unpickler[R]) extends AbstractUnpickler[(L,R)] {
  /** Reads one tuple component with the given unpickler. */
  def unpickleField[T](name: String, reader: PReader, unpickler: Unpickler[T]): T = {
    val reader1 = reader.readField(name)
    // TODO - Always elide tags?
    if(unpickler.tag.isEffectivelyPrimitive) reader1.hintElidedType(unpickler.tag)
    unpickler.unpickleEntry(reader1).asInstanceOf[T]
  }
  override def unpickle(tag: String, reader: PReader): Any = {
    (unpickleField("_1", reader, lhs), unpickleField("_2", reader, rhs))
  }
  // Fixed: the interpolation previously had a stray '}' after the second
  // key ("scala.Tuple2[A,B}]"), producing a malformed type-tag key.
  override def tag: FastTypeTag[(L, R)] =
    FastTypeTag.apply(currentMirror, s"scala.Tuple2[${lhs.tag.key},${rhs.tag.key}]").asInstanceOf[FastTypeTag[(L,R)]]
}
// TODO - This pickler should actually use the known tag if it is passed. Currently it is never used.
// Fully-dynamic runtime pickler/unpickler for (Any, Any): each component's
// tag and pickler are resolved reflectively from the runtime class at
// pickle time, and from the stored tag at unpickle time.
class Tuple2RTPickler() extends AbstractPicklerUnpickler[(Any, Any)] {
  def tag = FastTypeTag[(Any, Any)]
  // Pickles one component under `name`, resolving tag and pickler from the
  // value's runtime class; null gets the dedicated null tag/pickler.
  def pickleField(name: String, value: Any, builder: PBuilder): Unit = {
    val (tag1, pickler1) = if (value == null) {
      (FastTypeTag.Null.asInstanceOf[FastTypeTag[Any]], Defaults.nullPickler.asInstanceOf[Pickler[Any]])
    } else {
      val clazz = value.getClass
      val tag = FastTypeTag.mkRaw(clazz, reflectRuntime.currentMirror).asInstanceOf[FastTypeTag[Any]]
      val pickler = scala.pickling.internal.currentRuntime.picklers.genPickler(clazz.getClassLoader, clazz, tag).asInstanceOf[Pickler[Any]]
      (tag, pickler)
    }
    builder.putField(name, b => {
      pickler1.pickle(value, b)
    })
  }
  def pickle(picklee: (Any, Any), builder: PBuilder): Unit = {
    // println(s"@@@ using runtime ${this.getClass.getName}")
    builder.beginEntry(picklee, tag)
    val fld1 = picklee._1
    pickleField("_1", fld1, builder)
    val fld2 = picklee._2
    pickleField("_2", fld2, builder)
    builder.endEntry()
    // val specialPickler = new SpecialTuple2Pickler(tag1, pickler1, tag2, pickler2)
    // SpecialTuple2Pickler.classSelection += ((class1 -> class2) -> Selection(specialPickler, tag))
    // println(s"@@@ registered dynamic specialized pickler ${specialPickler.getClass.getName}")
  }
  // Reads one component: primitives are read directly, otherwise an
  // unpickler is generated from the tag stored in the pickle.
  def unpickleField(name: String, reader: PReader): Any = {
    val reader1 = reader.readField(name)
    val tag1 = reader1.beginEntry()
    val value = {
      if (reader1.atPrimitive) {
        reader1.readPrimitive()
      } else {
        val unpickler1 = internal.currentRuntime.picklers.genUnpickler(reflectRuntime.currentMirror, tag1)
        try {
          unpickler1.unpickle(tag1, reader1)
        } catch {
          // Re-throw with field name/tag context; nothing is swallowed.
          case PicklingException(msg, cause) =>
            throw PicklingException(s"""error in unpickle of '${this.getClass.getName}':
                                       |field name: '$name'
                                       |field tag: '${tag1}'
                                       |message:
                                       |$msg""".stripMargin, cause)
        }
      }
    }
    reader1.endEntry()
    value
  }
  def unpickle(tag: String, reader: PReader): Any = {
    val fld1 = unpickleField("_1", reader)
    val fld2 = unpickleField("_2", reader)
    (fld1, fld2)
  }
}
| beni55/pickling | core/src/main/scala/scala/pickling/runtime/CustomRuntime.scala | Scala | bsd-3-clause | 5,842 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.appjet.oui;
import scala.collection.mutable.HashMap;
import java.util.regex.Pattern;
import java.net.URL;
import org.mortbay.jetty.servlet.Context;
import org.mozilla.javascript.{Scriptable, ScriptableObject, Context => JSContext};
import net.appjet.common.util.BetterFile;
// Global configuration registry for the AppJet server.
//
// WARNING: `stringOrElse` substitutes "[paramName]" occurrences in values by
// reflectively invoking the accessor method of that name on this object
// (see propertiesPattern below), and `allProperties` enumerates accessors
// via reflection — renaming any def here changes runtime behavior.
object config {
  // Raw key -> value store; the typed accessors below all read from it.
  val values = new HashMap[String, String];
  // Looks up `name`; any "[otherParam]" token inside the value is replaced
  // by the result of calling that parameter's accessor reflectively.
  def stringOrElse(name: String, default: String): String = {
    val v = values.getOrElse(name, default);
    if (v != null) {
      val m = propertiesPattern.matcher(v);
      val sb = new StringBuffer();
      while (m.find()) {
        m.appendReplacement(sb, getClass.getDeclaredMethod(m.group(1), Array[Class[_]](): _*).invoke(this, Array[Class[_]](): _*).asInstanceOf[String]);
      }
      m.appendTail(sb);
      sb.toString();
    } else {
      null;
    }
  }
  // True iff the stored value is (case-insensitively) "true", ignoring whitespace.
  def boolOrElse(name: String, default: Boolean) = values.get(name).map(_.matches("(?i)\\\\s*true\\\\s*")).getOrElse(default);
  def intOrElse(name: String, default: Int) = values.get(name).map(Integer.parseInt(_)).getOrElse(default);
  def longOrElse(name: String, default: Long) = values.get(name).map(java.lang.Long.parseLong(_)).getOrElse(default);
  @ConfigParam("Read configuration options from this file before processing any command-line flags.")
  { val argName = "file" }
  def configFile = stringOrElse("configFile", null);
  // configuation parameters
  var specialDebug = false;
  @ConfigParam("Enable additional logging output.")
  def verbose = boolOrElse("verbose", false);
  @ConfigParam("Activate \\"developer\\" mode.")
  def devMode = boolOrElse("devMode", false);
  @ConfigParam("Activate \\"profiling\\" mode.")
  def profile = boolOrElse("profile", false);
  @ConfigParam("Directory to use for storing appjet support files, logs, etc. This directory will be created if it does not exist and must be writeable by the user who runs appjet.jar. Defaults to current working directory.")
  { val argName = "directory" }
  def appjetHome = stringOrElse("appjetHome", "appjet");
  @ConfigParam("Directory to use for storing built-in database (Apache Derby) files. Will be created if it doesn't exist. Defaults to [appjetHome]/db")
  def derbyHome = stringOrElse("derbyHome", "[appjetHome]/derbydb");
  @ConfigParam("Directory to use for storing appserver logs. Defaults to [appjetHome]/log/appserver")
  { val argName = "directory" }
  def logDir = stringOrElse("logDir", "[appjetHome]/log/appserver");
  @ConfigParam("Bla bla")
  { val argName = "" }
  def logInclude = stringOrElse("logInclude", "");
  @GeneratedConfigParam
  def logIncludeLst = if (logInclude != "") logInclude.split(",") else null;
  @ConfigParam("Bla bla")
  { val argName = "" }
  def logExclude = stringOrElse("logExclude", "");
  @GeneratedConfigParam
  def logExcludeLst = if (logExclude != "") logExclude.split(",") else null;
  @ConfigParam("Optional alternative directory to load built-in libraries from. Used by AppJet platform hackers to develop and debug built-in libraries. Default: use built-in libraries.")
  { val argName = "directory" }
  def ajstdlibHome = stringOrElse("ajstdlibHome", null);
  @ConfigParam("Optional directory to specify as the \\"app home\\".")
  { val argName = "directory" }
  def appHome = stringOrElse("appHome", "");
  @ConfigParam("Whether to generate https URLs even if running locally behind HTTP (useful for Apache handling HTTPS)")
  def useHttpsUrls = boolOrElse("useHttpsUrls", false);
  @ConfigParam("Search path for modules imported via \\"import\\". Defaults to current working directory.")
  { val argName = "dir1:dir2:..." }
  def modulePath = stringOrElse("modulePath", null);
  // Module search order: cwd, then modulePath entries, then the stdlib home.
  def moduleRoots =
    Array.concat(Array("."), if (modulePath != null) modulePath.split(":") else Array[String](), Array(ajstdlibHome));
  @ConfigParam("Where to read the static files from on the local filesystem. Don't specify this to read static files from the classpath/JAR.")
  { val argName = "directory" }
  def useVirtualFileRoot = stringOrElse("useVirtualFileRoot", null);
  @ConfigParam("Directory to use for storing the temporary sessions file on shutdown. Will be created if it does not exist.")
  { val argName = "directory" }
  def sessionStoreDir = stringOrElse("sessionStoreDir", "[appjetHome]/sessions");
  // performance tuning
  @ConfigParam("Create this many runners before opening up the server.")
  { val argName = "count" }
  def preloadRunners = intOrElse("preloadRunners", 0);
  @ConfigParam("Have this many JDBC connections available in the pool.")
  { val argName = "count" }
  def jdbcPoolSize = intOrElse("jdbcPoolSize", 10);
  @ConfigParam("Max count of worker threads.")
  { val argName = "num" }
  def maxThreads = intOrElse("maxThreads", 250);
  // specifying ports and such
  // Splits "host:port" into (host, port); a bare "port" yields ("", port).
  def extractHostAndPort(s: String): (String, Int) =
    if (s.indexOf(":") >= 0)
      (s.split(":")(0), Integer.parseInt(s.split(":")(1)))
    else
      ("", Integer.parseInt(s))
  @ConfigParam("Whether to show the port numbers to the outside world (false: assume ports visible from the outside are the default http/https ports)")
  def hidePorts = boolOrElse("hidePorts", false);
  @ConfigParam("[host:]port on which to serve the app. Default: 8080.")
  { val argName = "[host:]port" }
  def listen = stringOrElse("listen", "8080");
  @GeneratedConfigParam
  def listenHost = extractHostAndPort(listen)._1;
  @GeneratedConfigParam
  def listenPort = extractHostAndPort(listen)._2;
  @ConfigParam("[host:]port on which to serve the app using SSL. Default: none.")
  { val argName = "[host:]port" }
  def listenSecure = stringOrElse("listenSecure", "0");
  @GeneratedConfigParam
  def listenSecureHost = extractHostAndPort(listenSecure)._1;
  @GeneratedConfigParam
  def listenSecurePort = extractHostAndPort(listenSecure)._2;
  @ConfigParam("[host:]port:port on which to listen for monitoring. Default: none.")
  { val argName = "[host:]primaryPort:secondaryPort" }
  def listenMonitoring = stringOrElse("listenMonitoring", "0:0");
  // Splits "[host:]p1:p2" into (host, p1, p2); host defaults to "".
  def extractHostAndPortPort(s: String): (String, Int, Int) = {
    val spl = s.split(":", 3);
    if (spl.length > 2)
      (spl(0), Integer.parseInt(spl(1)), Integer.parseInt(spl(2)))
    else
      ("", Integer.parseInt(spl(0)), Integer.parseInt(spl(1)));
  }
  @GeneratedConfigParam
  def listenMonitoringHost = extractHostAndPortPort(listenMonitoring)._1;
  @GeneratedConfigParam
  def listenMonitoringPrimaryPort = extractHostAndPortPort(listenMonitoring)._2;
  @GeneratedConfigParam
  def listenMonitoringSecondaryPort = extractHostAndPortPort(listenMonitoring)._3;
  @ConfigParam("[host:]port on which to listen for RPCs (via SARS). Default: none.")
  { val argName = "[host:]port" }
  def listenSars = stringOrElse("listenSars", "0");
  @GeneratedConfigParam
  def listenSarsHost = extractHostAndPort(listenSars)._1;
  @GeneratedConfigParam
  def listenSarsPort = extractHostAndPort(listenSars)._2;
  // Licensing
  @ConfigParam("Private key for generating license keys.")
  { val argName = "pathToKey" }
  def licenseGeneratorKey = stringOrElse("licenseGeneratorKey", null);
  // SARS
  @ConfigParam("SARS auth key. Default: \\"appjet\\".")
  { val argName = "authkey" }
  def sarsAuthKey = stringOrElse("sarsAuthKey", "appjet");
  // SSL
  @ConfigParam("[SSL] Keystore location. Default: appjetHome/sslkeystore.")
  { val argName = "keystore" }
  def sslKeyStore = stringOrElse("sslKeyStore", appjetHome+"/sslkeystore");
  // True iff the user explicitly configured a keystore (vs. the default).
  def sslKeyStore_isSet = values.contains("sslKeyStore");
  @ConfigParam("[SSL] Key password. Default: same as store password.")
  { val argName = "password" }
  def sslKeyPassword = stringOrElse("sslKeyPassword", "[sslStorePassword]");
  @ConfigParam("[SSL] Store password. Default: 'appjet'.")
  { val argName = "password" }
  def sslStorePassword = stringOrElse("sslStorePassword", "appjet");
  // email
  @ConfigParam("host:port of mail server to use for sending email. Default: localhost:25.")
  { val argName = "host:port" }
  def smtpServer = stringOrElse("smtpServer", "localhost:25");
  def smtpServerHost = extractHostAndPort(smtpServer)._1;
  def smtpServerPort = extractHostAndPort(smtpServer)._2;
  @ConfigParam("username for authentication to mail server. Default: no authentication.")
  { val argName = "username" }
  def smtpUser = stringOrElse("smtpUser", "");
  @ConfigParam("password for authentication to mail server. Default: no authentication.")
  { val argName = "password" }
  def smtpPass = stringOrElse("smtpPass", "");
  @ConfigParam("true or false to use starttls (TLS authentication) when connecting to mail server. Default: false.")
  { val argName = "smtpStartTls" }
  def smtpStartTls = stringOrElse("smtpStartTls", "false");
  // comet
  @ConfigParam("prefix for all comet requests. Required to use Comet system.")
  { val argName = "path" }
  def transportPrefix = stringOrElse("transportPrefix", null);
  @ConfigParam("Use a subdomain for all comet requests.")
  def transportUseWildcardSubdomains = boolOrElse("transportUseWildcardSubdomains", false);
  @ConfigParam("Don't use short polling, ever.")
  def disableShortPolling = boolOrElse("disableShortPolling", false);
  // helpers
  // Every accessor annotated @ConfigParam or @GeneratedConfigParam, found
  // reflectively; drives substitution, toString, print and configObject.
  val allProperties =
    for (m <- getClass.getDeclaredMethods() if (m.getAnnotation(classOf[ConfigParam]) != null || m.getAnnotation(classOf[GeneratedConfigParam]) != null))
      yield m;
  val configParamNames =
    for (m <- allProperties if m.getAnnotation(classOf[ConfigParam]) != null) yield m.getName
  lazy val allPropertiesMap =
    Map((for (m <- allProperties) yield ((m.getName, () => m.invoke(this)))): _*);
  // Matches "[paramName]" for any known property; used by stringOrElse.
  val propertiesPattern = Pattern.compile("\\\\[("+allProperties.map(x => "(?:"+x.getName()+")").mkString("|")+")\\\\]");
  override def toString() =
    (allProperties.map(m => m.getName()+" -> "+m.invoke(this)) ++
     values.keys.toList.filter(! allPropertiesMap.contains(_)).map(k => k+" -> "+values(k))).mkString("[Config ", ", ", "]");
  // Dumps all known properties, then any extra raw values, to stdout.
  def print {
    for (m <- allProperties) {
      println(m.getName() + " -> " + m.invoke(this));
    }
    for ((k, v) <- values if (! allPropertiesMap.contains(k))) {
      println(k + " -> " + v);
    }
  }
  // Exposes this config to JavaScript as a read/write scriptable object:
  // reads resolve known properties first, writes land in `values`.
  def configObject(globalScope: Scriptable) =
    new ScriptableAdapter {
      val keys = (Set.empty[Object] ++ allProperties.map(m => m.getName) ++ values.keySet).toList.toArray;
      override def get(n: String, start: Scriptable) =
        allPropertiesMap.getOrElse(n, () => values.getOrElse(n, JSContext.getUndefinedValue()))();
      override def put(n: String, start: Scriptable, value: Object) =
        values(n) = value.toString();
      override def getIds() = keys;
      override def getPrototype() = ScriptableObject.getObjectPrototype(globalScope);
      override def has(n: String, start: Scriptable) =
        allPropertiesMap.contains(n) || values.contains(n);
      override def getDefaultValue(hint: Class[_]) = config.toString();
    }
}
// Mutable holder for the process-wide servlet Context (assigned elsewhere).
object global {
  var context: Context = null;
}
| floatingatoll/pad | infrastructure/net.appjet.oui/config.scala | Scala | apache-2.0 | 11,823 |
package uk.gov.gds.ier.transaction.forces.nationality
import uk.gov.gds.ier.validation.{ErrorTransformForm, ErrorMessages, FormKeys}
import uk.gov.gds.ier.model.{PartialNationality, Contact}
import play.api.data.Forms._
import uk.gov.gds.ier.transaction.forces.InprogressForces
import play.api.data.validation.{Valid, Invalid, Constraint}
import uk.gov.gds.ier.validation.constants.NationalityConstants
trait NationalityForms extends NationalityConstraints {
  self: FormKeys
    with ErrorMessages =>
  // Play form for the nationality step: binds the optional nationality and
  // contact sections into an InprogressForces, then applies the cross-field
  // constraints declared in NationalityConstraints.
  val nationalityForm = ErrorTransformForm(
    mapping(
      keys.nationality.key -> optional(PartialNationality.mapping),
      keys.contact.key -> optional(Contact.mapping)
    ) (
      (nationality, contact) => InprogressForces(
        nationality = nationality,
        contact = contact
      )
    ) (
      inprogress => Some(inprogress.nationality, inprogress.contact)
    ) verifying (
      nationalityIsChosen,
      notTooManyNationalities,
      otherCountry0IsValid,
      otherCountry1IsValid,
      otherCountry2IsValid,
      atleastOneOtherCountryIfHasOtherCountry
    )
  )
}
/**
 * Cross-field validation constraints for the nationality form. All error
 * messages and error keys are unchanged from the original implementation.
 */
trait NationalityConstraints extends FormKeys with ErrorMessages {

  // Number of "other country" entries the applicant filled in; 0 when the
  // nationality section is absent. (Replaces three copies of a confusing
  // Option.foldLeft whose accumulator was never used.)
  private def otherCountryCount(application: InprogressForces): Int =
    application.nationality.map(_.otherCountries.size).getOrElse(0)

  // If the applicant ticked "citizen of another country", at least one
  // country must actually be listed.
  lazy val atleastOneOtherCountryIfHasOtherCountry = Constraint[InprogressForces] (
    keys.nationality.otherCountries.key
  ) { application =>
    val hasOtherCountry = application.nationality.flatMap(_.hasOtherCountry)
    (hasOtherCountry, otherCountryCount(application)) match {
      case (Some(true), 0) => Invalid(
        "Please answer this question",
        keys.nationality.otherCountries
      )
      case _ => Valid
    }
  }

  // At most five "other" countries may be specified.
  lazy val notTooManyNationalities = Constraint[InprogressForces](keys.nationality.key) {
    application =>
      if (otherCountryCount(application) > NationalityConstants.numberMaxOfOtherCountries) {
        Invalid(
          "You can specify no more than five countries",
          keys.nationality.otherCountries
        )
      } else {
        Valid
      }
  }

  // Valid iff the applicant either declared at least one nationality
  // (British, Irish or other) or gave a reason for having none.
  lazy val nationalityIsChosen = Constraint[InprogressForces](keys.nationality.key) {
    application =>
      val britishChecked = application.nationality.flatMap(_.british).getOrElse(false)
      val irishChecked = application.nationality.flatMap(_.irish).getOrElse(false)
      val hasOtherCountry = application.nationality.flatMap(_.hasOtherCountry).getOrElse(false)
      val otherCountryFilled = otherCountryCount(application) > 0
      val nationalityFilled = britishChecked || irishChecked || otherCountryFilled || hasOtherCountry
      val excuseFilled = application.nationality.flatMap(_.noNationalityReason).exists(_.nonEmpty)
      if (nationalityFilled || excuseFilled) {
        Valid
      } else {
        Invalid(
          "Please answer this question",
          keys.nationality
        )
      }
  }

  lazy val otherCountry0IsValid = otherCountryIsValid(0)
  lazy val otherCountry1IsValid = otherCountryIsValid(1)
  lazy val otherCountry2IsValid = otherCountryIsValid(2)

  // The i-th "other country" entry, when present, must appear in the list
  // of recognised nationalities; absent entries are valid.
  private def otherCountryIsValid(i: Int) = Constraint[InprogressForces](
    keys.nationality.otherCountries.key
  ) { application =>
    val otherCountry = application.nationality.flatMap(_.otherCountries.lift(i))
    val otherCountryValid = otherCountry.exists { country =>
      NationalityConstants.validNationalitiesList.contains(country)
    }
    (otherCountry, otherCountryValid) match {
      case (Some(c), false) => Invalid(
        "This is not a valid entry",
        keys.nationality.otherCountries.item(i)
      )
      case _ => Valid
    }
  }
}
| alphagov/ier-frontend | app/uk/gov/gds/ier/transaction/forces/nationality/NationalityForms.scala | Scala | mit | 3,866 |
package sparkDemo
import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.SparkEnv.{logDebug, logInfo}
import org.apache.spark.rpc.{RpcEndpoint, RpcEndpointRef}
import org.apache.spark.serializer.Serializer
import org.apache.spark.util.RpcUtils
/**
* Created by liush on 17-8-5.
* trait支持部分实现,也就是说可以在scala的trait中可以实现部分方法
* trait和抽象类的区别在于抽象类是对一个继承链的,类和类之前确实有父子类的继承关系,而trait则如其名字,表示一种特征,可以多继承。
*/
object InstantiateClass extends App with Logging {

  val conf: SparkConf = new SparkConf()

  /**
   * Instantiates `className` reflectively, trying constructors in order:
   * (SparkConf, Boolean isDriver), then (SparkConf), then no-arg.
   *
   * Each candidate constructor is now invoked at most once — the original
   * demo instantiated the class two or three times per branch (once for
   * logging, once for the return value), running constructor side effects
   * repeatedly.
   */
  def instantiateClass[T](className: String): T = {
    logInfo(s"className: ${className}")
    val cls = classForName(className)
    logInfo(s"cls: ${cls}")
    try {
      // java.lang.Boolean.TRUE instead of the deprecated `new java.lang.Boolean(true)`.
      val instance = cls.getConstructor(classOf[SparkConf], java.lang.Boolean.TYPE)
        .newInstance(conf, java.lang.Boolean.TRUE)
        .asInstanceOf[T]
      logInfo(s"asInstanceOf[T]: ${instance.toString}")
      instance
    } catch {
      case _: NoSuchMethodException =>
        try {
          val instance = cls.getConstructor(classOf[SparkConf]).newInstance(conf).asInstanceOf[T]
          logInfo(s"asInstanceOf[T]: ${instance}")
          instance
        } catch {
          case _: NoSuchMethodException =>
            val instance = cls.getConstructor().newInstance().asInstanceOf[T]
            logInfo(s"asInstanceOf[T]: ${instance}")
            instance
        }
    }
  }

  /** Loads `className` through the context (or Spark) class loader. */
  def classForName(className: String): Class[_] = {
    val classLoader = getContextOrSparkClassLoader
    logInfo(s"classLoader: ${classLoader}")
    Class.forName(className, true, getContextOrSparkClassLoader)
    // scalastyle:on classforname
  }

  /**
   * The current thread's context class loader, falling back to the loader
   * that loaded this class when no context loader is set.
   */
  def getContextOrSparkClassLoader: ClassLoader = {
    val contextClassLoader = Thread.currentThread().getContextClassLoader
    logInfo(s"ContextClassLoader: ${contextClassLoader}")
    Option(contextClassLoader).getOrElse(getSparkClassLoader)
  }

  /** The class loader that loaded this object. */
  def getSparkClassLoader: ClassLoader = {
    logInfo(s"getClass.getClassLoader: ${getClass.getClassLoader}")
    getClass.getClassLoader
  }

  /** Reads the class name from `conf` (with a default) and instantiates it. */
  def instantiateClassFromConf[T](propertyName: String, defaultClassName: String): T = {
    instantiateClass[T](conf.get(propertyName, defaultClassName))
  }

  val serializer = instantiateClassFromConf[Serializer](
    "spark.serializer", "org.apache.spark.serializer.JavaSerializer")
  logInfo(s"Using serializer: ${serializer.getClass}")
  println("====="+serializer.getClass)
}
| tophua/spark1.52 | examples/src/main/scala/sparkDemo/InstantiateClass.scala | Scala | apache-2.0 | 3,731 |
package org.jetbrains.plugins.scala.lang.transformation.annotations
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions.{&&, PsiElementExt}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScUnderscoreSection
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaCode._
import org.jetbrains.plugins.scala.lang.psi.types.result.Typeable
import org.jetbrains.plugins.scala.lang.transformation._
import org.jetbrains.plugins.scala.project.ProjectContext
/**
* @author Pavel Fatin
*/
class AddTypeToUnderscoreParameter extends AbstractTransformer {
  /** Rewrites a typeable underscore section `_` into `(_: T)` with an explicit type ascription. */
  def transformation(implicit project: ProjectContext): PartialFunction[PsiElement, Unit] = {
    // Only fire when the underscore's type is known (Typeable) and it is not
    // already followed by a ":" token, i.e. it carries no explicit ascription yet.
    case (e: ScUnderscoreSection) && Typeable(t) if !e.nextSibling.exists(_.getText == ":") =>
      val annotation = annotationFor(t, e)
      val result = e.replace(code"(_: $annotation)")
      // Navigate into the generated `(_: T)` tree to reach the type element and
      // bind it — presumably first child "(", next sibling the typed pattern,
      // last child the type element; TODO(review) confirm against the PSI shape.
      bindTypeElement(result.getFirstChild.getNextSibling.getLastChild)
  }
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/transformation/annotations/AddTypeToUnderscoreParameter.scala | Scala | apache-2.0 | 949 |
package com.temportalist.weepingangels.server
import com.temportalist.weepingangels.common.ProxyCommon
/**
*
*
* @author TheTemportalist
*/
/** Dedicated-server-side proxy; currently adds no behaviour beyond ProxyCommon. */
class ProxyServer() extends ProxyCommon {
}
| TheTemportalist/WeepingAngels | src/main/scala/com/temportalist/weepingangels/server/ProxyServer.scala | Scala | apache-2.0 | 191 |
/*
* Copyright 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ff7
package battle
/** Immutable snapshot of a battle between two teams, with per-round history. */
final case class BattleField(heroes: Team, enemies: Team, round: Int, history: List[BattleField], aborted: Boolean, result: Option[BattleResult]) {

  /** A battle is over once it was aborted or either side has no person with positive hp. */
  def isFinished: Boolean = {
    def wipedOut(team: Team): Boolean = team.persons forall (_.hp.x <= 0)
    aborted || wipedOut(heroes) || wipedOut(enemies)
  }

  /** Advances to the next round, archiving this field (tagged with `br`) in the history. */
  def round(br: BattleResult): BattleField = {
    val archived = copy(result = Some(br))
    copy(round = round + 1, history = archived :: history)
  }

  /** Exchanges the two sides. */
  def swap: BattleField =
    copy(heroes = enemies, enemies = heroes)

  /** Cycles the hero team (delegates to Team#cycle). */
  def cycle: BattleField =
    copy(heroes = heroes.cycle)

  override def toString: String = s"Battle [$heroes] vs [$enemies]"
}
/** Factory for a fresh battle field: round 0, empty history, not aborted, no result. */
object BattleField {
  def init(heroes: Team, enemies: Team): BattleField =
    BattleField(heroes = heroes, enemies = enemies, round = 0,
      history = Nil, aborted = false, result = None)
}
| knutwalker/ff7-simulator | api/src/main/scala/ff7/battle/BattleField.scala | Scala | apache-2.0 | 1,351 |
/*
* Copyright 2012 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.components.renderer.jvr
import simx.core.ontology.SValDescription
import java.io.{ObjectInputStream, ObjectOutputStream, File}
import de.bht.jvr.core.{ShaderProgram, ShaderMaterial}
import de.bht.jvr.math.Vector2
import de.bht.jvr.core.uniforms._
import simx.core.entity.typeconversion.ConvertibleTrait
import de.bht.jvr.util.Color
import simx.core.entity.Entity
import simx.core.svaractor.semantictrait.base.{Thing, Base}
import simx.core.svaractor.{SVarActor, SVar}
import simx.core.svaractor.unifiedaccess.EntityUpdateHandling
import scala.annotation.meta.field
/**
* A domain specific language to specify shader effects for jVR and make parameters of them accessible to the trough
* the middle ware.
*
* Different shader can be used for every render pass. The following example describes a parallax mapping.
*
* {{{
def makeParallaxMaterial( name : String, file : String, heightScale: Float, heightBias: Float, shininess : Float ) =
ShaderEffect( name ) has(
RenderPass( "AMBIENT" ) describedByShader( "shader/bumpmapping_ambient.vs" :: "shader/parallaxmapping_ambient.fs" :: Nil ) provideImage
(file + "_COLOR.jpg") as "jvr_Texture0" provideImage
(file + "_DISP.jpg") as "jvr_HeightMap" where
"jvr_HeightScale" hasValue( heightScale ) and
"jvr_ParallaxBias" hasValue( heightBias ) and
"jvr_Material_Ambient" hasValue( new Color( 0.1f, 0.1f, 0.1f, 1f ) ) pack
) and( RenderPass( "LIGHTING" ) describedByShader( "shader/bumpmapping_lighting.vs" :: "shader/parallaxmapping_lighting.fs" :: Nil ) provideImage
(file + "_COLOR.jpg") as "jvr_Texture0" provideImage
(file + "_DISP.jpg") as "jvr_HeightMap" provideImage
(file + "_NORMAL.jpg") as "jvr_NormalMap" where
"jvr_HeightScale" hasValue( heightScale ) and
"jvr_ParallaxBias" hasValue( heightBias ) and
"jvr_Material_Diffuse" hasValue( new Color(1.0f, 1.0f, 1.0f, 1.0f ) ) and
"jvr_Material_Specular" hasValue( new Color(0.6f, 0.6f, 0.6f, 1.0f) ) and
"jvr_Material_Shininess" hasValue( shininess ) pack
)
* }}}
*
* @param name The name of the shader effect.
*/
class ShaderEffect( name : String ) extends Serializable {

  require( name != null, "The parameter 'name' must not be 'null'!" )
  require( name != "", "The parameter 'name' must not be an empty string!" )

  /**
   * The list of all render passes.
   */
  private var renderPasses = List[RenderPass]()

  /**
   * The constructed shader material for this effect. Built lazily by
   * getShaderMaterial and cached; transient, so it is rebuilt after
   * deserialization (see readObject).
   */
  @(transient @field) private var shaderMaterial : Option[ShaderMaterial] = None

  /**
   * This method adds a render pass to this shader effect.
   *
   * @param renderPass The render pass to add.
   * @return The altered shader effect.
   */
  def has( renderPass : RenderPass ) : ShaderEffect = {
    require( renderPass != null, "The parameter 'renderPass' must not be 'null'!" )
    renderPasses = renderPasses ::: renderPass :: Nil
    this
  }

  /**
   * This method adds a render pass to this shader effect (alias of [[has]],
   * provided for DSL readability).
   *
   * @param renderPass The render pass to add.
   * @return The altered shader effect.
   */
  def and( renderPass : RenderPass ) : ShaderEffect = has( renderPass )

  /**
   * This method constructs and returns the shader material for this effect.
   * The material is created on first call and cached afterwards.
   *
   * @return The shader material for this effect.
   */
  private[jvr] def getShaderMaterial : ShaderMaterial = {
    if( !shaderMaterial.isDefined ) {
      val sm = new ShaderMaterial( )
      for( renderPass <- renderPasses ) {
        sm.setShaderProgram( renderPass.name, renderPass.getShaderProgram )
        for( (image,name) <- renderPass.images )
          sm.setTexture( renderPass.name, name, ResourceManager.loadTexture( new File( image ) ) )
        for( uniform <- renderPass.uniformList ) {
          // Translate the plain Scala value into the matching jVR uniform type.
          val uniformValue = uniform.value match {
            case v : Float => new UniformFloat( v )
            case v : Int => new UniformInt( v )
            case v : Boolean => new UniformBool( v )
            case v : Vector2 => new UniformVector2( v )
            case v : Color => new UniformColor( v )
            case v : Seq[_] =>
              // Sequences additionally publish their length as "<name>_size".
              // NOTE(review): only Seq[Vector2] is handled; other element
              // types fall through to a MatchError.
              sm.setUniform( renderPass.name, uniform.name + "_size", new UniformInt( v.size ) )
              v.head match {
                case h : Vector2 =>
                  new UniformVector2( v.asInstanceOf[Seq[Vector2]].toArray : _* )
              }
          }
          sm.setUniform( renderPass.name, uniform.name, uniformValue )
        }
      }
      shaderMaterial = Some( sm )
    }
    shaderMaterial.get
  }

  /** Replaces the cached shader material (used when the material was built elsewhere). */
  private[jvr] def setShaderMaterial(sm : ShaderMaterial) {
    shaderMaterial = Some(sm)
  }

  /**
   * For every uniform that is backed by an ontology member, observes the
   * entity's corresponding state variable and writes value changes through
   * to the cached shader material.
   */
  def bindShaderMaterialToEntity( entity : Entity )(implicit actorContext : EntityUpdateHandling) = {
    for( renderPass <- renderPasses ) {
      for( uniformManager <- renderPass.uniformList ) {
        if( uniformManager.ontologyMember.isDefined )
          uniformManager.value match {
            case v : Float =>
              val material = getShaderMaterial
              entity.getSVars( uniformManager.ontologyMember.get ).head._2.asInstanceOf[SVar[Float]].observe{
                v =>material.setUniform( renderPass.name, uniformManager.name, new UniformFloat( v ) )
              }
            case v : Int =>
              entity.getSVars( uniformManager.ontologyMember.get ).head._2.asInstanceOf[SVar[Int]].observe{
                v => getShaderMaterial.setUniform( renderPass.name, uniformManager.name, new UniformInt( v ) )
              }
            case v : Boolean =>
              entity.getSVars( uniformManager.ontologyMember.get ).head._2.asInstanceOf[SVar[Boolean]].observe{
                v => getShaderMaterial.setUniform( renderPass.name, uniformManager.name, new UniformBool( v ) )
              }
            case v : Vector2 =>
              entity.getSVars( uniformManager.ontologyMember.get ).head._2.asInstanceOf[SVar[Vector2]].observe{
                v => getShaderMaterial.setUniform( renderPass.name, uniformManager.name, new UniformVector2( v ) )
              }
            case v : Seq[_] =>
              // As above, only Vector2 sequences are supported; the "_size"
              // companion uniform is kept in sync on every update.
              v.head match {
                case h : Vector2 =>
                  entity.getSVars( uniformManager.ontologyMember.get ).head._2.asInstanceOf[SVar[Seq[Vector2]]].observe{ v : Seq[Vector2] =>
                    getShaderMaterial.setUniform( renderPass.name, uniformManager.name, new UniformVector2( v.toArray : _* ) )
                    getShaderMaterial.setUniform( renderPass.name, uniformManager.name + "_size", new UniformInt( v.size ) )
                  }
              }
          }
      }
    }
  }

  /**
   * This method returns a list of all sVar Descriptions that are provided by this effect. It is used during the
   * construction process of the entity.
   *
   * @return A list of all sVar descriptions provided by this effect.
   */
  private[jvr] def getSVarDescriptions : List[SValDescription[_,_, _ <: Base, _ <: Thing]] = {
    var sVarDescriptions = List[SValDescription[_,_, _ <: Base, _ <: Thing]]()
    for( renderPass <- this.renderPasses ) {
      for( uniformManager <- renderPass.uniformList ) {
        if( uniformManager.ontologyMember.isDefined ) {
          sVarDescriptions = uniformManager.ontologyMember.get :: sVarDescriptions
        }
      }
    }
    sVarDescriptions
  }

  /**
   * This method return the initial value for an sVar.
   *
   * @param sVarDescription The description of the sVar
   * @tparam T The type of the variable.
   * @return The initial value of the sVar.
   */
  private[jvr] def getValueForSVarDescription[T]( sVarDescription : ConvertibleTrait[T]) : T = {
    var value : Option[T] = None
    for( renderPass <- this.renderPasses ) {
      for( uniformManager <- renderPass.uniformList ) {
        if( uniformManager.ontologyMember.isDefined && uniformManager.ontologyMember.get == sVarDescription ) {
          value = Some( uniformManager.value.asInstanceOf[T] )
        }
      }
    }
    // NOTE(review): throws NoSuchElementException when no uniform matches
    // the given description — callers must pass a known description.
    value.get
  }

  // Custom Java serialization hooks: writing uses the default behaviour ...
  private def writeObject( objectOutputStream : ObjectOutputStream ) {
    objectOutputStream.defaultWriteObject()
  }

  // ... and reading resets the transient material cache so it is rebuilt lazily.
  private def readObject( objectInputStream : ObjectInputStream ) {
    objectInputStream.defaultReadObject()
    shaderMaterial = None
  }
}
/**
* An object that configure the shader and uniforms for one render pass. Typical render passes are AMBIENT and LIGHTING.
*
* @author Stephan Rehfeld
*
* @param name The name of the render pass.
*/
class RenderPass( val name : String ) extends UniformListContaining[RenderPass] with Serializable {

  require( name != null, "The parameter 'name' must not be 'null'!" )
  require( name != "", "The parameter 'name' must not be an empty string!" )

  /**
   * The list of shader that are used by this effect.
   */
  private var shader = List[String]()

  /**
   * Textures and the names under which they are provided to the shader
   * (texture file path -> uniform/sampler name).
   */
  private[jvr] var images : Map[String,String] = Map()

  /**
   * The shader material that is constructed for this post processing effect.
   * Transient: rebuilt lazily after deserialization (see readObject).
   */
  @(transient @field) private var shaderProgram : Option[ShaderProgram] = None

  // Uniforms registered through the `where ... hasValue ...` DSL.
  override var uniformList : List[UniformManager[_, _,RenderPass]] = List()

  /**
   * This method sets the shader that are used to render this effect.
   *
   * @param shader The list of shader, that are used to render this effect. Must contain at least 2 entries.
   * @return The altered shader effect.
   */
  def describedByShader( shader : List[String] ) : RenderPass = {
    require( shader != null, "The parameter 'shader' must not be 'null'!" )
    require( shader.size > 1, "The parameter 'shader' contain at least 2 entries!" )
    this.shader = shader
    this
  }

  /**
   * This method adds a texture texture to the shader.
   *
   * @param file The file name of the texture.
   * @return The object to construct the texture description.
   */
  def provideImage( file : String ) : TextureMapper = {
    require( file != null, "The parameter 'name' must not be 'null'!" )
    require( file != "", "The parameter 'name' must not an empty string'!" )
    new TextureMapper( file, this )
  }

  /**
   * This is a small helper class for the shader effect domain specific language.
   * It captures the texture file until `as` supplies the shader-side name.
   *
   * @author Stephan Rehfeld
   *
   * @param file The file name of the texture
   * @param renderPass The render pass.
   */
  class TextureMapper( val file : String, val renderPass : RenderPass ) {
    require( file != null, "The parameter 'file' must not be 'null'!" )
    require( file != "", "The parameter 'file' must not an empty string'!" )
    require( renderPass != null, "The parameter 'renderPass' must not be 'null'!" )

    /**
     * This method sets the name under which the texture should be provided to the render pass.
     *
     * @param targetName The name under which the texture should be provided to the render pass.
     * @return The altered post processing effect.
     */
    def as( targetName : String ) : RenderPass = {
      require( targetName != null, "The parameter 'targetName' must not be 'null'!" )
      require( targetName != "", "The parameter 'targetName' must not an empty string'!" )
      // Registers the mapping on the enclosing render pass.
      images = images + (file -> targetName )
      renderPass
    }
  }

  /**
   * This method starts to add a uniform to the render pass.
   *
   * @param name The name of the uniform.
   * @return The object to construct the uniform description.
   */
  def where( name : String ) = {
    require( name != null, "The parameter 'name' must not be 'null'!" )
    require( name != "", "The parameter 'name' must not an empty string'!" )
    new UniformNameHolder( name, this )
  }

  /**
   * This method constructs and returns the shader material for this render pass.
   * The program is compiled from the configured shader files on first call and
   * cached afterwards.
   *
   * @return The shader material for this render pass.
   */
  def getShaderProgram : ShaderProgram = {
    if( shaderProgram.isEmpty ) {
      val shaderFiles = for( file <- shader ) yield new File( file )
      val sp = new ShaderProgram( shaderFiles: _* )
      shaderProgram = Some( sp )
    }
    this.shaderProgram.get
  }

  // Custom Java serialization hooks: writing uses the default behaviour ...
  private def writeObject( objectOutputStream : ObjectOutputStream ) {
    objectOutputStream.defaultWriteObject()
  }

  // ... and reading resets the transient program cache so it is rebuilt lazily.
  private def readObject( objectInputStream : ObjectInputStream ) {
    objectInputStream.defaultReadObject()
    shaderProgram = None
  }
}
/**
 * The companion object of the render pass; entry point of the DSL.
 *
 * @see [[simx.components.renderer.jvr.RenderPass]]
 *
 * @author Stephan Rehfeld
 */
object RenderPass {

  /**
   * The method to start a description of a render pass.
   *
   * @param name The name of the render pass. Must not be 'null' or empty.
   * @return The render pass.
   */
  def apply( name : String ) : RenderPass = {
    require( name != null, "The parameter 'name' must not be 'null'!" )
    require( name != "", "The parameter 'name' must not be an empty string!" )
    new RenderPass( name )
  }
}
/**
 * The companion object of the shader effect; entry point of the DSL.
 *
 * @see [[simx.components.renderer.jvr.ShaderEffect]]
 *
 * @author Stephan Rehfeld
 */
object ShaderEffect {

  /**
   * The method to start a description of a shader effect.
   *
   * @param name The name of the shader effect. Must not be 'null'; the
   *             ShaderEffect constructor additionally rejects empty strings.
   * @return The shader effect.
   */
  def apply( name : String ) : ShaderEffect = {
    require( name != null, "The parameter 'name' must not be 'null'!" )
    new ShaderEffect( name )
  }
} | simulator-x/jvr-rendering | src/simx/components/renderer/jvr/ShaderEffect.scala | Scala | apache-2.0 | 14,191 |
package org.dama.datasynth.lang
import org.dama.datasynth.executionplan.ExecutionPlan.Table
import org.dama.datasynth.schema.Schema
import org.junit.runner
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import scala.io.Source
/**
* Created by joangui on 13/04/2017.
*/
@RunWith(classOf[JUnitRunner])
class CreateExecutionPlanTest extends FlatSpec with Matchers {

  /**
   * Loads the schema JSON from `resource` and builds its execution plan nodes.
   * Shared by all tests below to avoid repeating the load/parse boilerplate.
   */
  private def executionPlanFor(resource: String): Seq[Table] = {
    val json: String = Source.fromFile(resource).getLines.mkString
    val schema: Schema = ReadExecutionPlan.loadSchema(json)
    ReadExecutionPlan.createExecutionPlan(schema)
  }

  /** Generator tests **/

  "A ReadExecutionPlan createExecutionPlan propertyTablesNodes.size" should "output like 3" in {
    executionPlanFor("./src/test/resources/propertyTableTest.json").size should be(3)
  }

  "A ReadExecutionPlan createExecutionPlan voidList.isEmpty" should "output like true" in {
    val executionPlanNodes = executionPlanFor("./src/test/resources/propertyTableTest.json")
    val propertyTables: Seq[String] =
      Seq("[PropertyTable,person.sex]", "[PropertyTable,person.country]", "[PropertyTable,person.name]")
    // Every produced node must be one of the expected property tables.
    val voidList = executionPlanNodes.filter(node => !propertyTables.contains(node.toString))
    voidList.isEmpty should be(true)
  }

  "A ReadExecutionPlan createExecutionPlan propertyTablesNodes.size for the second schema" should "output like 5" in {
    executionPlanFor("./src/test/resources/propertyTableTest2.json").size should be(5)
  }

  // Title fixed: the edge-table plan contains 4 nodes (the original title said 3
  // while the assertion expected 4).
  "A ReadExecutionPlan createExecutionPlan with edges propertyTablesNodes.size" should "output like 4" in {
    executionPlanFor("./src/test/resources/edgeTableTest.json").size should be(4)
  }

  "A ReadExecutionPlan createExecutionPlan with edges and correlation propertyTablesNodes.size" should "output like 5" in {
    executionPlanFor("./src/test/resources/edgeTableTest2.json").size should be(5)
  }
}
| joangui/DataSynth | src/test/scala/org/dama/datasynth/lang/CreateExecutionPlanTest.scala | Scala | gpl-3.0 | 2,690 |
import sbt._
import sbt.Keys._
import sbt.plugins.JvmPlugin
/** Shared sbt settings, automatically applied to every project in the build. */
object Common extends AutoPlugin {

  // Activate for all projects without requiring an explicit enablePlugins(...).
  override def trigger = allRequirements

  // Load after the JVM defaults so these settings can override them.
  override def requires = JvmPlugin

  override lazy val projectSettings = Seq(
    organization := "ScalaSthlm",
    organizationName := "Scala Stockholm",
    homepage := Some(url("https://github.com/ScalaSthlm/alpakka-integration-patterns")),
    scmInfo := Some(ScmInfo(url("https://github.com/ScalaSthlm/alpakka-integration-patterns"), "git@github.com:ScalaSthlm/alpakka-integration-patterns.git")),
    developers += Developer("contributors", "Contributors", "@ScalaSthlm", url("https://github.com/ScalaSthlm/alpakka-integration-patterns/graphs/contributors")),
    licenses := Seq(("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
    // Build with the first (primary) cross version by default.
    scalaVersion := crossScalaVersions.value.head,
    crossScalaVersions := Dependencies.ScalaVersions,
    crossVersion := CrossVersion.binary,
    scalacOptions ++= Seq(
      "-encoding", "UTF-8",
      "-feature",
      "-unchecked",
      "-deprecation",
      //"-Xfatal-warnings",
      "-Xlint",
      "-Yno-adapted-args",
      "-Ywarn-dead-code",
      "-Xfuture"
    ),
    javacOptions ++= Seq(
      "-Xlint:unchecked"
    ),
    autoAPIMappings := true,
    apiURL := None,
    // show full stack traces and test case durations
    testOptions in Test += Tests.Argument("-oDF"),
    ivyScala := ivyScala.value.map(_.copy(overrideScalaVersion = sbtPlugin.value)) // TODO Remove once this workaround no longer needed (https://github.com/sbt/sbt/issues/2786)!
  )
}
| ScalaSthlm/alpakka-integration-patterns | project/Common.scala | Scala | apache-2.0 | 1,580 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.internal.collection.queues
import java.util
import monix.execution.internal.collection.ConcurrentQueue
import scala.collection.mutable
/** Adapts a `java.util.Queue` to the internal `ConcurrentQueue` interface. */
private[internal] class FromJavaQueue[A](queue: util.Queue[A])
  extends ConcurrentQueue[A] {

  /** Returns 0 when the element was accepted, 1 when the queue rejected it. */
  final def offer(elem: A): Int =
    if (queue.offer(elem)) 0 else 1

  /** Removes and returns the head, or `null` when empty (java.util.Queue semantics). */
  final def poll(): A =
    queue.poll()

  final def clear(): Unit =
    queue.clear()

  /**
   * Moves at most `limit` elements from the queue into `buffer`,
   * returning how many were actually transferred.
   */
  final def drainToBuffer(buffer: mutable.Buffer[A], limit: Int): Int = {
    var transferred = 0
    var keepGoing = transferred < limit
    while (keepGoing) {
      val elem = queue.poll()
      if (elem == null) {
        keepGoing = false
      } else {
        buffer += elem
        transferred += 1
        if (transferred == limit) keepGoing = false
      }
    }
    transferred
  }
}
| Wogan/monix | monix-execution/jvm/src/main/scala/monix/execution/internal/collection/queues/FromJavaQueue.scala | Scala | apache-2.0 | 1,398 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations
import org.apache.spark.sql.types.StructType
import io.deepsense.commons.utils.Version
import io.deepsense.deeplang.DOperation.Id
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.report.Report
import io.deepsense.deeplang.doperables.{Estimator, Transformer}
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.params.{NumericParam, Param}
import io.deepsense.deeplang.{ExecutionContext, UnitSpec}
/** Behavioural tests for EstimatorAsFactory using the mocks from the companion object. */
class EstimatorAsFactorySpec extends UnitSpec {
  import EstimatorAsFactorySpec._

  "EstimatorAsFactory" should {
    "have the same parameters as the Estimator" in {
      val mockEstimator = new MockEstimator
      val mockFactory = new MockEstimatorFactory
      mockFactory.extractParamMap() shouldBe mockEstimator.extractParamMap()
      mockFactory.params shouldBe mockEstimator.params
    }
    // Two distinct values so the tests can observe parameter updates propagating.
    val paramValue1 = 100
    val paramValue2 = 1337
    "produce an Estimator with parameters set" in {
      val mockFactory = new MockEstimatorFactory
      mockFactory.set(mockFactory.estimator.param -> paramValue1)
      val Vector(estimator: MockEstimator) =
        mockFactory.executeUntyped(Vector.empty)(mock[ExecutionContext])
      estimator.get(mockFactory.estimator.param) shouldBe Some(paramValue1)
    }
    "return the same instance of estimator each time" in {
      val mockFactory = new MockEstimatorFactory
      mockFactory.set(mockFactory.estimator.param -> paramValue1)
      val estimator1 = execute(mockFactory)
      estimator1.get(mockFactory.estimator.param) shouldBe Some(paramValue1)
      // A later set(...) must be visible through the next returned estimator.
      mockFactory.set(mockFactory.estimator.param -> paramValue2)
      val estimator2 = execute(mockFactory)
      estimator2.get(mockFactory.estimator.param) shouldBe Some(paramValue2)
    }
    "infer knowledge" in {
      val mockFactory = new MockEstimatorFactory
      mockFactory.set(mockFactory.estimator.param -> paramValue1)
      val (Vector(knowledge), warnings) = mockFactory.inferKnowledgeUntyped(Vector.empty)(mock[InferContext])
      knowledge should have size 1
      knowledge.single shouldBe a[MockEstimator]
      val estimator = knowledge.single.asInstanceOf[MockEstimator]
      // The inferred estimator must carry the same parameters as an executed one.
      estimator.extractParamMap() shouldBe execute(mockFactory).extractParamMap()
      warnings shouldBe InferenceWarnings.empty
    }
  }

  /** Runs the factory and extracts the produced MockEstimator. */
  private def execute(factory: MockEstimatorFactory): MockEstimator =
    factory.executeUntyped(Vector.empty)(mock[ExecutionContext]).head.asInstanceOf[MockEstimator]
}
object EstimatorAsFactorySpec {

  /** Estimator stub with a single numeric parameter; fitting is never exercised (???). */
  class MockEstimator extends Estimator[Transformer] {
    val param = NumericParam("b", Some("desc"))
    setDefault(param -> 5)
    override val params: Array[Param[_]] = Array(param)
    override private[deeplang] def _fit(ctx: ExecutionContext, df: DataFrame): Transformer = ???
    override private[deeplang] def _fit_infer(schema: Option[StructType]): Transformer = ???
    override def report: Report = ???
  }

  /** Factory under test, wired to [[MockEstimator]]. */
  class MockEstimatorFactory extends EstimatorAsFactory[MockEstimator] {
    override val id: Id = Id.randomId
    override val name: String = "Mock Estimator factory used for tests purposes"
    override val description: String = "Description"
  }
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/test/scala/io/deepsense/deeplang/doperations/EstimatorAsFactorySpec.scala | Scala | apache-2.0 | 3,874 |
package com.twitter.finagle.http
/**
* This puts it all together: The HTTP codec itself.
*/
import com.twitter.conversions.storage._
import com.twitter.finagle._
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.http.codec._
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing._
import com.twitter.util.{Try, StorageUnit, Future}
import java.net.InetSocketAddress
import org.jboss.netty.channel.{
ChannelPipelineFactory, UpstreamMessageEvent, Channels,
ChannelEvent, ChannelHandlerContext, SimpleChannelDownstreamHandler, MessageEvent}
import org.jboss.netty.handler.codec.http._
/**
 * Stand-in request emitted when the HTTP codec failed to decode the original
 * bytes; `codecError` records the decoding failure.
 */
case class BadHttpRequest(httpVersion: HttpVersion, method: HttpMethod, uri: String, codecError: String)
  extends DefaultHttpRequest(httpVersion, method, uri)
object BadHttpRequest {
  /** Builds a placeholder HTTP/1.0 GET request carrying the codec error message. */
  def apply(codecError: String) =
    new BadHttpRequest(HttpVersion.HTTP_1_0, HttpMethod.GET, "/bad-http-request", codecError)
}
/**
 * HttpServerCodec wrapper that converts decoding exceptions into
 * [[BadHttpRequest]] messages instead of letting them escape the pipeline.
 */
class SafeHttpServerCodec(
    maxInitialLineLength: Int,
    maxHeaderSize: Int,
    maxChunkSize: Int)
  extends HttpServerCodec(maxInitialLineLength, maxHeaderSize, maxChunkSize)
{
  override def handleUpstream(ctx: ChannelHandlerContext, e: ChannelEvent) {
    // this only catches Codec exceptions -- when a handler calls sendUpStream(), it
    // rescues exceptions from the upstream handlers and calls notifyHandlerException(),
    // which doesn't throw exceptions.
    try {
      super.handleUpstream(ctx, e)
    } catch {
      case ex: Exception =>
        val channel = ctx.getChannel()
        // Forward a synthetic request that carries the failure so downstream
        // handlers (e.g. CheckRequestFilter) can answer with a 400.
        ctx.sendUpstream(new UpstreamMessageEvent(
          channel, BadHttpRequest(ex.toString()), channel.getRemoteAddress()))
    }
  }
}
/**
 * Responds to [[BadHttpRequest]]s (requests the codec failed to decode) with an
 * HTTP 400, counting them via `bad_requests`; all other requests pass through
 * to the wrapped service.
 */
abstract class CheckRequestFilter[Req](
    statsReceiver: StatsReceiver = NullStatsReceiver)
  extends SimpleFilter[Req, HttpResponse]
{
  private[this] val badRequestCount = statsReceiver.counter("bad_requests")

  // HTTP/1.0 so the connection manager closes the channel after responding.
  private[this] val BadRequestResponse =
    Response(HttpVersion.HTTP_1_0, HttpResponseStatus.BAD_REQUEST)

  def apply(request: Req, service: Service[Req, HttpResponse]) = {
    toHttpRequest(request) match {
      case httpRequest: BadHttpRequest =>
        badRequestCount.incr()
        // BadRequestResponse will cause ServerConnectionManager to close the channel.
        Future.value(BadRequestResponse)
      case _ =>
        service(request)
    }
  }

  /** Extracts the underlying HttpRequest from the request type `Req`. */
  def toHttpRequest(request: Req): HttpRequest
}
/** Identity specialization of [[CheckRequestFilter]] for plain `HttpRequest`s. */
private[this] class CheckHttpRequestFilter extends CheckRequestFilter[HttpRequest]
{
  // NOTE(review): this val duplicates the parent's private BadRequestResponse
  // and appears unused within this class — candidate for removal after
  // confirming nothing reflects on or references it externally.
  val BadRequestResponse =
    Response(HttpVersion.HTTP_1_0, HttpResponseStatus.BAD_REQUEST)

  def toHttpRequest(request: HttpRequest) = request
}
/**
 * Codec factory (client and server sides) for plain Netty
 * `HttpRequest`/`HttpResponse` messages.
 *
 * Constructor parameters are prefixed with `_` and mirrored by the fluent
 * setters below, each of which returns an updated immutable copy.
 */
case class Http(
    _compressionLevel: Int = 0,
    _maxRequestSize: StorageUnit = 1.megabyte,
    _maxResponseSize: StorageUnit = 1.megabyte,
    _decompressionEnabled: Boolean = true,
    _channelBufferUsageTracker: Option[ChannelBufferUsageTracker] = None,
    _annotateCipherHeader: Option[String] = None,
    _enableTracing: Boolean = false,
    _maxInitialLineLength: StorageUnit = 4096.bytes,
    _maxHeaderSize: StorageUnit = 8192.bytes)
  extends CodecFactory[HttpRequest, HttpResponse]
{
  // Fluent, copy-based configuration.
  def compressionLevel(level: Int) = copy(_compressionLevel = level)
  def maxRequestSize(bufferSize: StorageUnit) = copy(_maxRequestSize = bufferSize)
  def maxResponseSize(bufferSize: StorageUnit) = copy(_maxResponseSize = bufferSize)
  def decompressionEnabled(yesno: Boolean) = copy(_decompressionEnabled = yesno)
  def channelBufferUsageTracker(usageTracker: ChannelBufferUsageTracker) =
    copy(_channelBufferUsageTracker = Some(usageTracker))
  def annotateCipherHeader(headerName: String) = copy(_annotateCipherHeader = Option(headerName))
  def enableTracing(enable: Boolean) = copy(_enableTracing = enable)

  // Client codec: HTTP encode/decode, chunk aggregation, optional
  // decompression, optional tracing filter.
  def client = { config =>
    new Codec[HttpRequest, HttpResponse] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline() = {
          val pipeline = Channels.pipeline()
          pipeline.addLast("httpCodec", new HttpClientCodec())
          // Aggregate chunked responses, up to the configured maximum size.
          pipeline.addLast(
            "httpDechunker",
            new HttpChunkAggregator(_maxResponseSize.inBytes.toInt))

          if (_decompressionEnabled)
            pipeline.addLast("httpDecompressor", new HttpContentDecompressor)

          pipeline
        }
      }

      override def prepareConnFactory(
        underlying: ServiceFactory[HttpRequest, HttpResponse]
      ): ServiceFactory[HttpRequest, HttpResponse] =
        if (_enableTracing) {
          // Tracing runs before the rest of the stack so B3 headers are
          // attached to every outgoing request.
          new HttpClientTracingFilter[HttpRequest, HttpResponse](config.serviceName) andThen
            super.prepareConnFactory(underlying)
        } else
          super.prepareConnFactory(underlying)

      override def newClientDispatcher(transport: Transport[HttpRequest, HttpResponse]) =
        new HttpClientDispatcher(transport)
    }
  }

  // Server codec: optional buffer-usage tracking, HTTP codec with size
  // limits, optional compression, Expect-Continue handling, chunk
  // aggregation, optional cipher annotation, connection lifecycle
  // management. Order of pipeline stages matters.
  def server = { config =>
    new Codec[HttpRequest, HttpResponse] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline() = {
          val pipeline = Channels.pipeline()
          if (_channelBufferUsageTracker.isDefined) {
            pipeline.addLast(
              "channelBufferManager", new ChannelBufferManager(_channelBufferUsageTracker.get))
          }

          val maxRequestSizeInBytes = _maxRequestSize.inBytes.toInt
          val maxInitialLineLengthInBytes = _maxInitialLineLength.inBytes.toInt
          val maxHeaderSizeInBytes = _maxHeaderSize.inBytes.toInt
          pipeline.addLast("httpCodec", new SafeHttpServerCodec(maxInitialLineLengthInBytes, maxHeaderSizeInBytes, maxRequestSizeInBytes))

          if (_compressionLevel > 0) {
            pipeline.addLast(
              "httpCompressor",
              new HttpContentCompressor(_compressionLevel))
          }

          // Response to ``Expect: Continue'' requests.
          pipeline.addLast("respondToExpectContinue", new RespondToExpectContinue)
          pipeline.addLast(
            "httpDechunker",
            new HttpChunkAggregator(maxRequestSizeInBytes))

          _annotateCipherHeader foreach { headerName: String =>
            pipeline.addLast("annotateCipher", new AnnotateCipher(headerName))
          }

          pipeline.addLast(
            "connectionLifecycleManager",
            new ServerConnectionManager)

          pipeline
        }
      }

      override def prepareConnFactory(
        underlying: ServiceFactory[HttpRequest, HttpResponse]
      ): ServiceFactory[HttpRequest, HttpResponse] = {
        // Reject malformed requests (BadHttpRequest) with a 400 before they
        // reach the user's service.
        val checkRequest = new CheckHttpRequestFilter
        if (_enableTracing) {
          val tracingFilter = new HttpServerTracingFilter[HttpRequest, HttpResponse](
            config.serviceName,
            config.boundInetSocketAddress
          )
          tracingFilter andThen checkRequest andThen underlying
        } else {
          checkRequest andThen underlying
        }
      }
    }
  }
}
/** Java-friendly factory for an [[Http]] codec with default configuration. */
object Http {
  def get() = new Http()
}
/**
 * Constants and helpers shared by the client- and server-side HTTP tracing
 * filters.
 */
object HttpTracing {

  /** B3 header names used to propagate trace metadata between hosts. */
  object Header {
    val TraceId = "X-B3-TraceId"
    val SpanId = "X-B3-SpanId"
    val ParentSpanId = "X-B3-ParentSpanId"
    val Sampled = "X-B3-Sampled"
    val Flags = "X-B3-Flags"

    /** Every tracing header (stripped from requests before user code sees them). */
    val All = Seq(TraceId, SpanId, ParentSpanId, Sampled, Flags)

    /** Headers that must all be present for a trace context to be adopted. */
    val Required = Seq(TraceId, SpanId)
  }

  /**
   * Remove any parameters from url.
   */
  private[http] def stripParameters(uri: String): String =
    uri.takeWhile(_ != '?')
}
/**
 * Pass along headers with the required tracing information.
 */
class HttpClientTracingFilter[Req <: HttpRequest, Res](serviceName: String)
  extends SimpleFilter[Req, Res]
{
  import HttpTracing._

  def apply(request: Req, service: Service[Req, Res]) = Trace.unwind {
    // Strip any pre-existing B3 headers so stale values are never forwarded.
    Header.All foreach { request.removeHeader(_) }

    request.addHeader(Header.TraceId, Trace.id.traceId.toString)
    request.addHeader(Header.SpanId, Trace.id.spanId.toString)
    // no parent id set means this is the root span
    Trace.id._parentId foreach { id =>
      request.addHeader(Header.ParentSpanId, id.toString)
    }
    // three states of sampled, yes, no or none (let the server decide)
    Trace.id.sampled foreach { sampled =>
      request.addHeader(Header.Sampled, sampled.toString)
    }
    request.addHeader(Header.Flags, Trace.id.flags.toLong)

    if (Trace.isActivelyTracing) {
      // Record client-side annotations bracketing the request/response pair.
      Trace.recordRpcname(serviceName, request.getMethod.getName)
      Trace.recordBinary("http.uri", stripParameters(request.getUri))
      Trace.record(Annotation.ClientSend())

      service(request) map { response =>
        Trace.record(Annotation.ClientRecv())
        response
      }
    }
    else
      service(request)
  }
}
/**
 * Adds tracing annotations for each http request we receive.
 * Including uri, when request was sent and when it was received.
 */
class HttpServerTracingFilter[Req <: HttpRequest, Res](serviceName: String, boundAddress: InetSocketAddress)
  extends SimpleFilter[Req, Res]
{
  import HttpTracing._

  def apply(request: Req, service: Service[Req, Res]) = Trace.unwind {
    if (Header.Required.forall { request.containsHeader(_) }) {
      // All required B3 headers are present: adopt the caller's trace
      // context (ids are only set when the span id parses successfully).
      val spanId = SpanId.fromString(request.getHeader(Header.SpanId))

      spanId foreach { sid =>
        val traceId = SpanId.fromString(request.getHeader(Header.TraceId))
        val parentSpanId = SpanId.fromString(request.getHeader(Header.ParentSpanId))

        // Unparseable "Sampled" values fall back to None (server decides).
        val sampled = Option(request.getHeader(Header.Sampled)) flatMap { sampled =>
          Try(sampled.toBoolean).toOption
        }

        val flags = getFlags(request)
        Trace.setId(TraceId(traceId, parentSpanId, sid, sampled, flags))
      }
    } else if (request.containsHeader(Header.Flags)) {
      // even if there are no id headers we want to get the debug flag
      // this is to allow developers to just set the debug flag to ensure their
      // trace is collected
      Trace.setId(Trace.id.copy(flags = getFlags(request)))
    }

    // remove so the header is not visible to users
    Header.All foreach { request.removeHeader(_) }

    // even if no trace id was passed from the client we log the annotations
    // with a locally generated id
    if (Trace.isActivelyTracing) {
      Trace.recordRpcname(serviceName, request.getMethod.getName)
      Trace.recordBinary("http.uri", stripParameters(request.getUri))
      Trace.record(Annotation.ServerRecv())

      service(request) map { response =>
        Trace.record(Annotation.ServerSend())
        response
      }
    }
    else
      service(request)
  }

  /**
   * Safely extract the flags from the header, if they exist. Otherwise return empty flag.
   */
  def getFlags(request: Req): Flags = {
    try {
      Flags(Option(request.getHeader(Header.Flags)).map(_.toLong).getOrElse(0L))
    } catch {
      // Previously `case _ =>`, which also swallowed fatal errors such as
      // OutOfMemoryError. Only recover from non-fatal failures (e.g. a
      // malformed, non-numeric header value).
      case scala.util.control.NonFatal(_) => Flags()
    }
  }
}
/**
 * Http codec for rich Request/Response objects.
 * Note the CheckHttpRequestFilter isn't baked in, as in the Http Codec.
 */
case class RichHttp[REQUEST <: Request](
    httpFactory: Http)
  extends CodecFactory[REQUEST, Response] {

  def client = { config =>
    new Codec[REQUEST, Response] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline() = {
          // Reuse the plain Http client pipeline and add the rich
          // Request/Response conversion stages on top.
          val pipeline = httpFactory.client(null).pipelineFactory.getPipeline()
          // NOTE(review): the stage names appear swapped relative to the
          // handler classes ("requestDecoder" holds a RequestEncoder and
          // "responseEncoder" a ResponseDecoder). The names are only pipeline
          // labels here, but this looks unintentional — worth confirming.
          pipeline.addLast("requestDecoder", new RequestEncoder)
          pipeline.addLast("responseEncoder", new ResponseDecoder)
          pipeline
        }
      }

      override def prepareConnFactory(
        underlying: ServiceFactory[REQUEST, Response]
      ): ServiceFactory[REQUEST, Response] =
        if (httpFactory._enableTracing)
          new HttpClientTracingFilter[REQUEST, Response](config.serviceName) andThen underlying
        else
          underlying

      override def newClientDispatcher(transport: Transport[REQUEST, Response]) =
        new HttpClientDispatcher(transport)
    }
  }

  def server = { config =>
    new Codec[REQUEST, Response] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline() = {
          // Reuse the plain Http server pipeline plus rich-object codecs.
          val pipeline = httpFactory.server(null).pipelineFactory.getPipeline()
          pipeline.addLast("serverRequestDecoder", new RequestDecoder)
          pipeline.addLast("serverResponseEncoder", new ResponseEncoder)
          pipeline
        }
      }

      override def prepareConnFactory(
        underlying: ServiceFactory[REQUEST, Response]
      ): ServiceFactory[REQUEST, Response] =
        if (httpFactory._enableTracing) {
          val tracingFilter = new HttpServerTracingFilter[REQUEST, Response](config.serviceName, config.boundInetSocketAddress)
          tracingFilter andThen underlying
        } else {
          underlying
        }
    }
  }
}
| stevegury/finagle | finagle-http/src/main/scala/com/twitter/finagle/http/Codec.scala | Scala | apache-2.0 | 12,863 |
package edu.rice.habanero.benchmarks.big
import edu.rice.habanero.actors.{FuncJavaActor, FuncJavaActorState, FuncJavaPool}
import edu.rice.habanero.benchmarks.big.BigConfig.{ExitMessage, Message, PingMessage, PongMessage}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner, PseudoRandom}
/**
 * @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
 */
object BigFuncJavaActorBenchmark {

  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new BigFuncJavaActorBenchmark)
  }

  private final class BigFuncJavaActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      BigConfig.parseArgs(args)
    }

    def printArgInfo() {
      BigConfig.printArgs()
    }

    def runIteration() {
      // One sink plus BigConfig.W "big" actors that ping random neighbors.
      val sinkActor = new SinkActor(BigConfig.W)
      sinkActor.start()

      val bigActors = Array.tabulate[FuncJavaActor[AnyRef]](BigConfig.W)(i => {
        val loopActor = new BigActor(i, BigConfig.N, sinkActor)
        loopActor.start()
        loopActor
      })

      // Hand every actor (including the sink) the full neighbor table
      // before any ping/pong traffic starts.
      val neighborMessage = new NeighborMessage(bigActors)
      sinkActor.send(neighborMessage)
      bigActors.foreach(loopActor => {
        loopActor.send(neighborMessage)
      })

      // Kick off the exchange; sender -1 matches each actor's initial
      // expPinger, so the first pong is accepted without a warning.
      bigActors.foreach(loopActor => {
        loopActor.send(new PongMessage(-1))
      })

      FuncJavaActorState.awaitTermination()
    }

    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double): Unit = {
      if (lastIteration) {
        FuncJavaPool.shutdown()
      }
    }
  }

  // Broadcast once at startup so every actor can address its peers by index.
  private case class NeighborMessage(neighbors: Array[FuncJavaActor[AnyRef]]) extends Message

  private class BigActor(id: Int, numMessages: Int, sinkActor: FuncJavaActor[AnyRef]) extends FuncJavaActor[AnyRef] {

    // Pings sent so far, and the neighbor id the next pong must come from.
    private var numPings = 0
    private var expPinger = -1
    private val random = new PseudoRandom(id)
    private var neighbors: Array[FuncJavaActor[AnyRef]] = null

    // Reused message instances to avoid per-send allocation.
    private val myPingMessage = new PingMessage(id)
    private val myPongMessage = new PongMessage(id)

    override def process(msg: AnyRef) {
      msg match {
        case pm: PingMessage =>
          // Answer a ping with our pong.
          val sender = neighbors(pm.sender)
          sender.send(myPongMessage)
        case pm: PongMessage =>
          if (pm.sender != expPinger) {
            println("ERROR: Expected: " + expPinger + ", but received ping from " + pm.sender)
          }
          if (numPings == numMessages) {
            // Quota reached: report completion to the sink.
            sinkActor.send(ExitMessage.ONLY)
          } else {
            sendPing()
            numPings += 1
          }
        case em: ExitMessage =>
          exit()
        case nm: NeighborMessage =>
          neighbors = nm.neighbors
      }
    }

    // Pings a uniformly random neighbor (possibly itself) and remembers
    // which id the matching pong must carry.
    private def sendPing(): Unit = {
      val target = random.nextInt(neighbors.size)
      val targetActor = neighbors(target)

      expPinger = target
      targetActor.send(myPingMessage)
    }
  }

  private class SinkActor(numWorkers: Int) extends FuncJavaActor[AnyRef] {

    // Count of workers that have finished their ping quota.
    private var numMessages = 0
    private var neighbors: Array[FuncJavaActor[AnyRef]] = null

    override def process(msg: AnyRef) {
      msg match {
        case em: ExitMessage =>
          numMessages += 1
          if (numMessages == numWorkers) {
            // All workers done: broadcast exit and stop ourselves.
            neighbors.foreach(loopWorker => loopWorker.send(ExitMessage.ONLY))
            exit()
          }
        case nm: NeighborMessage =>
          neighbors = nm.neighbors
      }
    }
  }
}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/big/BigFuncJavaActorBenchmark.scala | Scala | gpl-2.0 | 3,422 |
package geotrellis.util
import geotrellis.config.Dataset
import geotrellis.spark.io.hadoop.formats.TemporalGeoTiffInputFormat
import geotrellis.spark.io.kryo.KryoRegistrator
import geotrellis.spark.util.SparkUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.serializer.KryoSerializer
/** Mixin providing a SparkContext and a lazily created Hadoop configuration. */
trait SparkSupport {
  implicit val sc: SparkContext
  // NOTE(review): this is SparkUtils.hadoopConfiguration, not
  // sc.hadoopConfiguration — presumably a fresh/default configuration;
  // confirm that this is intended rather than the context's own config.
  @transient lazy val conf = SparkUtils.hadoopConfiguration
}
object SparkSupport {
  /**
   * Creates a SparkContext for the integration tests (Kryo serialization,
   * the jar containing this object attached) and applies the temporal
   * GeoTiff time tag/format to its Hadoop configuration.
   */
  def sparkContext(timeTag: String = "ISO_TIME", timeFormat: String = "yyyy-MM-dd'T'HH:mm:ss"): SparkContext =
    configureTime(timeTag, timeFormat)(
      new SparkContext(
        new SparkConf()
          .setAppName("GeoTrellis Integration Tests")
          .set("spark.serializer", classOf[KryoSerializer].getName)
          .set("spark.kryo.registrator", classOf[KryoRegistrator].getName)
          .setJars(SparkContext.jarOfObject(this).toList)
      )
    )

  /** Mutates `sc`'s Hadoop configuration with the given time tag/format; returns `sc`. */
  def configureTime(timeTag: String, timeFormat: String)(implicit sc: SparkContext): SparkContext = {
    TemporalGeoTiffInputFormat.setTimeTag(sc.hadoopConfiguration, timeTag)
    TemporalGeoTiffInputFormat.setTimeFormat(sc.hadoopConfiguration, timeFormat)
    sc
  }

  /** Applies the dataset's optional time tag/format settings; absent fields are skipped. */
  def configureTime(dataset: Dataset)(implicit sc: SparkContext): SparkContext = {
    dataset.output.keyIndexMethod.timeTag.foreach(TemporalGeoTiffInputFormat.setTimeTag(sc.hadoopConfiguration, _))
    dataset.output.keyIndexMethod.timeFormat.foreach(TemporalGeoTiffInputFormat.setTimeFormat(sc.hadoopConfiguration, _))
    sc
  }
}
| geotrellis/geotrellis-integration-tests-tool | src/main/scala/geotrellis/util/SparkSupport.scala | Scala | apache-2.0 | 1,531 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.builder.serializers
import com.outworkers.phantom.builder.QueryBuilder.Utils
import com.outworkers.phantom.builder.query.engine.CQLQuery
import com.outworkers.phantom.builder.syntax.CQLSyntax
/** Builders for the individual clauses of a CQL `UPDATE` statement. */
private[builder] class UpdateQueryBuilder {

  /** `IF <clause>` — lightweight-transaction conditional update. */
  def onlyIf(clause: CQLQuery): CQLQuery = {
    CQLQuery(CQLSyntax.IF).forcePad.append(clause)
  }

  val ifExists: CQLQuery = CQLQuery(CQLSyntax.ifExists)

  // Renders `column = column <op> value`, the shape used by counter updates.
  private[this] def counterSetter(column: String, op: String, value: String): CQLQuery = {
    CQLQuery(column).forcePad.append(CQLSyntax.Symbols.eqs)
      .forcePad.append(column)
      .forcePad.append(op)
      .forcePad.append(value)
  }

  /** `col = col + value` counter increment. */
  def increment(column: String, value: String): CQLQuery = {
    counterSetter(column, CQLSyntax.Symbols.plus, value)
  }

  /** `col = col - value` counter decrement. */
  def decrement(column: String, value: String): CQLQuery = {
    counterSetter(column, CQLSyntax.Symbols.-, value)
  }

  /** Plain `column = value` assignment. */
  def setTo(column: String, value: String): CQLQuery = {
    Utils.concat(column, CQLSyntax.Symbols.eqs, value)
  }

  /** `SET <clause>` part. */
  def set(clause: CQLQuery): CQLQuery = {
    CQLQuery(CQLSyntax.set).forcePad.append(clause)
  }

  def where(condition: CQLQuery): CQLQuery = {
    Utils.operator(CQLSyntax.where, condition)
  }

  def and(condition: CQLQuery): CQLQuery = {
    Utils.operator(CQLSyntax.and, condition)
  }

  /** Joins pre-rendered clauses with `sep` (single space by default). */
  def clauses(clauses: Seq[CQLQuery], sep: String = " "): CQLQuery = {
    CQLQuery.empty.append(clauses.map(_.queryString).mkString(sep))
  }

  /** Comma-joins clauses, e.g. for a SET assignment list. */
  def chain(clauses: Seq[CQLQuery]): CQLQuery = {
    CQLQuery.empty.append(clauses.map(_.queryString).mkString(", "))
  }

  /** `USING a AND b ...` options part. */
  def usingPart(queries: Seq[CQLQuery]): CQLQuery = {
    CQLQuery(CQLSyntax.using)
      .forcePad
      .append(clauses(queries, " " + CQLSyntax.And + " "))
  }

  /** `UPDATE <tableName>` statement head. */
  def update(tableName: String): CQLQuery = {
    CQLQuery(CQLSyntax.update)
      .forcePad.append(tableName)
  }

  /** `map[key] = value` — single map-entry update. */
  def updateMapColumn(column: String, key: String, value: String): CQLQuery = {
    qUtils.mapKey(column, key)
      .forcePad.append(CQLSyntax.Symbols.eqs)
      .forcePad.append(value)
  }
}
| outworkers/phantom | phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/UpdateQueryBuilder.scala | Scala | apache-2.0 | 2,673 |
package com.github.cloudinaboxsoftware.vaadin.util
import com.codahale.logula.Logging
import org.apache.log4j.Level
/**
 * Configures Logula/log4j logging once at object initialization and exposes
 * the configured logger as `Log`.
 */
object LogUtil extends Logging {
  Logging.configure {
    log =>
      log.registerWithJMX = true

      // NOTE(review): Env.logging is compared against the exact string
      // "true" — any other value (including "TRUE") disables logging
      // entirely; confirm this is intended.
      if (Env.logging == "true")
        log.level = Level.TRACE
      else
        log.level = Level.OFF

      // Console output threshold is read from the environment as a level name.
      log.console.enabled = true
      log.console.threshold = Level.toLevel(Env.consoleLogging)

      // Rolling file output: everything, 4 retained files.
      log.file.enabled = true
      log.file.filename = Env.logPath
      log.file.threshold = Level.ALL
      log.file.maxSize = 10 * 1024 // KB
      log.file.retainedFiles = 4
  }
  // `log` is supplied by the Logging trait after configuration runs above.
  val Log = log
}
| CloudInABox/scalavaadinutils | src/main/scala/com/github/cloudinaboxsoftware/vaadin/util/Log.scala | Scala | mit | 627 |
package com.karasiq.shadowcloud.actors
import akka.actor.{Actor, ActorLogging, ActorRef, NotInfluenceReceiveTimeout, PossiblyHarmful, Props, Status}
import akka.pattern.pipe
import akka.stream._
import akka.stream.scaladsl.{Sink, Source}
import com.karasiq.shadowcloud.ShadowCloud
import com.karasiq.shadowcloud.actors.ChunkIODispatcher.ChunkPath
import com.karasiq.shadowcloud.actors.events.StorageEvents
import com.karasiq.shadowcloud.actors.messages.StorageEnvelope
import com.karasiq.shadowcloud.actors.utils.MessageStatus
import com.karasiq.shadowcloud.index.diffs.IndexDiff
import com.karasiq.shadowcloud.model.utils.StorageHealth
import com.karasiq.shadowcloud.model.{Chunk, StorageId}
import com.karasiq.shadowcloud.providers.LifecycleHook
import com.karasiq.shadowcloud.storage.StorageHealthProvider
import com.karasiq.shadowcloud.storage.props.StorageProps
import com.karasiq.shadowcloud.streams.utils.AkkaStreamUtils
import scala.concurrent.duration._
import scala.util.{Success, Try}
object StorageDispatcher {
  // Messages
  sealed trait Message

  /** Requests the storage health; `checkNow = true` forces a fresh provider query. */
  final case class GetHealth(checkNow: Boolean = false) extends Message with NotInfluenceReceiveTimeout
  object GetHealth extends MessageStatus[StorageId, StorageHealth]

  // Internal messages
  private sealed trait InternalMessage extends Message with PossiblyHarmful
  // Self-message asking the dispatcher to append chunk metadata to the region index.
  private case class WriteChunkToIndex(path: ChunkPath, chunk: Chunk) extends InternalMessage

  // Props
  def props(
      storageId: StorageId,
      storageProps: StorageProps,
      index: ActorRef,
      chunkIO: ActorRef,
      health: StorageHealthProvider,
      lifecycleHook: Option[LifecycleHook] = None
  ): Props = {
    Props(new StorageDispatcher(storageId, storageProps, index, chunkIO, health, lifecycleHook))
  }
}
/**
 * Per-storage front actor: routes chunk I/O and index messages to their
 * dedicated child actors, batches chunk-index writes through an internal
 * stream queue, and tracks storage health.
 */
private final class StorageDispatcher(
    storageId: StorageId,
    storageProps: StorageProps,
    index: ActorRef,
    chunkIO: ActorRef,
    healthProvider: StorageHealthProvider,
    lifecycleHook: Option[LifecycleHook] = None
) extends Actor
  with ActorLogging {

  import StorageDispatcher._

  // -----------------------------------------------------------------------
  // Context
  // -----------------------------------------------------------------------
  import context.dispatcher

  private[this] val sc = ShadowCloud()
  private[this] val config = sc.configs.storageConfig(storageId, storageProps)
  // Periodic self-message forcing a fresh health check; first one fires immediately.
  private[this] val healthCheckSchedule =
    context.system.scheduler.scheduleWithFixedDelay(Duration.Zero, config.healthCheckInterval, self, GetHealth(true))
  import sc.implicits.materializer

  // -----------------------------------------------------------------------
  // State
  // -----------------------------------------------------------------------
  // Last known health; empty until the first provider reply arrives.
  private[this] var health: StorageHealth = StorageHealth.empty

  // -----------------------------------------------------------------------
  // Streams
  // -----------------------------------------------------------------------
  // Batches pending chunk-index writes, groups them by region, and forwards
  // WriteDiff envelopes to the index actor. New offers are dropped when the
  // queue is full (dropNew); the receive handler reschedules those.
  private[this] val pendingIndexQueue = Source
    .queue[(ChunkPath, Chunk)](sc.config.queues.chunksIndex, OverflowStrategy.dropNew)
    .via(AkkaStreamUtils.groupedOrInstant(sc.config.queues.chunksIndex, sc.config.queues.chunksIndexTime))
    .filter(_.nonEmpty)
    .mapConcat(_.groupBy(_._1.regionId).map {
      case (regionId, chunks) ⇒
        StorageIndex.Envelope(regionId, RegionIndex.WriteDiff(IndexDiff.newChunks(chunks.map(_._2): _*)))
    })
    .withAttributes(ActorAttributes.supervisionStrategy(Supervision.resumingDecider))
    .to(Sink.actorRef(index, Status.Failure(new IllegalStateException("Queue stopped"))))
    .named("pendingIndexQueue")
    .run()

  // -----------------------------------------------------------------------
  // Receive
  // -----------------------------------------------------------------------
  def receive: Receive = {
    // -----------------------------------------------------------------------
    // Chunk commands
    // -----------------------------------------------------------------------
    case msg: ChunkIODispatcher.Message ⇒
      chunkIO.forward(msg)

    // -----------------------------------------------------------------------
    // Index commands
    // -----------------------------------------------------------------------
    case msg: StorageIndex.Message ⇒
      index.forward(msg)

    case WriteChunkToIndex(path, chunk) ⇒
      val scheduler = context.system.scheduler
      pendingIndexQueue.offer((path, chunk)).onComplete {
        case Success(QueueOfferResult.Enqueued) ⇒
        // Pass
        case _ ⇒
          // Queue was full (or failed): retry through the supervisor later.
          log.warning("Rescheduling chunk index write: {}/{}", path, chunk)
          scheduler.scheduleOnce(15 seconds, sc.actors.regionSupervisor, StorageEnvelope(storageId, WriteChunkToIndex(path, chunk)))
      }

    // -----------------------------------------------------------------------
    // Storage health
    // -----------------------------------------------------------------------
    case GetHealth(checkNow) ⇒
      if (checkNow) {
        // Query the provider; the wrapped result goes both to self (to
        // update cached state) and to the original requester.
        val future = GetHealth.wrapFuture(storageId, healthProvider.health)
        future.pipeTo(self).pipeTo(sender())
      } else {
        sender() ! GetHealth.Success(storageId, health)
      }

    case GetHealth.Success(`storageId`, health) ⇒
      updateHealth(_ ⇒ health)

    case GetHealth.Failure(`storageId`, error) ⇒
      // Keep last known counters but mark the storage offline.
      updateHealth(_.copy(online = false))
      log.error(error, "Health update failure: {}", storageId)

    // -----------------------------------------------------------------------
    // Storage events
    // -----------------------------------------------------------------------
    case StorageEnvelope(`storageId`, event: StorageEvents.Event) ⇒
      event match {
        case StorageEvents.ChunkWritten(path, chunk) ⇒
          log.debug("Appending new chunk to index: {}", chunk)
          self ! WriteChunkToIndex(path, chunk.withoutData)

          val written = chunk.checksum.encSize
          log.debug("{} bytes written, updating storage health", written)
          updateHealth(_ - written)

        case _ ⇒ // Ignore
      }
  }

  // -----------------------------------------------------------------------
  // Utils
  // -----------------------------------------------------------------------
  // Applies `func` to the cached health and publishes the update event.
  def updateHealth(func: StorageHealth ⇒ StorageHealth): Unit = {
    this.health = func(this.health)
    log.debug("Storage [{}] health updated: {}", storageId, health)
    sc.eventStreams.publishStorageEvent(storageId, StorageEvents.HealthUpdated(health))
  }

  // -----------------------------------------------------------------------
  // Lifecycle hooks
  // -----------------------------------------------------------------------
  override def preStart(): Unit = {
    super.preStart()
    context.watch(chunkIO)
    context.watch(index)
    sc.eventStreams.storage.subscribe(self, storageId)
    lifecycleHook.foreach(_.initialize())
  }

  override def postStop(): Unit = {
    Try(lifecycleHook.foreach(_.shutdown())).failed.foreach(log.error(_, "Error in shutdown hook"))
    sc.eventStreams.storage.unsubscribe(self)
    healthCheckSchedule.cancel()
    pendingIndexQueue.complete()
    super.postStop()
  }
}
| Karasiq/shadowcloud | core/src/main/scala/com/karasiq/shadowcloud/actors/StorageDispatcher.scala | Scala | apache-2.0 | 7,297 |
package no.nr.edvard.osiris
import model.MethodCorpus
import xml.Elem
import java.lang.String
/** Renders a [[MethodCorpus]] as a standalone HTML report with highlighted source samples. */
class HtmlCorpusRenderer {
  def render(corpusTitle: String, corpus: MethodCorpus): Elem = {
    require(corpusTitle != null && corpus != null)

    def doc =
      <html>
        { head }
        { body }
      </html>;

    // Head pulls in blueprint CSS, a report stylesheet and SyntaxHighlighter
    // (paths are relative to the generated report's location).
    def head =
      <head>
        <title>Osiris corpus sample</title>
        <link rel="stylesheet"
              href="../src/main/resources/blueprint-css/screen.css"
              type="text/css"
              media="screen, projection" />
        <link rel="stylesheet"
              href="../src/main/resources/blueprint-css/print.css"
              type="text/css"
              media="print" />
        <link rel="stylesheet"
              href="../src/main/resources/osiris-report.css"
              type="text/css"
              media="screen, projection, print" />
        <script type="text/javascript"
                src="../src/main/resources/syntaxhighlighter/scripts/shCore.js">
        </script>
        <script
            type="text/javascript"
            src="../src/main/resources/syntaxhighlighter/scripts/shBrushJava.js">
        </script>
        <link
            href="../src/main/resources/syntaxhighlighter/styles/shCore.css"
            rel="stylesheet"
            type="text/css" />
        <link
            href=
            "../src/main/resources/syntaxhighlighter/styles/shThemeDefault.css"
            rel="stylesheet"
            type="text/css" />
        <script>SyntaxHighlighter.all()</script>
      </head>;

    def body =
      <body>
        <div class="container">
          { header }
          <hr />
          { corpusSample }
        </div>
      </body>;

    def header = <h1>Osiris Corpus Sample -- { corpusTitle }</h1>;

    // One <div> per sample, rendered in parallel (.par); samples without
    // source code get a "no source" placeholder instead.
    def corpusSample =
      corpus.par.map(sample =>
        <div class="span-24 last"> {
          sample.sourceCode match {
            case Some(sourceCode) =>
              <div class="span-20 last">
                <div class="codeSample">
                  <div class="extraSampleSectionTitle">{ corpusTitle }</div>
                  <div class="padding">
                    <p class="header">
                      Method <strong>{ sample.name }</strong>
                      in class <strong>{ sample._type.name }</strong>
                      in application
                      <!-- <strong>{ sample.application.name }</strong> -->
                    </p>
                    <pre class="brush: java">{
                      sloppyBreakLines(sourceCode).trim
                    }</pre>
                  </div>
                </div>
              </div>
              <div class="span-4 last">
              </div>;
            case None =>
              <p class="noSourceInfo span-16">
                { "Could not find source for (app:%s, cls:%s, m:%s)###".format(
                    "sample.application.name",
                    sample._type.name,
                    sample.name
                  )
                }
              </p>;
          }
        }
        </div>
      ).toList

    doc
  }

  // Best-effort line wrapper: splits each line at the last space at or
  // before `maxLength`, or hard-splits at `maxLength` when no suitable
  // space exists.
  private def sloppyBreakLines(text: String, maxLength: Int = 95) = {
    // `bestPos` is the last acceptable split position found so far
    // (-1 on the initial call, meaning "none yet").
    def split(line: String, bestPos: Int): String = {
      val p = line.indexOf(' ', bestPos+2)
      require(p == -1 || p >= 1)

      if (line.length <= maxLength)
        line
      else if (p == -1 || p > maxLength) {
        // No further space fits: split at the best position seen, or hard
        // at maxLength, then continue on the remainder.
        val pp = if (bestPos >= 1) bestPos else maxLength
        line.substring(0, pp) + "\\n" + split(line.substring(pp), 1)
      } else
        split(line, p)
    }
    text.split('\\n').map(split(_, -1)).mkString("\\n")
  }
}
} | edwkar/edwbsc | projects/Osiris/src/main/scala/no/nr/edvard/osiris/HtmlCorpusRenderer.scala | Scala | gpl-2.0 | 3,649 |
package io.getquill.context.finagle.mysql
import java.time.{ LocalDate, LocalDateTime }
import java.util.{ Date, UUID }
import com.twitter.finagle.mysql._
import io.getquill.FinagleMysqlContext
import io.getquill.util.Messages.fail
import scala.reflect.{ ClassTag, classTag }
/**
 * Decoders mapping finagle-mysql wire `Value`s to Scala types for a
 * [[FinagleMysqlContext]].
 */
trait FinagleMysqlDecoders {
  this: FinagleMysqlContext[_] =>

  type Decoder[T] = FinagleMysqlDecoder[T]

  case class FinagleMysqlDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] {
    override def apply(index: Index, row: ResultRow, session: Session) =
      decoder(index, row, session)
  }

  // Builds a decoder from a partial function over the column value; fails
  // with a descriptive error when the runtime value type is not handled.
  def decoder[T: ClassTag](f: PartialFunction[Value, T]): Decoder[T] =
    FinagleMysqlDecoder((index, row, session) => {
      val value = row.values(index)
      f.lift(value).getOrElse(fail(s"Value '$value' can't be decoded to '${classTag[T].runtimeClass}'"))
    })

  // SQL NULL maps to None; any other value is delegated to the inner decoder.
  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    FinagleMysqlDecoder((index, row, session) => {
      row.values(index) match {
        case NullValue => None
        case _         => Some(d.decoder(index, row, session))
      }
    })

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] =
    FinagleMysqlDecoder(mappedBaseDecoder(mapped, d.decoder))

  implicit val stringDecoder: Decoder[String] =
    decoder[String] {
      case StringValue(v) => v
    }

  implicit val bigDecimalDecoder: Decoder[BigDecimal] =
    decoder[BigDecimal] {
      case BigDecimalValue(v) => v
    }

  // MySQL booleans arrive as several integer encodings (TINYINT/SMALLINT/
  // INT/BIGINT) or as a raw BIT column; all are treated as "== 1".
  implicit val booleanDecoder: Decoder[Boolean] =
    decoder[Boolean] {
      case ByteValue(v)                     => v == (1: Byte)
      case ShortValue(v)                    => v == (1: Short)
      case IntValue(v)                      => v == 1
      case LongValue(v)                     => v == 1L
      case v: RawValue if v.typ == Type.Bit => v.bytes.head == (1: Byte)
    }

  implicit val byteDecoder: Decoder[Byte] =
    decoder[Byte] {
      case ByteValue(v)  => v
      case ShortValue(v) => v.toByte
    }

  implicit val shortDecoder: Decoder[Short] =
    decoder[Short] {
      case ShortValue(v) => v
    }

  // Narrowing accepted: BIGINT values are truncated with toInt here.
  implicit val intDecoder: Decoder[Int] =
    decoder[Int] {
      case IntValue(v)  => v
      case LongValue(v) => v.toInt
    }

  implicit val longDecoder: Decoder[Long] =
    decoder[Long] {
      case IntValue(v)  => v.toLong
      case LongValue(v) => v
    }

  implicit val floatDecoder: Decoder[Float] =
    decoder[Float] {
      case FloatValue(v) => v
    }

  implicit val doubleDecoder: Decoder[Double] =
    decoder[Double] {
      case DoubleValue(v) => v
    }

  implicit val byteArrayDecoder: Decoder[Array[Byte]] =
    decoder[Array[Byte]] {
      case v: RawValue => v.bytes
    }

  // `timestampValue` (backticked) matches the context's configured
  // timestamp extractor; DATE columns are also accepted.
  implicit val dateDecoder: Decoder[Date] =
    decoder[Date] {
      case `timestampValue`(v) => new Date(v.getTime)
      case DateValue(d)        => new Date(d.getTime)
    }

  implicit val localDateDecoder: Decoder[LocalDate] = decoder[LocalDate] {
    case `timestampValue`(v) => v.toLocalDateTime.toLocalDate
    case DateValue(d)        => d.toLocalDate
  }

  // Timestamps are interpreted in the context's extraction time zone.
  implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder[LocalDateTime] {
    case `timestampValue`(v) => v.toInstant.atZone(extractionTimeZone.toZoneId).toLocalDateTime
  }

  // UUIDs are stored as strings and parsed on read.
  implicit val uuidDecoder: Decoder[UUID] = mappedDecoder(MappedEncoding(UUID.fromString), stringDecoder)
}
| getquill/quill | quill-finagle-mysql/src/main/scala/io/getquill/context/finagle/mysql/FinagleMysqlDecoders.scala | Scala | apache-2.0 | 3,413 |
package org.monalang.monac.symbol
import org.monalang.monac.parsing.ASTNode
// entities that can be referred to through source code
/** Base type for symbol-table entries addressable from source code. */
abstract class Symbol

/** A symbol backed by a node of the parsed AST. */
case class ASTSymbol(node: ASTNode) extends Symbol

/** Marker symbol standing in for a function argument. */
case class ArgumentMarker() extends Symbol
// modules etc | corazza/monac-scala | monac/src/main/scala/org/monalang/monac/symbol/Symbol.scala | Scala | gpl-3.0 | 267 |
/*
* Copyright (C) 2010 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.tools.script
import groovy.lang.Binding
import java.io.File
import org.openmole.core.tools.service.ObjectPool
/** Factory for [[GroovyProxyPool]] instances. */
object GroovyProxyPool {
  def apply(code: String, jars: Iterable[File] = Iterable.empty) = new GroovyProxyPool(code, jars)
}
/**
 * Pooled, thread-safe wrapper around a compiled Groovy script: each
 * execution borrows a [[GroovyProxy]] from an internal [[ObjectPool]], so
 * concurrent callers never share an unsynchronized proxy.
 *
 * @param code the Groovy source to compile
 * @param jars extra jars to place on the script's classpath
 */
class GroovyProxyPool(code: String, jars: Iterable[File] = Iterable.empty) extends GroovyFunction {

  // Don't use soft reference here, it leads to keep compiling the script in
  // case of high memory load and make it worse
  @transient lazy private val pool = new ObjectPool({ new GroovyProxy(code, jars) })

  def apply(binding: Binding) = execute(binding)

  /** Runs the script against `binding` on a pooled proxy and returns its result. */
  def execute(binding: Binding): Object = pool.exec {
    _.executeUnsynchronized(binding)
  }

  // NOTE(review): the unused private `borrow`/`release` helpers were removed;
  // pooling is handled entirely through `pool.exec`.
}
| ISCPIF/PSEExperiments | openmole-src/openmole/core/org.openmole.core.tools/src/main/scala/org/openmole/core/tools/script/GroovyProxyPool.scala | Scala | agpl-3.0 | 1,543 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.{FileNotFoundException, IOException}
import org.apache.parquet.io.ParquetDecodingException
import org.apache.spark.{Partition => RDDPartition, SparkUpgradeException, TaskContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.rdd.{InputFileBlockHolder, RDD}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.sql.vectorized.ColumnarBatch
import org.apache.spark.util.NextIterator
/**
 * A part (i.e. "block") of a single file that should be read, along with the partition column
 * values that must be prepended to each row produced from it.
 *
 * @param partitionValues value of partition columns to be prepended to each row.
 * @param filePath URI of the file to read.
 * @param start the beginning offset (in bytes) of the block.
 * @param length number of bytes to read.
 * @param locations locality information (list of nodes that have the data); not serialized.
 */
case class PartitionedFile(
    partitionValues: InternalRow,
    filePath: String,
    start: Long,
    length: Long,
    @transient locations: Array[String] = Array.empty) {

  override def toString: String = {
    val end = start + length
    s"path: $filePath, range: $start-$end, partition values: $partitionValues"
  }
}
/**
 * An RDD that scans a list of file partitions.
 *
 * Each RDD partition corresponds to one [[FilePartition]]; `readFunction` is applied to each
 * [[PartitionedFile]] in turn. Depending on the data source, the produced iterator may yield
 * either [[InternalRow]]s or [[ColumnarBatch]]es (see the erasure cast at the end of `compute`).
 * Behaviour on missing/corrupt files is controlled by the two SQLConf flags read below.
 */
class FileScanRDD(
    @transient private val sparkSession: SparkSession,
    readFunction: (PartitionedFile) => Iterator[InternalRow],
    @transient val filePartitions: Seq[FilePartition])
  extends RDD[InternalRow](sparkSession.sparkContext, Nil) {

  // Snapshot the conf flags on the driver so the executor-side closure does not
  // need the (transient) SparkSession.
  private val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles
  private val ignoreMissingFiles = sparkSession.sessionState.conf.ignoreMissingFiles

  override def compute(split: RDDPartition, context: TaskContext): Iterator[InternalRow] = {
    val iterator = new Iterator[Object] with AutoCloseable {
      private val inputMetrics = context.taskMetrics().inputMetrics
      private val existingBytesRead = inputMetrics.bytesRead

      // Find a function that will return the FileSystem bytes read by this thread. Do this before
      // apply readFunction, because it might read some bytes.
      private val getBytesReadCallback =
        SparkHadoopUtil.get.getFSBytesReadOnThreadCallback()

      // We get our input bytes from thread-local Hadoop FileSystem statistics.
      // If we do a coalesce, however, we are likely to compute multiple partitions in the same
      // task and in the same thread, in which case we need to avoid override values written by
      // previous partitions (SPARK-13071).
      private def incTaskInputMetricsBytesRead(): Unit = {
        inputMetrics.setBytesRead(existingBytesRead + getBytesReadCallback())
      }

      private[this] val files = split.asInstanceOf[FilePartition].files.toIterator
      private[this] var currentFile: PartitionedFile = null
      private[this] var currentIterator: Iterator[Object] = null

      def hasNext: Boolean = {
        // Kill the task in case it has been marked as killed. This logic is from
        // InterruptibleIterator, but we inline it here instead of wrapping the iterator in order
        // to avoid performance overhead.
        context.killTaskIfInterrupted()
        (currentIterator != null && currentIterator.hasNext) || nextIterator()
      }

      def next(): Object = {
        val nextElement = currentIterator.next()
        // TODO: we should have a better separation of row based and batch based scan, so that we
        // don't need to run this `if` for every record.
        // NOTE(review): preNumRecordsRead is never read — candidate for removal.
        val preNumRecordsRead = inputMetrics.recordsRead
        if (nextElement.isInstanceOf[ColumnarBatch]) {
          incTaskInputMetricsBytesRead()
          inputMetrics.incRecordsRead(nextElement.asInstanceOf[ColumnarBatch].numRows())
        } else {
          // too costly to update every record
          if (inputMetrics.recordsRead %
              SparkHadoopUtil.UPDATE_INPUT_METRICS_INTERVAL_RECORDS == 0) {
            incTaskInputMetricsBytesRead()
          }
          inputMetrics.incRecordsRead(1)
        }
        nextElement
      }

      /** Opens `currentFile`, enriching a FileNotFoundException with a cache-invalidation hint. */
      private def readCurrentFile(): Iterator[InternalRow] = {
        try {
          readFunction(currentFile)
        } catch {
          case e: FileNotFoundException =>
            throw new FileNotFoundException(
              e.getMessage + "\\n" +
                "It is possible the underlying files have been updated. " +
                "You can explicitly invalidate the cache in Spark by " +
                "running 'REFRESH TABLE tableName' command in SQL or " +
                "by recreating the Dataset/DataFrame involved.")
        }
      }

      /** Advances to the next file. Returns true if a new non-empty iterator is available. */
      private def nextIterator(): Boolean = {
        if (files.hasNext) {
          currentFile = files.next()
          logInfo(s"Reading File $currentFile")
          // Sets InputFileBlockHolder for the file block's information
          InputFileBlockHolder.set(currentFile.filePath, currentFile.start, currentFile.length)

          if (ignoreMissingFiles || ignoreCorruptFiles) {
            currentIterator = new NextIterator[Object] {
              // The readFunction may read some bytes before consuming the iterator, e.g.,
              // vectorized Parquet reader. Here we use lazy val to delay the creation of
              // iterator so that we will throw exception in `getNext`.
              private lazy val internalIter = readCurrentFile()

              override def getNext(): AnyRef = {
                try {
                  if (internalIter.hasNext) {
                    internalIter.next()
                  } else {
                    finished = true
                    null
                  }
                } catch {
                  case e: FileNotFoundException if ignoreMissingFiles =>
                    logWarning(s"Skipped missing file: $currentFile", e)
                    finished = true
                    null
                  // Throw FileNotFoundException even if `ignoreCorruptFiles` is true
                  case e: FileNotFoundException if !ignoreMissingFiles => throw e
                  case e @ (_: RuntimeException | _: IOException) if ignoreCorruptFiles =>
                    logWarning(
                      s"Skipped the rest of the content in the corrupted file: $currentFile", e)
                    finished = true
                    null
                }
              }

              override def close(): Unit = {}
            }
          } else {
            currentIterator = readCurrentFile()
          }

          // Trigger the first hasNext here so file-open/decoding failures surface with the
          // richer error messages below instead of deep inside the consumer's loop.
          try {
            hasNext
          } catch {
            case e: SchemaColumnConvertNotSupportedException =>
              val message = "Parquet column cannot be converted in " +
                s"file ${currentFile.filePath}. Column: ${e.getColumn}, " +
                s"Expected: ${e.getLogicalType}, Found: ${e.getPhysicalType}"
              throw new QueryExecutionException(message, e)
            case e: ParquetDecodingException =>
              if (e.getCause.isInstanceOf[SparkUpgradeException]) {
                throw e.getCause
              } else if (e.getMessage.contains("Can not read value at")) {
                val message = "Encounter error while reading parquet files. " +
                  "One possible cause: Parquet column cannot be converted in the " +
                  "corresponding files. Details: "
                throw new QueryExecutionException(message, e)
              }
              throw e
          }
        } else {
          // No files left: clear state so downstream input_file_name() etc. see nothing.
          currentFile = null
          InputFileBlockHolder.unset()
          false
        }
      }

      override def close(): Unit = {
        incTaskInputMetricsBytesRead()
        InputFileBlockHolder.unset()
      }
    }

    // Register an on-task-completion callback to close the input stream.
    context.addTaskCompletionListener[Unit](_ => iterator.close())

    iterator.asInstanceOf[Iterator[InternalRow]] // This is an erasure hack.
  }

  override protected def getPartitions: Array[RDDPartition] = filePartitions.toArray

  override protected def getPreferredLocations(split: RDDPartition): Seq[String] = {
    split.asInstanceOf[FilePartition].preferredLocations()
  }
}
| witgo/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileScanRDD.scala | Scala | apache-2.0 | 9,245 |
package de.frosner.broccoli.signal
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import sun.misc.{Signal, SignalHandler}
/** Integration spec: a handler registered with UnixSignalManager fires when its signal is raised. */
class UnixSignalManagerIntegrationSpec extends Specification with Mockito {

  "Registering new signal" should {
    "trigger the handler when the signal is raised" in {
      val signalManager = new UnixSignalManager()
      val usr2 = new Signal("USR2")
      val signalHandler = mock[SignalHandler]

      signalManager.register(usr2, signalHandler)
      Signal.raise(usr2)
      // Signal delivery is asynchronous; give the JVM a moment to dispatch it.
      Thread.sleep(1000)

      there was one(signalHandler).handle(usr2)
    }
  }
}
| FRosner/cluster-broccoli | server/src/it/scala/de/frosner/broccoli/signal/UnixSignalManagerIntegrationSpec.scala | Scala | apache-2.0 | 587 |
/*
* Copyright 2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package mongodb {
package record {
import common._
import field._
import http.js.JE._
import http.{LiftSession, S}
import json.JsonAST._
import util.Helpers._
import java.util.{Calendar, Date}
import org.bson.types.ObjectId
import org.specs.Specification
import org.specs.runner.JUnit4
import net.liftweb.record.field._
// JUnit 4 runner wrapper so the specs object below can be executed by JUnit tooling.
class CustomSerializersSpecsTest extends JUnit4(CustomSerializersSpecs)

// Fixture records and JsonObjects exercised by CustomSerializersSpecs. Field names and
// structure are part of the serialized Mongo/JSON representation asserted in the specs,
// so they must not be renamed or reordered casually.
package customserializersspecs {

  // Embedded child document; how `birthdate` serializes depends on the owning meta's formats.
  case class Child(name: String, birthdate: Date) extends JsonObject[Child] {
    def meta = Child
  }
  object Child extends JsonObjectMeta[Child]

  /*
   * Date as String
   */
  class Person extends MongoRecord[Person] with MongoId[Person] {
    def meta = Person

    object children extends MongoJsonObjectListField(this, Child)
    object firstBorn extends JsonObjectField(this, Child) {
      def defaultValue = Child("", now)
    }
  }
  // Default formats: the spec below expects dates serialized as plain ISO strings.
  object Person extends Person with MongoMetaRecord[Person]

  /*
   * Date as Date
   */
  class Person2 extends MongoRecord[Person2] with MongoId[Person2] {
    def meta = Person2

    object children extends MongoJsonObjectListField(this, Child)
    object firstBorn extends JsonObjectField(this, Child) {
      def defaultValue = Child("", now)
    }
  }
  // With `allFormats` the spec below expects dates serialized as {"$dt": "..."} documents.
  object Person2 extends Person2 with MongoMetaRecord[Person2] {
    override def formats = allFormats
  }

  class Player extends MongoRecord[Player] with MongoId[Player] {
    def meta = Player

    object name extends StringField(this, 256)
  }
  object Player extends Player with MongoMetaRecord[Player]

  /*
   * ObjectId as String
   */
  case class Team(id: String, name: String, qb: String) extends JsonObject[Team] {
    def meta = Team
  }
  object Team extends JsonObjectMeta[Team]

  class League extends MongoRecord[League] with MongoId[League] {
    def meta = League

    object teams extends MongoJsonObjectListField(this, Team)
    object champion extends JsonObjectField(this, Team) {
      def defaultValue = Team("", "", "")
    }
  }
  object League extends League with MongoMetaRecord[League]

  /*
   * ObjectId as ObjectId
   */
  case class Team2(id: ObjectId, name: String, qb: ObjectId) extends JsonObject[Team2] {
    def meta = Team2
  }
  object Team2 extends JsonObjectMeta[Team2]

  class League2 extends MongoRecord[League2] with MongoId[League2] {
    def meta = League2

    object teams extends MongoJsonObjectListField(this, Team2)
    object champion extends JsonObjectField(this, Team2) {
      def defaultValue = Team2(ObjectId.get, "", ObjectId.get)
    }
  }
  // ObjectIdSerializer: the spec below expects ObjectIds as {"$oid": "..."} documents.
  object League2 extends League2 with MongoMetaRecord[League2] {
    override def formats = super.formats + new ObjectIdSerializer
  }

  // Enumeration-backed fixture for EnumField.
  object WeekDay extends Enumeration {
    type WeekDay = Value
    val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
  }

  class EnumRec extends MongoRecord[EnumRec] with MongoId[EnumRec] {
    def meta = EnumRec

    object dow extends EnumField(this, WeekDay)
  }
  object EnumRec extends EnumRec with MongoMetaRecord[EnumRec] {
    override def collectionName = "enumrecs"
  }
}
// Round-trip specs: save records containing custom-serialized values (Date, ObjectId) to
// Mongo, read them back, and pin the exact JValue/form representations. Requires a running
// MongoDB (checkMongoIsRunning skips/fails otherwise — behavior defined in MongoTestKit).
object CustomSerializersSpecs extends Specification with MongoTestKit {
  import customserializersspecs._

  "CustomSerializers" should {
    "handle Date as String value in JsonObjects" in {
      checkMongoIsRunning

      // test data
      // Epoch millis chosen to match the ISO timestamps asserted below (UTC).
      val bdjack = Calendar.getInstance.setTimezone(utc)
      bdjack.setTimeInMillis(1288742280000L)
      val bdjill = Calendar.getInstance.setTimezone(utc)
      bdjill.setTimeInMillis(1288742880000L)
      val jack = Child("Jack", bdjack.getTime)
      val jill = Child("Jill", bdjill.getTime)

      // create and save a Person record
      val mother = Person.createRecord
      mother.children(List(jack, jill))
      mother.firstBorn(jack)
      mother.save

      // retrieve it and compare
      val mother2 = Person.find(mother.id)
      mother2 must notBeEmpty
      mother2 foreach { m =>
        m.children.value mustEqual mother.children.value
        m.firstBorn.value mustEqual mother.firstBorn.value
      }

      // check the conversion functions
      /*
      mother.children.asJs mustEqual JsArray(
        JsObj(("name", Str("Jack")), ("birthdate", Str("2010-11-02T23:58:00.000Z"))),
        JsObj(("name", Str("Jill")), ("birthdate", Str("2010-11-03T00:08:00.000Z")))
      )*/
      mother.children.asJValue mustEqual JArray(List(
        JObject(List(
          JField("name", JString("Jack")),
          JField("birthdate", JString("2010-11-02T23:58:00.000Z"))
        )),
        JObject(List(
          JField("name", JString("Jill")),
          JField("birthdate", JString("2010-11-03T00:08:00.000Z"))))
      ))
      mother.children.toForm must beEmpty
      /*
      mother.firstBorn.asJs mustEqual
        JsObj(("name", Str("Jack")), ("birthdate", Str("2010-11-02T23:58:00.000Z")))
      */
      mother.firstBorn.asJValue mustEqual
        JObject(List(
          JField("name", JString("Jack")),
          JField("birthdate", JString("2010-11-02T23:58:00.000Z"))
        ))
      mother.firstBorn.toForm must beEmpty
    }

    "handle Date as Date value in JsonObjects using DateSerializer" in {
      checkMongoIsRunning

      // test data
      val bdjack = Calendar.getInstance.setTimezone(utc)
      bdjack.setTimeInMillis(1288742280000L)
      val bdjill = Calendar.getInstance.setTimezone(utc)
      bdjill.setTimeInMillis(1288742880000L)
      val jack = Child("Jack", bdjack.getTime)
      val jill = Child("Jill", bdjill.getTime)

      // create and save a Person record
      val mother = Person2.createRecord
      mother.children(List(jack, jill))
      mother.firstBorn(jack)
      mother.save

      // retrieve it and compare
      val mother2 = Person2.find(mother.id)
      mother2 must notBeEmpty
      mother2 foreach { m =>
        m.children.value mustEqual mother.children.value
        m.firstBorn.value mustEqual mother.firstBorn.value
      }

      // check the conversion functions
      // With allFormats, dates serialize as {"$dt": ...} rather than bare strings.
      /*
      mother.children.asJs mustEqual JsArray(
        JsObj(("name", Str("Jack")), ("birthdate", JsObj(("$dt", Str("2010-11-02T23:58:00.000Z"))))),
        JsObj(("name", Str("Jill")), ("birthdate", JsObj(("$dt", Str("2010-11-03T00:08:00.000Z")))))
      )*/
      mother.children.asJValue mustEqual JArray(List(
        JObject(List(
          JField("name", JString("Jack")),
          JField("birthdate", JObject(List(JField("$dt", JString("2010-11-02T23:58:00.000Z")))))
        )),
        JObject(List(
          JField("name", JString("Jill")),
          JField("birthdate", JObject(List(JField("$dt", JString("2010-11-03T00:08:00.000Z")))))
        ))
      ))
      mother.children.toForm must beEmpty
      /*
      mother.firstBorn.asJs mustEqual
        JsObj(("name", Str("Jack")), ("birthdate", JsObj(("$dt", Str("2010-11-02T23:58:00.000Z")))))
      */
      mother.firstBorn.asJValue mustEqual
        JObject(List(
          JField("name", JString("Jack")),
          JField("birthdate", JObject(List(JField("$dt", JString("2010-11-02T23:58:00.000Z")))))
        ))
      mother.firstBorn.toForm must beEmpty
    }

    "handle ObjectId as String value in JsonObjects" in {
      checkMongoIsRunning

      // test data
      val rmoss = Player.createRecord.name("Randy Moss").save
      val bfavre = Player.createRecord.name("Brett Favre").save
      val vikes = Team(ObjectId.get.toString, "Vikings", bfavre.id.toString)
      val jets = Team(ObjectId.get.toString, "Jets", "")
      val saints = Team(ObjectId.get.toString, "Saints", "")

      // create and save a League record
      val nfl = League.createRecord
      nfl.teams(List(vikes, jets, saints))
      nfl.champion(saints)
      nfl.save

      // retrieve it and compare
      val nfl2 = League.find(nfl.id)
      nfl2 must notBeEmpty
      nfl2 foreach { l =>
        l.teams.value mustEqual nfl.teams.value
        l.champion.value mustEqual nfl.champion.value
      }

      // find a player
      val vqb = Player.find(vikes.qb)
      vqb must notBeEmpty
      vqb foreach { p =>
        p.name.value mustEqual "Brett Favre"
      }

      // check the conversion functions
      // nfl._id.asJs mustEqual Str(nfl._id.value.toString)
      nfl._id.asJValue mustEqual JString(nfl._id.value.toString)
      // Rendering a form field needs an initialized LiftSession.
      val session = new LiftSession("", randomString(20), Empty)
      val formPattern = "<input name=\\".*\\" type=\\"text\\" tabindex=\\"1\\" value=\\""+nfl._id.value.toString+"\\" id=\\"_id_id_field\\"></input>"
      S.initIfUninitted(session) {
        val form = nfl._id.toForm
        form must notBeEmpty
        form foreach { f =>
          f.toString must beMatching(formPattern)
        }
      }

      // check the setFrom* functions
      val nflid = ObjectId.get
      nfl._id.setFromString(nflid.toString)
      nfl._id.value mustEqual nflid

      nfl._id.setFromString("garbage")
      nfl._id.valueBox mustEqual Failure("Invalid ObjectId string: garbage")

      nfl._id.setFromJValue(JString(nflid.toString))
      nfl._id.value mustEqual nflid

      nfl._id.setFromAny(nflid)
      nfl._id.value mustEqual nflid

      nfl._id.setFromAny(nflid.toString)
      nfl._id.value mustEqual nflid
    }

    "handle ObjectId as ObjectId values in JsonObjects using ObjectIdSerializer" in {
      checkMongoIsRunning

      // test data
      val rmoss = Player.createRecord.name("Randy Moss").save
      val bfavre = Player.createRecord.name("Brett Favre").save
      val vikes = Team2(ObjectId.get, "Vikings", bfavre.id)
      val jets = Team2(ObjectId.get, "Jets", bfavre.id)
      val saints = Team2(ObjectId.get, "Saints", bfavre.id)

      // create and save a League record
      val nfl = League2.createRecord
      nfl.teams(List(vikes, jets, saints))
      nfl.champion(saints)
      nfl.save

      // retrieve it and compare
      val nfl2 = League2.find(nfl.id.toString)
      nfl2 must notBeEmpty
      nfl2 foreach { l =>
        l.teams.value mustEqual nfl.teams.value
        l.champion.value mustEqual nfl.champion.value
      }

      // find a player
      val vqb = Player.find(vikes.qb)
      vqb must notBeEmpty
      vqb foreach { p =>
        p.name.value mustEqual "Brett Favre"
      }

      // check the conversion functions
      // With ObjectIdSerializer, ObjectIds serialize as {"$oid": ...} documents.
      // nfl._id.asJs mustEqual JsObj(("$oid", Str(nfl._id.value.toString)))
      nfl._id.asJValue mustEqual JObject(List(JField("$oid", JString(nfl._id.value.toString))))
      val session = new LiftSession("", randomString(20), Empty)
      val formPattern = "<input name=\\".*\\" type=\\"text\\" tabindex=\\"1\\" value=\\""+nfl._id.value.toString+"\\" id=\\"_id_id_field\\"></input>"
      S.initIfUninitted(session) {
        val form = nfl._id.toForm
        form must notBeEmpty
        form foreach { f =>
          f.toString must beMatching(formPattern)
        }
      }

      // check the setFrom* functions
      val nflid = ObjectId.get
      nfl._id.setFromString(nflid.toString)
      nfl._id.value mustEqual nflid

      nfl._id.setFromString("garbage")
      nfl._id.valueBox mustEqual Failure("Invalid ObjectId string: garbage")

      nfl._id.setFromJValue(JObject(List(JField("$oid", JString(nflid.toString)))))
      nfl._id.value mustEqual nflid

      nfl._id.setFromAny(nflid)
      nfl._id.value mustEqual nflid

      nfl._id.setFromAny(nflid.toString)
      nfl._id.value mustEqual nflid
    }
  }
}
}
}
}
| wsaccaco/lift | framework/lift-persistence/lift-mongodb-record/src/test/scala/net/liftweb/mongodb/record/CustomSerializersSpecs.scala | Scala | apache-2.0 | 11,966 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.parser
import java.sql.{Date, Timestamp}
import java.util.Locale
import javax.xml.bind.DatatypeConverter
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.antlr.v4.runtime.{ParserRuleContext, Token}
import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last}
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.random.RandomSampler
/**
* The AstBuilder converts an ANTLR4 ParseTree into a catalyst Expression, LogicalPlan or
* TableIdentifier.
*/
class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging {
import ParserUtils._
// Zero-arg constructor: builds the AstBuilder with default SQLConf values.
def this() = this(new SQLConf())
/** Visits `ctx` with this builder and casts the (AnyRef) result to the expected type `T`. */
protected def typedVisit[T](ctx: ParseTree): T = {
  ctx.accept(this).asInstanceOf[T]
}
/**
 * Default behaviour for all visit methods: a node with exactly one child simply delegates to
 * that child (there is no generic way to merge results from multiple children); any other
 * child count yields null.
 */
override def visitChildren(node: RuleNode): AnyRef = {
  if (node.getChildCount != 1) {
    null
  } else {
    node.getChild(0).accept(this)
  }
}
/** Entry point: parse a complete SQL statement into a [[LogicalPlan]]. */
override def visitSingleStatement(ctx: SingleStatementContext): LogicalPlan = withOrigin(ctx) {
  visit(ctx.statement).asInstanceOf[LogicalPlan]
}
/** Entry point: parse a single (possibly named/aliased) expression. */
override def visitSingleExpression(ctx: SingleExpressionContext): Expression = withOrigin(ctx) {
  visitNamedExpression(ctx.namedExpression)
}
/** Entry point: parse a (possibly qualified) table identifier. */
override def visitSingleTableIdentifier(
    ctx: SingleTableIdentifierContext): TableIdentifier = withOrigin(ctx) {
  visitTableIdentifier(ctx.tableIdentifier)
}
/** Entry point: parse a (possibly qualified) function identifier. */
override def visitSingleFunctionIdentifier(
    ctx: SingleFunctionIdentifierContext): FunctionIdentifier = withOrigin(ctx) {
  visitFunctionIdentifier(ctx.functionIdentifier)
}
/** Entry point: parse a data type string into a Catalyst [[DataType]]. */
override def visitSingleDataType(ctx: SingleDataTypeContext): DataType = withOrigin(ctx) {
  visitSparkDataType(ctx.dataType)
}
/** Entry point: parse a column-type list into a [[StructType]] schema. */
override def visitSingleTableSchema(ctx: SingleTableSchemaContext): StructType = {
  withOrigin(ctx)(StructType(visitColTypeList(ctx.colTypeList)))
}
/* ********************************************************************************************
 * Plan parsing
 * ******************************************************************************************** */

/** Typed shortcut: parses a rule context into a [[LogicalPlan]]. */
protected def plan(tree: ParserRuleContext): LogicalPlan = typedVisit(tree)
/**
 * Create a top-level plan with Common Table Expressions.
 *
 * The main query is parsed first; if a WITH clause is present, each named query becomes an
 * (alias, SubqueryAlias) pair wrapped around the main plan by a [[With]] node.
 */
override def visitQuery(ctx: QueryContext): LogicalPlan = withOrigin(ctx) {
  val query = plan(ctx.queryNoWith)

  // Apply CTEs
  query.optional(ctx.ctes) {
    val ctes = ctx.ctes.namedQuery.asScala.map { nCtx =>
      val namedQuery = visitNamedQuery(nCtx)
      (namedQuery.alias, namedQuery)
    }
    // Check for duplicate names.
    checkDuplicateKeys(ctes, ctx)
    With(query, ctes)
  }
}
/**
 * Build the aliased sub-plan for a single named query (`name AS (query)`).
 * This is only used for Common Table Expressions.
 */
override def visitNamedQuery(ctx: NamedQueryContext): SubqueryAlias = withOrigin(ctx) {
  val aliasName = ctx.name.getText
  val aliasedPlan = plan(ctx.query)
  SubqueryAlias(aliasName, aliasedPlan)
}
/**
 * Create a logical plan which allows for multiple inserts using one 'from' statement. These
 * queries have the following SQL form:
 * {{{
 *   [WITH cte...]?
 *   FROM src
 *   [INSERT INTO tbl1 SELECT *]+
 * }}}
 * For example:
 * {{{
 *   FROM db.tbl1 A
 *   INSERT INTO dbo.tbl1 SELECT * WHERE A.value = 10 LIMIT 5
 *   INSERT INTO dbo.tbl2 SELECT * WHERE A.value = 12
 * }}}
 * This (Hive) feature cannot be combined with set-operators.
 */
override def visitMultiInsertQuery(ctx: MultiInsertQueryContext): LogicalPlan = withOrigin(ctx) {
  val from = visitFromClause(ctx.fromClause)

  // Build the insert clauses.
  val inserts = ctx.multiInsertQueryBody.asScala.map {
    body =>
      // Each SELECT shares the single FROM above; a per-SELECT FROM is illegal here.
      validate(body.querySpecification.fromClause == null,
        "Multi-Insert queries cannot have a FROM clause in their individual SELECT statements",
        body)

      withQuerySpecification(body.querySpecification, from).
        // Add organization statements.
        optionalMap(body.queryOrganization)(withQueryResultClauses).
        // Add insert.
        optionalMap(body.insertInto())(withInsertInto)
  }

  // If there are multiple INSERTS just UNION them together into one query.
  inserts match {
    case Seq(query) => query
    case queries => Union(queries)
  }
}
/**
 * Create a logical plan for a regular (single-insert) query: the query term wrapped with the
 * optional ORDER BY/LIMIT/... organization and the optional INSERT INTO target.
 */
override def visitSingleInsertQuery(
    ctx: SingleInsertQueryContext): LogicalPlan = withOrigin(ctx) {
  plan(ctx.queryTerm).
    // Add organization statements.
    optionalMap(ctx.queryOrganization)(withQueryResultClauses).
    // Add insert.
    optionalMap(ctx.insertInto())(withInsertInto)
}
/**
 * Add an INSERT INTO [TABLE]/INSERT OVERWRITE TABLE operation to the logical plan.
 *
 * Partition keys with no value (dynamic partitions) are allowed, but not together with
 * IF NOT EXISTS.
 */
private def withInsertInto(
    ctx: InsertIntoContext,
    query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  val tableIdent = visitTableIdentifier(ctx.tableIdentifier)
  val partitionKeys = Option(ctx.partitionSpec).map(visitPartitionSpec).getOrElse(Map.empty)

  // Dynamic partition keys are the entries without an explicit value.
  val dynamicPartitionKeys: Map[String, Option[String]] = partitionKeys.filter(_._2.isEmpty)
  if (ctx.EXISTS != null && dynamicPartitionKeys.nonEmpty) {
    throw new ParseException(s"Dynamic partitions do not support IF NOT EXISTS. Specified " +
      "partitions with value: " + dynamicPartitionKeys.keys.mkString("[", ",", "]"), ctx)
  }

  InsertIntoTable(
    UnresolvedRelation(tableIdent),
    partitionKeys,
    query,
    ctx.OVERWRITE != null,
    ctx.EXISTS != null)
}
/**
 * Create a partition specification map. A key maps to None when no value was given
 * (dynamic partition).
 */
override def visitPartitionSpec(
    ctx: PartitionSpecContext): Map[String, Option[String]] = withOrigin(ctx) {
  val parts = ctx.partitionVal.asScala.map { pVal =>
    val name = pVal.identifier.getText
    val value = Option(pVal.constant).map(visitStringConstant)
    name -> value
  }
  // Before calling `toMap`, we check duplicated keys to avoid silently ignore partition values
  // in partition spec like PARTITION(a='1', b='2', a='3'). The real semantical check for
  // partition columns will be done in analyzer.
  checkDuplicateKeys(parts, ctx)
  parts.toMap
}
/**
 * Create a partition specification map without optional values: every key must carry an
 * explicit value, otherwise a ParseException is raised.
 */
protected def visitNonOptionalPartitionSpec(
    ctx: PartitionSpecContext): Map[String, String] = withOrigin(ctx) {
  visitPartitionSpec(ctx).map {
    case (key, None) => throw new ParseException(s"Found an empty partition key '$key'.", ctx)
    case (key, Some(value)) => key -> value
  }
}
/**
 * Convert a constant of any type into a string. Typically used in DDL commands; its main
 * purpose is to avoid subtle differences introduced by back-to-back conversions, i.e.:
 * String -> Literal -> String.
 */
protected def visitStringConstant(ctx: ConstantContext): String = withOrigin(ctx) {
  ctx match {
    case lit: StringLiteralContext => createString(lit)
    case _ => ctx.getText
  }
}
/**
 * Add ORDER BY/SORT BY/CLUSTER BY/DISTRIBUTE BY/LIMIT/WINDOWS clauses to the logical plan. These
 * clauses determine the shape (ordering/partitioning/rows) of the query result.
 *
 * Only specific combinations of the four ordering/partitioning clauses are legal; anything
 * else falls through to the ParseException at the end of the chain.
 */
private def withQueryResultClauses(
    ctx: QueryOrganizationContext,
    query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  import ctx._

  // Handle ORDER BY, SORT BY, DISTRIBUTE BY, and CLUSTER BY clause.
  val withOrder = if (
    !order.isEmpty && sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    // ORDER BY ...
    Sort(order.asScala.map(visitSortItem), global = true, query)
  } else if (order.isEmpty && !sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    // SORT BY ...
    Sort(sort.asScala.map(visitSortItem), global = false, query)
  } else if (order.isEmpty && sort.isEmpty && !distributeBy.isEmpty && clusterBy.isEmpty) {
    // DISTRIBUTE BY ...
    withRepartitionByExpression(ctx, expressionList(distributeBy), query)
  } else if (order.isEmpty && !sort.isEmpty && !distributeBy.isEmpty && clusterBy.isEmpty) {
    // SORT BY ... DISTRIBUTE BY ...
    Sort(
      sort.asScala.map(visitSortItem),
      global = false,
      withRepartitionByExpression(ctx, expressionList(distributeBy), query))
  } else if (order.isEmpty && sort.isEmpty && distributeBy.isEmpty && !clusterBy.isEmpty) {
    // CLUSTER BY ... (repartition by the expressions, then ascending local sort on them)
    val expressions = expressionList(clusterBy)
    Sort(
      expressions.map(SortOrder(_, Ascending)),
      global = false,
      withRepartitionByExpression(ctx, expressions, query))
  } else if (order.isEmpty && sort.isEmpty && distributeBy.isEmpty && clusterBy.isEmpty) {
    // [EMPTY]
    query
  } else {
    throw new ParseException(
      "Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY is not supported", ctx)
  }

  // WINDOWS
  val withWindow = withOrder.optionalMap(windows)(withWindows)

  // LIMIT
  // - LIMIT ALL is the same as omitting the LIMIT clause
  withWindow.optional(limit) {
    Limit(typedVisit(limit), withWindow)
  }
}
/**
 * Create a clause for DISTRIBUTE BY.
 *
 * This base implementation always throws; presumably a subclass overrides it where
 * DISTRIBUTE BY is actually supported — TODO confirm against the concrete builder.
 */
protected def withRepartitionByExpression(
    ctx: QueryOrganizationContext,
    expressions: Seq[Expression],
    query: LogicalPlan): LogicalPlan = {
  throw new ParseException("DISTRIBUTE BY is not supported", ctx)
}
/**
 * Create a logical plan using a query specification.
 */
override def visitQuerySpecification(
    ctx: QuerySpecificationContext): LogicalPlan = withOrigin(ctx) {
  // A query without a FROM clause reads from the one-row dummy relation (e.g. SELECT 1).
  val from = OneRowRelation().optional(ctx.fromClause) {
    visitFromClause(ctx.fromClause)
  }
  withQuerySpecification(ctx, from)
}
/**
 * Add a query specification to a logical plan. The query specification is the core of the logical
 * plan, this is where sourcing (FROM clause), transforming (SELECT TRANSFORM/MAP/REDUCE),
 * projection (SELECT), aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) takes place.
 *
 * Note that query hints are ignored (both by the parser and the builder).
 */
private def withQuerySpecification(
    ctx: QuerySpecificationContext,
    relation: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  import ctx._

  // WHERE
  def filter(ctx: BooleanExpressionContext, plan: LogicalPlan): LogicalPlan = {
    Filter(expression(ctx), plan)
  }

  // Expressions.
  val expressions = Option(namedExpressionSeq).toSeq
    .flatMap(_.namedExpression.asScala)
    .map(typedVisit[Expression])

  // Create either a transform or a regular query.
  val specType = Option(kind).map(_.getType).getOrElse(SqlBaseParser.SELECT)
  specType match {
    case SqlBaseParser.MAP | SqlBaseParser.REDUCE | SqlBaseParser.TRANSFORM =>
      // Transform

      // Add where.
      val withFilter = relation.optionalMap(where)(filter)

      // Create the attributes.
      val (attributes, schemaLess) = if (colTypeList != null) {
        // Typed return columns.
        (createSchema(colTypeList).toAttributes, false)
      } else if (identifierSeq != null) {
        // Untyped return columns.
        val attrs = visitIdentifierSeq(identifierSeq).map { name =>
          AttributeReference(name, StringType, nullable = true)()
        }
        (attrs, false)
      } else {
        // No explicit columns: fall back to a schema-less (key, value) pair of strings.
        (Seq(AttributeReference("key", StringType)(),
          AttributeReference("value", StringType)()), true)
      }

      // Create the transform.
      ScriptTransformation(
        expressions,
        string(script),
        attributes,
        withFilter,
        withScriptIOSchema(
          ctx, inRowFormat, recordWriter, outRowFormat, recordReader, schemaLess))

    case SqlBaseParser.SELECT =>
      // Regular select

      // Add lateral views.
      val withLateralView = ctx.lateralView.asScala.foldLeft(relation)(withGenerate)

      // Add where.
      val withFilter = withLateralView.optionalMap(where)(filter)

      // Add aggregation or a project.
      val namedExpressions = expressions.map {
        case e: NamedExpression => e
        case e: Expression => UnresolvedAlias(e)
      }
      val withProject = if (aggregation != null) {
        withAggregation(aggregation, namedExpressions, withFilter)
      } else if (namedExpressions.nonEmpty) {
        Project(namedExpressions, withFilter)
      } else {
        withFilter
      }

      // Having
      val withHaving = withProject.optional(having) {
        // Note that we add a cast to non-predicate expressions. If the expression itself is
        // already boolean, the optimizer will get rid of the unnecessary cast.
        val predicate = expression(having) match {
          case p: Predicate => p
          case e => Cast(e, BooleanType)
        }
        Filter(predicate, withProject)
      }

      // Distinct
      val withDistinct = if (setQuantifier() != null && setQuantifier().DISTINCT() != null) {
        Distinct(withHaving)
      } else {
        withHaving
      }

      // Window
      val withWindow = withDistinct.optionalMap(windows)(withWindows)

      // Hint
      hints.asScala.foldRight(withWindow)(withHints)
  }
}
/**
* Create a (Hive based) [[ScriptInputOutputSchema]].
*/
protected def withScriptIOSchema(
    ctx: QuerySpecificationContext,
    inRowFormat: RowFormatContext,
    recordWriter: Token,
    outRowFormat: RowFormatContext,
    recordReader: Token,
    schemaLess: Boolean): ScriptInputOutputSchema = {
  // Hook for Hive support: the base SQL parser rejects TRANSFORM/MAP/REDUCE
  // queries here; a Hive-aware subclass is expected to override this method.
  throw new ParseException("Script Transform is not supported", ctx)
}
/**
* Create a logical plan for a given 'FROM' clause. Note that we support multiple (comma
* separated) relations here, these get converted into a single plan by condition-less inner join.
*/
override def visitFromClause(ctx: FromClauseContext): LogicalPlan = withOrigin(ctx) {
  // Fold the comma-separated relations into a single plan. The accumulator
  // starts as null to mean "no left side yet": optionalMap skips the Join for
  // the first relation, so it simply becomes the base plan; every subsequent
  // relation is combined with a condition-less inner join.
  val from = ctx.relation.asScala.foldLeft(null: LogicalPlan) { (left, relation) =>
    val right = plan(relation.relationPrimary)
    val join = right.optionalMap(left)(Join(_, _, Inner, None))
    withJoinRelations(join, relation)
  }
  // Lateral views declared on the FROM clause wrap the joined result.
  ctx.lateralView.asScala.foldLeft(from)(withGenerate)
}
/**
* Connect two queries by a Set operator.
*
* Supported Set operators are:
* - UNION [DISTINCT]
* - UNION ALL
* - EXCEPT [DISTINCT]
* - MINUS [DISTINCT]
* - INTERSECT [DISTINCT]
*/
override def visitSetOperation(ctx: SetOperationContext): LogicalPlan = withOrigin(ctx) {
  // Build both child plans first, then dispatch on the (operator, ALL?) pair.
  val leftPlan = plan(ctx.left)
  val rightPlan = plan(ctx.right)
  val keepDuplicates = Option(ctx.setQuantifier()).exists(_.ALL != null)
  (ctx.operator.getType, keepDuplicates) match {
    case (SqlBaseParser.UNION, true) =>
      Union(leftPlan, rightPlan)
    case (SqlBaseParser.UNION, false) =>
      // UNION defaults to DISTINCT semantics.
      Distinct(Union(leftPlan, rightPlan))
    case (SqlBaseParser.INTERSECT, true) =>
      throw new ParseException("INTERSECT ALL is not supported.", ctx)
    case (SqlBaseParser.INTERSECT, false) =>
      Intersect(leftPlan, rightPlan)
    case (SqlBaseParser.EXCEPT, true) =>
      throw new ParseException("EXCEPT ALL is not supported.", ctx)
    case (SqlBaseParser.EXCEPT, false) =>
      Except(leftPlan, rightPlan)
    case (SqlBaseParser.SETMINUS, true) =>
      throw new ParseException("MINUS ALL is not supported.", ctx)
    case (SqlBaseParser.SETMINUS, false) =>
      // MINUS is a synonym for EXCEPT.
      Except(leftPlan, rightPlan)
  }
}
/**
* Add a [[WithWindowDefinition]] operator to a logical plan.
*/
private def withWindows(
    ctx: WindowsContext,
    query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  // Collect all window specifications defined in the WINDOW clause.
  val baseWindowMap = ctx.namedWindow.asScala.map {
    wCtx =>
      (wCtx.identifier.getText, typedVisit[WindowSpec](wCtx.windowSpec))
  }.toMap
  // Handle cases like
  // window w1 as (partition by p_mfgr order by p_name
  //               range between 2 preceding and 2 following),
  //        w2 as w1
  // i.e. resolve one level of name references against the definitions above.
  // Note: a reference to another reference (w3 as w2 as w1) is rejected.
  val windowMapView = baseWindowMap.mapValues {
    case WindowSpecReference(name) =>
      baseWindowMap.get(name) match {
        case Some(spec: WindowSpecDefinition) =>
          spec
        case Some(ref) =>
          throw new ParseException(s"Window reference '$name' is not a window specification", ctx)
        case None =>
          throw new ParseException(s"Cannot resolve window reference '$name'", ctx)
      }
    case spec: WindowSpecDefinition => spec
  }
  // Note that mapValues creates a view instead of materialized map. We force materialization by
  // mapping over identity.
  WithWindowDefinition(windowMapView.map(identity), query)
}
/**
* Add an [[Aggregate]] or [[GroupingSets]] to a logical plan.
*/
private def withAggregation(
    ctx: AggregationContext,
    selectExpressions: Seq[NamedExpression],
    query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  val groupByExpressions = expressionList(ctx.groupingExpressions)
  if (ctx.GROUPING != null) {
    // GROUP BY .... GROUPING SETS (...): each grouping set is its own list of
    // expressions, evaluated alongside the base GROUP BY list.
    val selectedGroupByExprs =
      ctx.groupingSet.asScala.map(_.expression.asScala.map(e => expression(e)))
    GroupingSets(selectedGroupByExprs, groupByExpressions, query, selectExpressions)
  } else {
    // GROUP BY .... (WITH CUBE | WITH ROLLUP)?
    // CUBE/ROLLUP wrap the whole grouping list in a single marker expression.
    val mappedGroupByExpressions = if (ctx.CUBE != null) {
      Seq(Cube(groupByExpressions))
    } else if (ctx.ROLLUP != null) {
      Seq(Rollup(groupByExpressions))
    } else {
      groupByExpressions
    }
    Aggregate(mappedGroupByExpressions, selectExpressions, query)
  }
}
/**
* Add [[UnresolvedHint]]s to a logical plan.
*/
private def withHints(
    ctx: HintContext,
    query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  // Wrap the query in one UnresolvedHint per hint statement. foldRight makes
  // the FIRST statement in the source the OUTERMOST hint, which is exactly
  // what the previous reverse-then-mutate loop produced — but without a var.
  ctx.hintStatements.asScala.foldRight(query) { (stmt, plan) =>
    UnresolvedHint(stmt.hintName.getText, stmt.parameters.asScala.map(expression), plan)
  }
}
/**
* Add a [[Generate]] (Lateral View) to a logical plan.
*/
private def withGenerate(
    query: LogicalPlan,
    ctx: LateralViewContext): LogicalPlan = withOrigin(ctx) {
  val expressions = expressionList(ctx.expression)
  Generate(
    UnresolvedGenerator(visitFunctionName(ctx.qualifiedName), expressions),
    join = true,
    outer = ctx.OUTER != null,
    // Use Locale.ROOT so lowercasing is locale-insensitive (e.g. the Turkish
    // dotless-i problem), consistent with the other toLowerCase calls in this
    // file (see visitFunctionCall / visitTypeConstructor).
    Some(ctx.tblName.getText.toLowerCase(Locale.ROOT)),
    ctx.colName.asScala.map(_.getText).map(UnresolvedAttribute.apply),
    query)
}
/**
* Create a single relation referenced in a FROM clause. This method is used when a part of the
* join condition is nested, for example:
* {{{
* select * from t1 join (t2 cross join t3) on col1 = col2
* }}}
*/
override def visitRelation(ctx: RelationContext): LogicalPlan = withOrigin(ctx) {
  // Plan the primary relation, then attach any trailing JOINs declared on it.
  withJoinRelations(plan(ctx.relationPrimary), ctx)
}
/**
* Join one or more [[LogicalPlan]]s to the current logical plan.
*/
private def withJoinRelations(base: LogicalPlan, ctx: RelationContext): LogicalPlan = {
  // Left-fold each joinRelation onto the base plan, producing a left-deep tree.
  ctx.joinRelation.asScala.foldLeft(base) { (left, join) =>
    withOrigin(join) {
      // NOTE: the guard order matters — "LEFT SEMI" / "LEFT ANTI" contain the
      // LEFT token too, so SEMI/ANTI must be tested before plain LEFT.
      val baseJoinType = join.joinType match {
        case null => Inner
        case jt if jt.CROSS != null => Cross
        case jt if jt.FULL != null => FullOuter
        case jt if jt.SEMI != null => LeftSemi
        case jt if jt.ANTI != null => LeftAnti
        case jt if jt.LEFT != null => LeftOuter
        case jt if jt.RIGHT != null => RightOuter
        case _ => Inner
      }
      // Resolve the join type and join condition: USING wraps the join type,
      // ON yields an expression condition, NATURAL wraps the type (but is
      // incompatible with CROSS), and no criteria means a plain join.
      val (joinType, condition) = Option(join.joinCriteria) match {
        case Some(c) if c.USING != null =>
          (UsingJoin(baseJoinType, c.identifier.asScala.map(_.getText)), None)
        case Some(c) if c.booleanExpression != null =>
          (baseJoinType, Option(expression(c.booleanExpression)))
        case None if join.NATURAL != null =>
          if (baseJoinType == Cross) {
            throw new ParseException("NATURAL CROSS JOIN is not supported", ctx)
          }
          (NaturalJoin(baseJoinType), None)
        case None =>
          (baseJoinType, None)
      }
      Join(left, plan(join.right), joinType, condition)
    }
  }
}
/**
* Add a [[Sample]] to a logical plan.
*
* This currently supports the following sampling methods:
* - TABLESAMPLE(x ROWS): Sample the table down to the given number of rows.
* - TABLESAMPLE(x PERCENT): Sample the table down to the given percentage. Note that percentages
* are defined as a number between 0 and 100.
* - TABLESAMPLE(BUCKET x OUT OF y): Sample the table down to a 'x' divided by 'y' fraction.
*/
private def withSample(ctx: SampleContext, query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
  // Create a sampled plan if we need one.
  def sample(fraction: Double): Sample = {
    // The range of fraction accepted by Sample is [0, 1]. Because Hive's block sampling
    // function takes X PERCENT as the input and the range of X is [0, 100], we need to
    // adjust the fraction.
    // eps tolerates floating-point rounding at the interval boundaries.
    val eps = RandomSampler.roundingEpsilon
    validate(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
      s"Sampling fraction ($fraction) must be on interval [0, 1]",
      ctx)
    // Seed is chosen randomly at parse time, so re-parsing the same query
    // yields a different sample.
    Sample(0.0, fraction, withReplacement = false, (math.random * 1000).toInt, query)
  }
  ctx.sampleType.getType match {
    case SqlBaseParser.ROWS =>
      // TABLESAMPLE(x ROWS) is just a LIMIT.
      Limit(expression(ctx.expression), query)
    case SqlBaseParser.PERCENTLIT =>
      val fraction = ctx.percentage.getText.toDouble
      // A leading minus sign is parsed separately from the numeric literal.
      val sign = if (ctx.negativeSign == null) 1 else -1
      sample(sign * fraction / 100.0d)
    case SqlBaseParser.BYTELENGTH_LITERAL =>
      throw new ParseException(
        "TABLESAMPLE(byteLengthLiteral) is not supported", ctx)
    case SqlBaseParser.BUCKET if ctx.ON != null =>
      // BUCKET ... ON is rejected with a message tailored to the ON operand.
      if (ctx.identifier != null) {
        throw new ParseException(
          "TABLESAMPLE(BUCKET x OUT OF y ON colname) is not supported", ctx)
      } else {
        throw new ParseException(
          "TABLESAMPLE(BUCKET x OUT OF y ON function) is not supported", ctx)
      }
    case SqlBaseParser.BUCKET =>
      sample(ctx.numerator.getText.toDouble / ctx.denominator.getText.toDouble)
  }
}
/**
* Create a logical plan for a sub-query.
*/
override def visitSubquery(ctx: SubqueryContext): LogicalPlan = withOrigin(ctx) {
  // A sub-query is just the plan of its (WITH-less) inner query.
  plan(ctx.queryNoWith)
}
/**
* Create an un-aliased table reference. This is typically used for top-level table references,
* for example:
* {{{
* INSERT INTO db.tbl2
* TABLE db.tbl1
* }}}
*/
override def visitTable(ctx: TableContext): LogicalPlan = withOrigin(ctx) {
  // Leave resolution of the (possibly db-qualified) name to the analyzer.
  UnresolvedRelation(visitTableIdentifier(ctx.tableIdentifier))
}
/**
* Create an aliased table reference. This is typically used in FROM clauses.
*/
override def visitTableName(ctx: TableNameContext): LogicalPlan = withOrigin(ctx) {
  val tableId = visitTableIdentifier(ctx.tableIdentifier)
  // Apply an optional alias, then an optional TABLESAMPLE clause on top.
  val table = mayApplyAliasPlan(ctx.tableAlias, UnresolvedRelation(tableId))
  table.optionalMap(ctx.sample)(withSample)
}
/**
* Create a table-valued function call with arguments, e.g. range(1000)
*/
override def visitTableValuedFunction(ctx: TableValuedFunctionContext)
    : LogicalPlan = withOrigin(ctx) {
  val func = ctx.functionTable
  // Optional output column aliases, e.g. range(3) AS t(i).
  val aliases = if (func.tableAlias.identifierList != null) {
    visitIdentifierList(func.tableAlias.identifierList)
  } else {
    Seq.empty
  }
  val tvf = UnresolvedTableValuedFunction(
    func.identifier.getText, func.expression.asScala.map(expression), aliases)
  // Optional table alias wraps the TVF in a subquery alias.
  tvf.optionalMap(func.tableAlias.strictIdentifier)(aliasPlan)
}
/**
* Create an inline table (a virtual table in Hive parlance).
*/
override def visitInlineTable(ctx: InlineTableContext): LogicalPlan = withOrigin(ctx) {
  // Get the backing expressions.
  val rows = ctx.expression.asScala.map { e =>
    expression(e) match {
      // inline table comes in two styles:
      // style 1: values (1), (2), (3)  -- multiple columns are supported
      // style 2: values 1, 2, 3  -- only a single column is supported here
      case struct: CreateNamedStruct => struct.valExprs // style 1
      case child => Seq(child)                          // style 2
    }
  }
  // Use explicit aliases if given, otherwise synthesize col1, col2, ...
  // NOTE(review): rows.head assumes the grammar guarantees at least one row
  // in a VALUES clause — confirm against the grammar rule.
  val aliases = if (ctx.tableAlias.identifierList != null) {
    visitIdentifierList(ctx.tableAlias.identifierList)
  } else {
    Seq.tabulate(rows.head.size)(i => s"col${i + 1}")
  }
  val table = UnresolvedInlineTable(aliases, rows)
  table.optionalMap(ctx.tableAlias.strictIdentifier)(aliasPlan)
}
/**
* Create an alias (SubqueryAlias) for a join relation. This is practically the same as
* visitAliasedQuery and visitNamedExpression, ANTLR4 however requires us to use 3 different
* hooks. We could add alias names for output columns, for example:
* {{{
* SELECT a, b, c, d FROM (src1 s1 INNER JOIN src2 s2 ON s1.id = s2.id) dst(a, b, c, d)
* }}}
*/
override def visitAliasedRelation(ctx: AliasedRelationContext): LogicalPlan = withOrigin(ctx) {
  // Sample (if any) is applied before the alias wraps the relation.
  val relation = plan(ctx.relation).optionalMap(ctx.sample)(withSample)
  mayApplyAliasPlan(ctx.tableAlias, relation)
}
/**
* Create an alias (SubqueryAlias) for a sub-query. This is practically the same as
* visitAliasedRelation and visitNamedExpression, ANTLR4 however requires us to use 3 different
* hooks. We could add alias names for output columns, for example:
* {{{
* SELECT col1, col2 FROM testData AS t(col1, col2)
* }}}
*/
override def visitAliasedQuery(ctx: AliasedQueryContext): LogicalPlan = withOrigin(ctx) {
  val relation = plan(ctx.queryNoWith).optionalMap(ctx.sample)(withSample)
  if (ctx.tableAlias.strictIdentifier == null) {
    // For un-aliased subqueries, use a default alias name that is not likely to conflict with
    // normal subquery names, so that parent operators can only access the columns in subquery by
    // unqualified names. Users can still use this special qualifier to access columns if they
    // know it, but that's not recommended.
    SubqueryAlias("__auto_generated_subquery_name", relation)
  } else {
    mayApplyAliasPlan(ctx.tableAlias, relation)
  }
}
/**
* Create an alias ([[SubqueryAlias]]) for a [[LogicalPlan]].
*/
// Wrap a plan in a SubqueryAlias named after the given identifier context.
private def aliasPlan(alias: ParserRuleContext, plan: LogicalPlan): LogicalPlan = {
  SubqueryAlias(alias.getText, plan)
}
/**
* If aliases specified in a FROM clause, create a subquery alias ([[SubqueryAlias]]) and
* column aliases for a [[LogicalPlan]].
*/
private def mayApplyAliasPlan(tableAlias: TableAliasContext, plan: LogicalPlan): LogicalPlan = {
  // Guard clause: no alias at all means the plan is returned untouched.
  if (tableAlias.strictIdentifier == null) {
    plan
  } else {
    val aliased = SubqueryAlias(tableAlias.strictIdentifier.getText, plan)
    // An optional parenthesised column list, e.g. "dst(a, b, c)", adds
    // column aliases on top of the subquery alias.
    Option(tableAlias.identifierList)
      .map(list => UnresolvedSubqueryColumnAliases(visitIdentifierList(list), aliased))
      .getOrElse(aliased)
  }
}
/**
* Create a Sequence of Strings for a parenthesis enclosed alias list.
*/
override def visitIdentifierList(ctx: IdentifierListContext): Seq[String] = withOrigin(ctx) {
  // Delegates to the inner (comma-separated) identifier sequence.
  visitIdentifierSeq(ctx.identifierSeq)
}
/**
* Create a Sequence of Strings for an identifier list.
*/
override def visitIdentifierSeq(ctx: IdentifierSeqContext): Seq[String] = withOrigin(ctx) {
  // Raw identifier text, in source order.
  ctx.identifier.asScala.map(_.getText)
}
/* ********************************************************************************************
* Table Identifier parsing
* ******************************************************************************************** */
/**
* Create a [[TableIdentifier]] from a 'tableName' or 'databaseName'.'tableName' pattern.
*/
override def visitTableIdentifier(
    ctx: TableIdentifierContext): TableIdentifier = withOrigin(ctx) {
  // db is optional; Option(null) becomes None for unqualified names.
  TableIdentifier(ctx.table.getText, Option(ctx.db).map(_.getText))
}
/**
* Create a [[FunctionIdentifier]] from a 'functionName' or 'databaseName'.'functionName' pattern.
*/
override def visitFunctionIdentifier(
    ctx: FunctionIdentifierContext): FunctionIdentifier = withOrigin(ctx) {
  // db is optional; Option(null) becomes None for unqualified names.
  FunctionIdentifier(ctx.function.getText, Option(ctx.db).map(_.getText))
}
/* ********************************************************************************************
* Expression parsing
* ******************************************************************************************** */
/**
* Create an expression from the given context. This method just passes the context on to the
* visitor and only takes care of typing (We assume that the visitor returns an Expression here).
*/
// Visit the context and cast the result; assumes the visitor yields an
// Expression for every expression-shaped context.
protected def expression(ctx: ParserRuleContext): Expression = typedVisit(ctx)
/**
* Create sequence of expressions from the given sequence of contexts.
*/
private def expressionList(trees: java.util.List[ExpressionContext]): Seq[Expression] = {
  // Convert the Java list and visit each element.
  trees.asScala.map(expression)
}
/**
* Create a star (i.e. all) expression; this selects all elements (in the specified object).
* Both un-targeted (global) and targeted aliases are supported.
*/
override def visitStar(ctx: StarContext): Expression = withOrigin(ctx) {
  // None = global star (*); Some(parts) = targeted star (tbl.* / db.tbl.*).
  UnresolvedStar(Option(ctx.qualifiedName()).map(_.identifier.asScala.map(_.getText)))
}
/**
* Create an aliased expression if an alias is specified. Both single and multi-aliases are
* supported.
*/
override def visitNamedExpression(ctx: NamedExpressionContext): Expression = withOrigin(ctx) {
  // A named expression carries at most one of: a single alias, or a
  // parenthesised multi-alias list; otherwise the expression stands alone.
  val child = expression(ctx.expression)
  (Option(ctx.identifier), Option(ctx.identifierList)) match {
    case (Some(name), _) => Alias(child, name.getText)()
    case (_, Some(names)) => MultiAlias(child, visitIdentifierList(names))
    case _ => child
  }
}
/**
* Combine a number of boolean expressions into a balanced expression tree. These expressions are
* either combined by a logical [[And]] or a logical [[Or]].
*
* A balanced binary tree is created because regular left recursive trees cause considerable
* performance degradations and can cause stack overflows.
*/
override def visitLogicalBinary(ctx: LogicalBinaryContext): Expression = withOrigin(ctx) {
  val expressionType = ctx.operator.getType
  val expressionCombiner = expressionType match {
    case SqlBaseParser.AND => And.apply _
    case SqlBaseParser.OR => Or.apply _
  }
  // Collect all similar left hand contexts. Because the grammar is
  // left-recursive, a chain "a AND b AND c" nests to the left; we walk down
  // the left spine collecting right-hand operands instead of recursing.
  val contexts = ArrayBuffer(ctx.right)
  var current = ctx.left
  def collectContexts: Boolean = current match {
    case lbc: LogicalBinaryContext if lbc.operator.getType == expressionType =>
      contexts += lbc.right
      current = lbc.left
      true
    case _ =>
      // Not the same operator any more: the current node is the last operand.
      contexts += current
      false
  }
  while (collectContexts) {
    // No body - all updates take place in the collectContexts.
  }
  // Reverse the contexts to have them in the same sequence as in the SQL statement & turn them
  // into expressions.
  val expressions = contexts.reverseMap(expression)
  // Create a balanced tree by recursively splitting the operand range in half.
  def reduceToExpressionTree(low: Int, high: Int): Expression = high - low match {
    case 0 =>
      expressions(low)
    case 1 =>
      expressionCombiner(expressions(low), expressions(high))
    case x =>
      val mid = low + x / 2
      expressionCombiner(
        reduceToExpressionTree(low, mid),
        reduceToExpressionTree(mid + 1, high))
  }
  reduceToExpressionTree(0, expressions.size - 1)
}
/**
* Invert a boolean expression.
*/
override def visitLogicalNot(ctx: LogicalNotContext): Expression = withOrigin(ctx) {
  // NOT <booleanExpression>.
  Not(expression(ctx.booleanExpression()))
}
/**
* Create a filtering correlated sub-query (EXISTS).
*/
// Wrap in withOrigin like every other visitor in this file, so the resulting
// Exists expression carries source-position information for error reporting.
override def visitExists(ctx: ExistsContext): Expression = withOrigin(ctx) {
  Exists(plan(ctx.query))
}
/**
* Create a comparison expression. This compares two expressions. The following comparison
* operators are supported:
* - Equal: '=' or '=='
* - Null-safe Equal: '<=>'
* - Not Equal: '<>' or '!='
* - Less than: '<'
* - Less than or Equal: '<='
* - Greater than: '>'
* - Greater than or Equal: '>='
*/
override def visitComparison(ctx: ComparisonContext): Expression = withOrigin(ctx) {
  val left = expression(ctx.left)
  val right = expression(ctx.right)
  // The operator is the single terminal child of the comparisonOperator rule.
  val operator = ctx.comparisonOperator().getChild(0).asInstanceOf[TerminalNode]
  operator.getSymbol.getType match {
    case SqlBaseParser.EQ =>
      EqualTo(left, right)
    case SqlBaseParser.NSEQ =>
      // '<=>': null-safe equality.
      EqualNullSafe(left, right)
    case SqlBaseParser.NEQ | SqlBaseParser.NEQJ =>
      // '<>' and '!=' both map to NOT(=).
      Not(EqualTo(left, right))
    case SqlBaseParser.LT =>
      LessThan(left, right)
    case SqlBaseParser.LTE =>
      LessThanOrEqual(left, right)
    case SqlBaseParser.GT =>
      GreaterThan(left, right)
    case SqlBaseParser.GTE =>
      GreaterThanOrEqual(left, right)
  }
}
/**
* Create a predicated expression. A predicated expression is a normal expression with a
* predicate attached to it, for example:
* {{{
* a + 1 IS NULL
* }}}
*/
override def visitPredicated(ctx: PredicatedContext): Expression = withOrigin(ctx) {
  // Wrap the value expression with its predicate (BETWEEN, IN, IS NULL, ...)
  // when one is attached; otherwise return the expression itself.
  val base = expression(ctx.valueExpression)
  Option(ctx.predicate).fold(base)(withPredicate(base, _))
}
/**
* Add a predicate to the given expression. Supported expressions are:
* - (NOT) BETWEEN
* - (NOT) IN
* - (NOT) LIKE
* - (NOT) RLIKE
* - IS (NOT) NULL.
* - IS (NOT) DISTINCT FROM
*/
private def withPredicate(e: Expression, ctx: PredicateContext): Expression = withOrigin(ctx) {
  // Invert a predicate if it has a valid NOT clause.
  def invertIfNotDefined(e: Expression): Expression = ctx.NOT match {
    case null => e
    case not => Not(e)
  }
  // Create the predicate. Note: BETWEEN/IN/LIKE/RLIKE use invertIfNotDefined,
  // while NULL and DISTINCT encode the NOT directly in their case guards.
  ctx.kind.getType match {
    case SqlBaseParser.BETWEEN =>
      // BETWEEN is translated to lower <= e && e <= upper
      invertIfNotDefined(And(
        GreaterThanOrEqual(e, expression(ctx.lower)),
        LessThanOrEqual(e, expression(ctx.upper))))
    case SqlBaseParser.IN if ctx.query != null =>
      // IN (subquery) becomes an IN over a ListQuery.
      invertIfNotDefined(In(e, Seq(ListQuery(plan(ctx.query)))))
    case SqlBaseParser.IN =>
      invertIfNotDefined(In(e, ctx.expression.asScala.map(expression)))
    case SqlBaseParser.LIKE =>
      invertIfNotDefined(Like(e, expression(ctx.pattern)))
    case SqlBaseParser.RLIKE =>
      invertIfNotDefined(RLike(e, expression(ctx.pattern)))
    case SqlBaseParser.NULL if ctx.NOT != null =>
      IsNotNull(e)
    case SqlBaseParser.NULL =>
      IsNull(e)
    case SqlBaseParser.DISTINCT if ctx.NOT != null =>
      // IS NOT DISTINCT FROM == null-safe equality.
      EqualNullSafe(e, expression(ctx.right))
    case SqlBaseParser.DISTINCT =>
      Not(EqualNullSafe(e, expression(ctx.right)))
  }
}
/**
* Create a binary arithmetic expression. The following arithmetic operators are supported:
* - Multiplication: '*'
* - Division: '/'
* - Hive Long Division: 'DIV'
* - Modulo: '%'
* - Addition: '+'
* - Subtraction: '-'
* - Binary AND: '&'
* - Binary XOR
* - Binary OR: '|'
*/
override def visitArithmeticBinary(ctx: ArithmeticBinaryContext): Expression = withOrigin(ctx) {
  val left = expression(ctx.left)
  val right = expression(ctx.right)
  ctx.operator.getType match {
    case SqlBaseParser.ASTERISK =>
      Multiply(left, right)
    case SqlBaseParser.SLASH =>
      Divide(left, right)
    case SqlBaseParser.PERCENT =>
      Remainder(left, right)
    case SqlBaseParser.DIV =>
      // Hive's integral division: divide, then cast the result to long.
      Cast(Divide(left, right), LongType)
    case SqlBaseParser.PLUS =>
      Add(left, right)
    case SqlBaseParser.MINUS =>
      Subtract(left, right)
    case SqlBaseParser.CONCAT_PIPE =>
      // '||' string concatenation.
      Concat(left :: right :: Nil)
    case SqlBaseParser.AMPERSAND =>
      BitwiseAnd(left, right)
    case SqlBaseParser.HAT =>
      BitwiseXor(left, right)
    case SqlBaseParser.PIPE =>
      BitwiseOr(left, right)
  }
}
/**
* Create a unary arithmetic expression. The following arithmetic operators are supported:
* - Plus: '+'
* - Minus: '-'
* - Bitwise Not: '~'
*/
override def visitArithmeticUnary(ctx: ArithmeticUnaryContext): Expression = withOrigin(ctx) {
  val value = expression(ctx.valueExpression)
  ctx.operator.getType match {
    case SqlBaseParser.PLUS =>
      // Unary plus is a no-op.
      value
    case SqlBaseParser.MINUS =>
      UnaryMinus(value)
    case SqlBaseParser.TILDE =>
      BitwiseNot(value)
  }
}
/**
* Create a [[Cast]] expression.
*/
override def visitCast(ctx: CastContext): Expression = withOrigin(ctx) {
  // CAST(expr AS dataType).
  Cast(expression(ctx.expression), visitSparkDataType(ctx.dataType))
}
/**
* Create a [[CreateStruct]] expression.
*/
override def visitStruct(ctx: StructContext): Expression = withOrigin(ctx) {
  // STRUCT(arg, ...) over the visited argument expressions.
  CreateStruct(ctx.argument.asScala.map(expression))
}
/**
* Create a [[First]] expression.
*/
override def visitFirst(ctx: FirstContext): Expression = withOrigin(ctx) {
  // FIRST(expr [IGNORE NULLS]); the flag is passed as a literal argument.
  val ignoreNullsExpr = ctx.IGNORE != null
  First(expression(ctx.expression), Literal(ignoreNullsExpr)).toAggregateExpression()
}
/**
* Create a [[Last]] expression.
*/
override def visitLast(ctx: LastContext): Expression = withOrigin(ctx) {
  // LAST(expr [IGNORE NULLS]); the flag is passed as a literal argument.
  val ignoreNullsExpr = ctx.IGNORE != null
  Last(expression(ctx.expression), Literal(ignoreNullsExpr)).toAggregateExpression()
}
/**
* Create a Position expression.
*/
override def visitPosition(ctx: PositionContext): Expression = withOrigin(ctx) {
  // POSITION(substr IN str) maps to StringLocate(substr, str).
  new StringLocate(expression(ctx.substr), expression(ctx.str))
}
/**
* Create a (windowed) Function expression.
*/
override def visitFunctionCall(ctx: FunctionCallContext): Expression = withOrigin(ctx) {
  // Create the function call.
  val name = ctx.qualifiedName.getText
  val isDistinct = Option(ctx.setQuantifier()).exists(_.DISTINCT != null)
  val arguments = ctx.argument.asScala.map(expression) match {
    case Seq(UnresolvedStar(None))
      if name.toLowerCase(Locale.ROOT) == "count" && !isDistinct =>
      // Transform COUNT(*) into COUNT(1).
      Seq(Literal(1))
    case expressions =>
      expressions
  }
  val function = UnresolvedFunction(visitFunctionName(ctx.qualifiedName), arguments, isDistinct)
  // Check if the function is evaluated in a windowed context: either a
  // reference to a named window or an inline window definition.
  ctx.windowSpec match {
    case spec: WindowRefContext =>
      UnresolvedWindowExpression(function, visitWindowRef(spec))
    case spec: WindowDefContext =>
      WindowExpression(function, visitWindowDef(spec))
    case _ => function
  }
}
/**
* Create a current timestamp/date expression. These are different from regular function because
* they do not require the user to specify braces when calling them.
*/
override def visitTimeFunctionCall(ctx: TimeFunctionCallContext): Expression = withOrigin(ctx) {
  // CURRENT_DATE / CURRENT_TIMESTAMP are callable without parentheses.
  ctx.name.getType match {
    case SqlBaseParser.CURRENT_DATE =>
      CurrentDate()
    case SqlBaseParser.CURRENT_TIMESTAMP =>
      CurrentTimestamp()
  }
}
/**
* Create a function database (optional) and name pair.
*/
protected def visitFunctionName(ctx: QualifiedNameContext): FunctionIdentifier = {
  // Only one or two name parts are allowed: [db.]function.
  ctx.identifier().asScala.map(_.getText) match {
    case Seq(db, fn) => FunctionIdentifier(fn, Option(db))
    case Seq(fn) => FunctionIdentifier(fn, None)
    case other => throw new ParseException(s"Unsupported function name '${ctx.getText}'", ctx)
  }
}
/**
* Create a reference to a window frame, i.e. [[WindowSpecReference]].
*/
override def visitWindowRef(ctx: WindowRefContext): WindowSpecReference = withOrigin(ctx) {
  // Reference to a window named in the WINDOW clause; resolved in withWindows.
  WindowSpecReference(ctx.identifier.getText)
}
/**
* Create a window definition, i.e. [[WindowSpecDefinition]].
*/
override def visitWindowDef(ctx: WindowDefContext): WindowSpecDefinition = withOrigin(ctx) {
  // CLUSTER BY ... | PARTITION BY ... ORDER BY ...
  val partition = ctx.partition.asScala.map(expression)
  val order = ctx.sortItem.asScala.map(visitSortItem)
  // RANGE/ROWS BETWEEN ...; the frame is optional.
  val frameSpecOption = Option(ctx.windowFrame).map { frame =>
    val frameType = frame.frameType.getType match {
      case SqlBaseParser.RANGE => RangeFrame
      case SqlBaseParser.ROWS => RowFrame
    }
    SpecifiedWindowFrame(
      frameType,
      visitFrameBound(frame.start),
      // A missing end bound defaults to CURRENT ROW.
      Option(frame.end).map(visitFrameBound).getOrElse(CurrentRow))
  }
  WindowSpecDefinition(
    partition,
    order,
    frameSpecOption.getOrElse(UnspecifiedFrame))
}
/**
* Create or resolve a frame boundary expressions.
*/
override def visitFrameBound(ctx: FrameBoundContext): Expression = withOrigin(ctx) {
  // Numeric bound value; must be a constant, checked eagerly here.
  def value: Expression = {
    val e = expression(ctx.expression)
    validate(e.resolved && e.foldable, "Frame bound value must be a literal.", ctx)
    e
  }
  ctx.boundType.getType match {
    case SqlBaseParser.PRECEDING if ctx.UNBOUNDED != null =>
      UnboundedPreceding
    case SqlBaseParser.PRECEDING =>
      // n PRECEDING is represented as a negated offset.
      UnaryMinus(value)
    case SqlBaseParser.CURRENT =>
      CurrentRow
    case SqlBaseParser.FOLLOWING if ctx.UNBOUNDED != null =>
      UnboundedFollowing
    case SqlBaseParser.FOLLOWING =>
      value
  }
}
/**
* Create a [[CreateStruct]] expression.
*/
override def visitRowConstructor(ctx: RowConstructorContext): Expression = withOrigin(ctx) {
  // (e1, e2, ...) row constructor becomes a struct.
  CreateStruct(ctx.namedExpression().asScala.map(expression))
}
/**
* Create a [[ScalarSubquery]] expression.
*/
override def visitSubqueryExpression(
    ctx: SubqueryExpressionContext): Expression = withOrigin(ctx) {
  // A scalar subquery: (SELECT ...) used as an expression.
  ScalarSubquery(plan(ctx.query))
}
/**
* Create a value based [[CaseWhen]] expression. This has the following SQL form:
* {{{
* CASE [expression]
* WHEN [value] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*/
override def visitSimpleCase(ctx: SimpleCaseContext): Expression = withOrigin(ctx) {
  val e = expression(ctx.value)
  // Each WHEN value is rewritten as an equality test against the CASE value.
  val branches = ctx.whenClause.asScala.map { wCtx =>
    (EqualTo(e, expression(wCtx.condition)), expression(wCtx.result))
  }
  CaseWhen(branches, Option(ctx.elseExpression).map(expression))
}
/**
* Create a condition based [[CaseWhen]] expression. This has the following SQL syntax:
* {{{
* CASE
* WHEN [predicate] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*
* @param ctx the parse tree
* */
override def visitSearchedCase(ctx: SearchedCaseContext): Expression = withOrigin(ctx) {
  // Each WHEN clause is already a predicate; no equality rewrite needed.
  val branches = ctx.whenClause.asScala.map { wCtx =>
    (expression(wCtx.condition), expression(wCtx.result))
  }
  CaseWhen(branches, Option(ctx.elseExpression).map(expression))
}
/**
* Currently only regex in expressions of SELECT statements are supported; in other
* places, e.g., where `(a)?+.+` = 2, regex are not meaningful.
*/
private def canApplyRegex(ctx: ParserRuleContext): Boolean = withOrigin(ctx) {
  // Walk up the parse tree from the parent; regex column references are only
  // meaningful inside a SELECT-list named expression. Iterator + exists
  // replaces the previous while loop with explicit `return`s (non-idiomatic,
  // and nonlocal-return is deprecated in newer Scala).
  Iterator.iterate(ctx.getParent)(_.getParent)
    .takeWhile(_ != null)
    .exists(_.isInstanceOf[NamedExpressionContext])
}
/**
* Create a dereference expression. The return type depends on the type of the parent.
* If the parent is an [[UnresolvedAttribute]], it can be a [[UnresolvedAttribute]] or
* a [[UnresolvedRegex]] for regex quoted in ``; if the parent is some other expression,
* it can be [[UnresolvedExtractValue]].
*/
override def visitDereference(ctx: DereferenceContext): Expression = withOrigin(ctx) {
  val attr = ctx.fieldName.getText
  expression(ctx.base) match {
    case unresolved_attr @ UnresolvedAttribute(nameParts) =>
      // Backtick-quoted field names may be regexes when the config allows it
      // and we are inside a SELECT-list expression; otherwise just extend the
      // attribute's name parts.
      ctx.fieldName.getStart.getText match {
        case escapedIdentifier(columnNameRegex)
          if conf.supportQuotedRegexColumnName && canApplyRegex(ctx) =>
          UnresolvedRegex(columnNameRegex, Some(unresolved_attr.name),
            conf.caseSensitiveAnalysis)
        case _ =>
          UnresolvedAttribute(nameParts :+ attr)
      }
    case e =>
      // Non-attribute base (e.g. struct-typed expression): field extraction.
      UnresolvedExtractValue(e, Literal(attr))
  }
}
/**
* Create an [[UnresolvedAttribute]] expression or a [[UnresolvedRegex]] if it is a regex
* quoted in ``
*/
override def visitColumnReference(ctx: ColumnReferenceContext): Expression = withOrigin(ctx) {
  // A backtick-quoted name may be treated as a regex over column names when
  // the config allows it and the reference sits in a SELECT-list expression.
  ctx.getStart.getText match {
    case escapedIdentifier(columnNameRegex)
      if conf.supportQuotedRegexColumnName && canApplyRegex(ctx) =>
      UnresolvedRegex(columnNameRegex, None, conf.caseSensitiveAnalysis)
    case _ =>
      UnresolvedAttribute.quoted(ctx.getText)
  }
}
/**
* Create an [[UnresolvedExtractValue]] expression, this is used for subscript access to an array.
*/
override def visitSubscript(ctx: SubscriptContext): Expression = withOrigin(ctx) {
  // value[index] — array element or map value access.
  UnresolvedExtractValue(expression(ctx.value), expression(ctx.index))
}
/**
* Create an expression for an expression between parentheses. This is needed because the ANTLR
* visitor cannot automatically convert the nested context into an expression.
*/
override def visitParenthesizedExpression(
    ctx: ParenthesizedExpressionContext): Expression = withOrigin(ctx) {
  // Unwrap: parentheses carry no semantics of their own.
  expression(ctx.expression)
}
/**
* Create a [[SortOrder]] expression.
*/
override def visitSortItem(ctx: SortItemContext): SortOrder = withOrigin(ctx) {
  // ASC is the default direction when DESC is absent.
  val direction = if (ctx.DESC == null) Ascending else Descending
  // An explicit NULLS FIRST/LAST overrides the direction's default ordering.
  val nullOrdering =
    if (ctx.FIRST != null) NullsFirst
    else if (ctx.LAST != null) NullsLast
    else direction.defaultNullOrdering
  SortOrder(expression(ctx.expression), direction, nullOrdering, Set.empty)
}
/**
* Create a typed Literal expression. A typed literal has the following SQL syntax:
* {{{
* [TYPE] '[VALUE]'
* }}}
* Currently Date, Timestamp and Binary typed literals are supported.
*/
override def visitTypeConstructor(ctx: TypeConstructorContext): Literal = withOrigin(ctx) {
  val value = string(ctx.STRING)
  val valueType = ctx.identifier.getText.toUpperCase(Locale.ROOT)
  try {
    valueType match {
      case "DATE" =>
        Literal(Date.valueOf(value))
      case "TIMESTAMP" =>
        Literal(Timestamp.valueOf(value))
      case "X" =>
        // Hex binary literal; left-pad odd-length input with a '0' nibble.
        val padding = if (value.length % 2 == 1) "0" else ""
        Literal(DatatypeConverter.parseHexBinary(padding + value))
      case other =>
        throw new ParseException(s"Literals of type '$other' are currently not supported.", ctx)
    }
  } catch {
    // Date/Timestamp.valueOf and parseHexBinary all throw IAE on bad input;
    // convert to a ParseException anchored at this context.
    case e: IllegalArgumentException =>
      val message = Option(e.getMessage).getOrElse(s"Exception parsing $valueType")
      throw new ParseException(message, ctx)
  }
}
/**
* Create a NULL literal expression.
*/
override def visitNullLiteral(ctx: NullLiteralContext): Literal = withOrigin(ctx) {
  // Untyped NULL; the analyzer assigns a concrete type later.
  Literal(null)
}
/**
* Create a Boolean literal expression.
*/
override def visitBooleanLiteral(ctx: BooleanLiteralContext): Literal = withOrigin(ctx) {
  // The lexer only produces TRUE/FALSE tokens here, so toBoolean cannot fail.
  val isTrue = ctx.getText.toBoolean
  if (isTrue) Literal.TrueLiteral else Literal.FalseLiteral
}
/**
* Create an integral literal expression. The code selects the most narrow integral type
* possible, either a BigDecimal, a Long or an Integer is returned.
*/
override def visitIntegerLiteral(ctx: IntegerLiteralContext): Literal = withOrigin(ctx) {
  // Narrowest type wins: Int, then Long, then BigDecimal.
  BigDecimal(ctx.getText) match {
    case v if v.isValidInt =>
      Literal(v.intValue())
    case v if v.isValidLong =>
      Literal(v.longValue())
    case v => Literal(v.underlying())
  }
}
/**
* Create a decimal literal for a regular decimal number.
*/
override def visitDecimalLiteral(ctx: DecimalLiteralContext): Literal = withOrigin(ctx) {
  // Wrap the java.math.BigDecimal underlying the parsed Scala BigDecimal.
  Literal(BigDecimal(ctx.getText).underlying())
}
/** Create a numeric literal expression. */
  private def numericLiteral
    (ctx: NumberContext, minValue: BigDecimal, maxValue: BigDecimal, typeName: String)
    (converter: String => Any): Literal = withOrigin(ctx) {
    // Drop the single trailing type-suffix character (e.g. the 'Y' of "12Y").
    val rawStrippedQualifier = ctx.getText.substring(0, ctx.getText.length - 1)
    try {
      val rawBigDecimal = BigDecimal(rawStrippedQualifier)
      // Range-check before converting so overflow produces a parse error
      // instead of a silently wrapped value.
      if (rawBigDecimal < minValue || rawBigDecimal > maxValue) {
        throw new ParseException(s"Numeric literal ${rawStrippedQualifier} does not " +
          s"fit in range [${minValue}, ${maxValue}] for type ${typeName}", ctx)
      }
      Literal(converter(rawStrippedQualifier))
    } catch {
      // Thrown by BigDecimal(...) or by the converter for malformed input.
      case e: NumberFormatException =>
        throw new ParseException(e.getMessage, ctx)
    }
  }
/**
* Create a Byte Literal expression.
*/
  override def visitTinyIntLiteral(ctx: TinyIntLiteralContext): Literal = {
    // Range-checked against Byte bounds; suffix handling happens in numericLiteral.
    numericLiteral(ctx, Byte.MinValue, Byte.MaxValue, ByteType.simpleString)(_.toByte)
  }
/**
* Create a Short Literal expression.
*/
  override def visitSmallIntLiteral(ctx: SmallIntLiteralContext): Literal = {
    // Range-checked against Short bounds; suffix handling happens in numericLiteral.
    numericLiteral(ctx, Short.MinValue, Short.MaxValue, ShortType.simpleString)(_.toShort)
  }
/**
* Create a Long Literal expression.
*/
  override def visitBigIntLiteral(ctx: BigIntLiteralContext): Literal = {
    // Range-checked against Long bounds; suffix handling happens in numericLiteral.
    numericLiteral(ctx, Long.MinValue, Long.MaxValue, LongType.simpleString)(_.toLong)
  }
/**
* Create a Double Literal expression.
*/
  override def visitDoubleLiteral(ctx: DoubleLiteralContext): Literal = {
    // Scala's Double.MinValue is the largest negative finite double, so the
    // range check spans the full finite double range.
    numericLiteral(ctx, Double.MinValue, Double.MaxValue, DoubleType.simpleString)(_.toDouble)
  }
/**
* Create a BigDecimal Literal expression.
*/
  override def visitBigDecimalLiteral(ctx: BigDecimalLiteralContext): Literal = {
    // Strip the two-character "BD" suffix.
    val raw = ctx.getText.substring(0, ctx.getText.length - 2)
    try {
      Literal(BigDecimal(raw).underlying())
    } catch {
      // NOTE(review): only AnalysisException is translated to a parse error —
      // presumably thrown by Literal construction for out-of-range decimals; a
      // NumberFormatException from BigDecimal(raw) would escape untranslated.
      // Confirm the lexer guarantees `raw` is always numeric.
      case e: AnalysisException =>
        throw new ParseException(e.message, ctx)
    }
  }
/**
* Create a String literal expression.
*/
  override def visitStringLiteral(ctx: StringLiteralContext): Literal = withOrigin(ctx) {
    // Concatenation of adjacent string tokens is handled by createString.
    Literal(createString(ctx))
  }
/**
* Create a String from a string literal context. This supports multiple consecutive string
* literals, these are concatenated, for example this expression "'hello' 'world'" will be
* converted into "helloworld".
*
* Special characters can be escaped by using Hive/C-style escaping.
*/
private def createString(ctx: StringLiteralContext): String = {
if (conf.escapedStringLiterals) {
ctx.STRING().asScala.map(stringWithoutUnescape).mkString
} else {
ctx.STRING().asScala.map(string).mkString
}
}
/**
* Create a [[CalendarInterval]] literal expression. An interval expression can contain multiple
* unit value pairs, for instance: interval 2 months 2 days.
*/
  override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) {
    val intervals = ctx.intervalField.asScala.map(visitIntervalField)
    validate(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
    // Sum the unit/value pairs into one CalendarInterval.
    Literal(intervals.reduce(_.add(_)))
  }
/**
* Create a [[CalendarInterval]] for a unit value pair. Two unit configuration types are
* supported:
* - Single unit.
* - From-To unit (only 'YEAR TO MONTH' and 'DAY TO SECOND' are supported).
*/
  override def visitIntervalField(ctx: IntervalFieldContext): CalendarInterval = withOrigin(ctx) {
    import ctx._
    val s = value.getText
    try {
      val unitText = unit.getText.toLowerCase(Locale.ROOT)
      // Dispatch on (unit, optional TO-unit); the TO form only supports the
      // two combinations below.
      val interval = (unitText, Option(to).map(_.getText.toLowerCase(Locale.ROOT))) match {
        case (u, None) if u.endsWith("s") =>
          // Handle plural forms, e.g: yearS/monthS/weekS/dayS/hourS/minuteS/hourS/...
          CalendarInterval.fromSingleUnitString(u.substring(0, u.length - 1), s)
        case (u, None) =>
          CalendarInterval.fromSingleUnitString(u, s)
        case ("year", Some("month")) =>
          CalendarInterval.fromYearMonthString(s)
        case ("day", Some("second")) =>
          CalendarInterval.fromDayTimeString(s)
        case (from, Some(t)) =>
          throw new ParseException(s"Intervals FROM $from TO $t are not supported.", ctx)
      }
      // The factory methods may apparently yield null for unparsable input — guard it.
      validate(interval != null, "No interval can be constructed", ctx)
      interval
    } catch {
      // Handle Exceptions thrown by CalendarInterval
      case e: IllegalArgumentException =>
        // Preserve the original stack trace on the re-thrown parse error.
        val pe = new ParseException(e.getMessage, ctx)
        pe.setStackTrace(e.getStackTrace)
        throw pe
    }
  }
/* ********************************************************************************************
* DataType parsing
* ******************************************************************************************** */
/**
* Create a Spark DataType.
*/
  private def visitSparkDataType(ctx: DataTypeContext): DataType = {
    // Visit the parsed type, then normalize Hive char types via
    // HiveStringType.replaceCharType (presumably CHAR/VARCHAR -> StringType).
    HiveStringType.replaceCharType(typedVisit(ctx))
  }
/**
* Resolve/create a primitive type.
*/
  override def visitPrimitiveDataType(ctx: PrimitiveDataTypeContext): DataType = withOrigin(ctx) {
    val dataType = ctx.identifier.getText.toLowerCase(Locale.ROOT)
    // Match on (type name, its integer parameters); most types take none,
    // char/varchar take a length, decimal takes optional precision/scale.
    (dataType, ctx.INTEGER_VALUE().asScala.toList) match {
      case ("boolean", Nil) => BooleanType
      case ("tinyint" | "byte", Nil) => ByteType
      case ("smallint" | "short", Nil) => ShortType
      case ("int" | "integer", Nil) => IntegerType
      case ("bigint" | "long", Nil) => LongType
      case ("float", Nil) => FloatType
      case ("double", Nil) => DoubleType
      case ("date", Nil) => DateType
      case ("timestamp", Nil) => TimestampType
      case ("string", Nil) => StringType
      case ("char", length :: Nil) => CharType(length.getText.toInt)
      case ("varchar", length :: Nil) => VarcharType(length.getText.toInt)
      case ("binary", Nil) => BinaryType
      case ("decimal", Nil) => DecimalType.USER_DEFAULT
      case ("decimal", precision :: Nil) => DecimalType(precision.getText.toInt, 0)
      case ("decimal", precision :: scale :: Nil) =>
        DecimalType(precision.getText.toInt, scale.getText.toInt)
      case (dt, params) =>
        // Unknown name or wrong arity: report the full "name(params)" form.
        val dtStr = if (params.nonEmpty) s"$dt(${params.mkString(",")})" else dt
        throw new ParseException(s"DataType $dtStr is not supported.", ctx)
    }
  }
/**
* Create a complex DataType. Arrays, Maps and Structures are supported.
*/
  override def visitComplexDataType(ctx: ComplexDataTypeContext): DataType = withOrigin(ctx) {
    ctx.complex.getType match {
      case SqlBaseParser.ARRAY =>
        // ARRAY takes a single element type.
        ArrayType(typedVisit(ctx.dataType(0)))
      case SqlBaseParser.MAP =>
        // MAP takes a key type and a value type, in that order.
        MapType(typedVisit(ctx.dataType(0)), typedVisit(ctx.dataType(1)))
      case SqlBaseParser.STRUCT =>
        // An absent column list (null) yields an empty struct.
        StructType(Option(ctx.complexColTypeList).toSeq.flatMap(visitComplexColTypeList))
    }
  }
/**
* Create top level table schema.
*/
  protected def createSchema(ctx: ColTypeListContext): StructType = {
    // Option(ctx) guards against an absent (null) column list.
    StructType(Option(ctx).toSeq.flatMap(visitColTypeList))
  }
/**
* Create a [[StructType]] from a number of column definitions.
*/
  override def visitColTypeList(ctx: ColTypeListContext): Seq[StructField] = withOrigin(ctx) {
    // One StructField per column definition, in declaration order.
    ctx.colType().asScala.map(visitColType)
  }
/**
* Create a top level [[StructField]] from a column definition.
*/
  override def visitColType(ctx: ColTypeContext): StructField = withOrigin(ctx) {
    import ctx._
    val builder = new MetadataBuilder
    // Add comment to metadata
    if (STRING != null) {
      builder.putString("comment", string(STRING))
    }
    // Add Hive type string to metadata.
    val rawDataType = typedVisit[DataType](ctx.dataType)
    val cleanedDataType = HiveStringType.replaceCharType(rawDataType)
    // Only record the original Hive type string when the char-type
    // normalization actually changed the type.
    if (rawDataType != cleanedDataType) {
      builder.putString(HIVE_TYPE_STRING, rawDataType.catalogString)
    }
    // Top-level columns are always nullable here.
    StructField(
      identifier.getText,
      cleanedDataType,
      nullable = true,
      builder.build())
  }
/**
* Create a [[StructType]] from a sequence of [[StructField]]s.
*/
  protected def createStructType(ctx: ComplexColTypeListContext): StructType = {
    // Option(ctx) guards against an absent (null) column list.
    StructType(Option(ctx).toSeq.flatMap(visitComplexColTypeList))
  }
/**
* Create a [[StructType]] from a number of column definitions.
*/
  override def visitComplexColTypeList(
      ctx: ComplexColTypeListContext): Seq[StructField] = withOrigin(ctx) {
    // One StructField per nested column definition, in declaration order.
    ctx.complexColType().asScala.map(visitComplexColType)
  }
/**
* Create a [[StructField]] from a column definition.
*/
override def visitComplexColType(ctx: ComplexColTypeContext): StructField = withOrigin(ctx) {
import ctx._
val structField = StructField(identifier.getText, typedVisit(dataType), nullable = true)
if (STRING == null) structField else structField.withComment(string(STRING))
}
}
| stanzhai/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala | Scala | apache-2.0 | 59,302 |
package org.iainhull.resttest
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import play.api.libs.json._
import play.api.libs.functional.syntax._
class JsonExtractorsSpec extends FlatSpec with Matchers {
  import Dsl._
  import JsonExtractors._
  import TestData._
  // Deserialising JSON arrays into Scala Lists, via built-in and custom Reads.
  "jsonToList" should "deserialise to scala types" in {
    jsonToList[String](jsonList, __ \\\\ "name") should be(List("toto", "tata"))
    jsonToList[String](jsonDoc, __ \\ "user" \\ "favorite" \\ "colors") should be(List("red", "green"))
  }
  it should "deserialise to custom types" in {
    jsonToList[Person](jsonList, __) should be(List(Toto, Tata))
    jsonToList[Person](jsonDoc, __ \\ "users") should be(List(Toto))
  }
  // Deserialising a single JSON value at a path.
  "jsonToValue" should "deserialise to scala types" in {
    jsonToValue[String](jsonDoc, __ \\ "user" \\ "name") should be("toto")
    jsonToValue[String](jsonDoc, __ \\ "user" \\ "favorite" \\ "colors" apply (0)) should be("red")
  }
  it should "deserialise to custom types" in {
    jsonToValue[Person](jsonList, __ apply (0)) should be(Toto)
    jsonToValue[Person](jsonDoc, __ \\ "user") should be(Toto)
  }
  // Runs an Extractor against a synthetic 200 Response carrying the given JSON body.
  def evaluate[T](ext: Extractor[T], json: JsValue): T = ext.op(Response(Status.OK, Map(), Some(Json.stringify(json))))
  // Extractors operating on the whole response body.
  "jsonBodyAsList" should "deserialise to scala types" in {
    evaluate(jsonBodyAsList[Person], jsonList) should be(List(Toto, Tata))
    evaluate(jsonBodyAsList[Int](__ \\\\ "age"), jsonList) should be(List(25, 20))
  }
  it should "include the type in its name" in {
    jsonBodyAsList[Person].name should be ("jsonBodyAsList[Person]")
  }
  "jsonBodyAs" should "deserialise to scala types" in {
    evaluate(jsonBodyAs[Person], Json parse personJson) should be(Jason)
    evaluate(jsonBodyAs[String](__ \\ "user" \\ "name"), jsonDoc) should be("toto")
  }
  it should "include the type in its name" in {
    jsonBodyAs[Person].name should be ("jsonBodyAs[Person]")
  }
} | IainHull/resttest | src/test/scala/org/iainhull/resttest/JsonExtractorsSpec.scala | Scala | apache-2.0 | 1,921 |
package com.github.mnogu.gatling.mqtt.action
import akka.actor.ActorRef
import com.github.mnogu.gatling.mqtt.config.MqttProtocol
import com.github.mnogu.gatling.mqtt.request.builder.MqttAttributes
import io.gatling.core.Predef._
import io.gatling.core.action.{Failable, Interruptable}
import io.gatling.core.result.message.{KO, OK}
import io.gatling.core.result.writer.DataWriterClient
import io.gatling.core.session._
import io.gatling.core.util.TimeHelper._
import io.gatling.core.validation.Validation
import org.fusesource.mqtt.client.{MQTT, Callback, QoS, CallbackConnection}
object MqttRequestAction extends DataWriterClient {

  /**
   * Records a KO entry for a request whose attributes could not be resolved.
   * All four timing fields collapse to the same instant since the request
   * never actually executed.
   */
  def reportUnbuildableRequest(
      requestName: String,
      session: Session,
      errorMessage: String): Unit = {
    val timestamp = nowMillis
    writeRequestData(
      session, requestName, timestamp, timestamp, timestamp, timestamp, KO, Some(errorMessage))
  }
}
class MqttRequestAction(
    val mqtt: MQTT,
    val mqttAttributes: MqttAttributes,
    val mqttProtocol: MqttProtocol,
    val next: ActorRef)
  extends Interruptable with Failable with DataWriterClient {

  /**
   * Resolves an optional session expression and, when defined, pushes the
   * resolved string into `assign` before threading the MQTT client through
   * the validation chain. An undefined setting is a no-op success.
   */
  private def applyStringSetting(session: Session, setting: Option[Expression[String]])
      (assign: String => Unit)(mqtt: MQTT): Validation[MQTT] =
    setting match {
      case Some(expression) => expression(session).map { resolved =>
        assign(resolved)
        mqtt
      }
      case None => mqtt
    }

  /** Applies every non-expression (statically typed) option onto the client. */
  private def configureStaticOptions(mqtt: MQTT): Unit = {
    val options = mqttProtocol.optionPart
    options.cleanSession.foreach(v => mqtt.setCleanSession(v))
    options.keepAlive.foreach(v => mqtt.setKeepAlive(v))
    options.willQos.foreach(v => mqtt.setWillQos(v))
    options.willRetain.foreach(v => mqtt.setWillRetain(v))

    val reconnect = mqttProtocol.reconnectPart
    reconnect.connectAttemptsMax.foreach(v => mqtt.setConnectAttemptsMax(v))
    reconnect.reconnectAttemptsMax.foreach(v => mqtt.setReconnectAttemptsMax(v))
    reconnect.reconnectDelay.foreach(v => mqtt.setReconnectDelay(v))
    reconnect.reconnectDelayMax.foreach(v => mqtt.setReconnectDelayMax(v))
    reconnect.reconnectBackOffMultiplier.foreach(v => mqtt.setReconnectBackOffMultiplier(v))

    val socket = mqttProtocol.socketPart
    socket.receiveBufferSize.foreach(v => mqtt.setReceiveBufferSize(v))
    socket.sendBufferSize.foreach(v => mqtt.setSendBufferSize(v))
    socket.trafficClass.foreach(v => mqtt.setTrafficClass(v))

    val throttling = mqttProtocol.throttlingPart
    throttling.maxReadRate.foreach(v => mqtt.setMaxReadRate(v))
    throttling.maxWriteRate.foreach(v => mqtt.setMaxWriteRate(v))
  }

  /**
   * Resolves all session-dependent settings, configures the client, connects,
   * and publishes the message. Failures at any resolution step short-circuit
   * the validation chain.
   */
  override def executeOrFail(session: Session): Validation[Unit] = {
    val options = mqttProtocol.optionPart
    applyStringSetting(session, mqttProtocol.host)(h => mqtt.setHost(h))(mqtt)
      .flatMap(applyStringSetting(session, options.clientId)(v => mqtt.setClientId(v)))
      .flatMap(applyStringSetting(session, options.userName)(v => mqtt.setUserName(v)))
      .flatMap(applyStringSetting(session, options.password)(v => mqtt.setPassword(v)))
      .flatMap(applyStringSetting(session, options.willTopic)(v => mqtt.setWillTopic(v)))
      .flatMap(applyStringSetting(session, options.willMessage)(v => mqtt.setWillMessage(v)))
      .flatMap(applyStringSetting(session, options.version)(v => mqtt.setVersion(v)))
      .map { resolvedMqtt =>
        configureStaticOptions(resolvedMqtt)
        val connection = resolvedMqtt.callbackConnection()
        connection.connect(new Callback[Void] {
          override def onSuccess(void: Void): Unit = {
            mqttAttributes.requestName(session).flatMap { resolvedRequestName =>
              mqttAttributes.topic(session).flatMap { resolvedTopic =>
                sendRequest(
                  resolvedRequestName,
                  connection,
                  resolvedTopic,
                  mqttAttributes.payload,
                  mqttAttributes.qos,
                  mqttAttributes.retain,
                  session)
              }
            }
          }

          override def onFailure(value: Throwable): Unit = {
            // Connection failed: report the error, then tear the connection down.
            mqttAttributes.requestName(session).map { resolvedRequestName =>
              MqttRequestAction.reportUnbuildableRequest(
                resolvedRequestName, session, value.getMessage)
            }
            connection.disconnect(null)
          }
        })
      }
  }

  /** Publishes the resolved payload and records timing/status once the broker replies. */
  private def sendRequest(
      requestName: String,
      connection: CallbackConnection,
      topic: String,
      payload: Expression[String],
      qos: QoS,
      retain: Boolean,
      session: Session): Validation[Unit] = {
    payload(session).map { resolvedPayload =>
      val requestStartDate = nowMillis
      val requestEndDate = nowMillis

      connection.publish(
        topic, resolvedPayload.getBytes, qos, retain, new Callback[Void] {

          override def onFailure(value: Throwable): Unit =
            writeData(isSuccess = false, Some(value.getMessage))

          override def onSuccess(void: Void): Unit =
            writeData(isSuccess = true, None)

          // Logs the request outcome, releases the session to the next action,
          // and disconnects — always, on success and on failure.
          private def writeData(isSuccess: Boolean, message: Option[String]) = {
            val responseStartDate = nowMillis
            val responseEndDate = nowMillis
            writeRequestData(
              session,
              requestName,
              requestStartDate,
              requestEndDate,
              responseStartDate,
              responseEndDate,
              if (isSuccess) OK else KO,
              message)
            next ! session
            connection.disconnect(null)
          }
        })
    }
  }
}
| andreibosco/gatling-mqtt | src/main/scala/com/github/mnogu/gatling/mqtt/action/MqttRequestAction.scala | Scala | apache-2.0 | 8,109 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.storage.common.utils
import java.util.concurrent.TimeUnit
import com.github.benmanes.caffeine.cache.{CacheLoader, Caffeine}
import org.apache.hadoop.fs.{FileContext, FileStatus, Path}
import org.locationtech.geomesa.fs.storage.common.utils.StorageUtils.RemoteIterator
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
/**
* Caches file statuses to avoid repeated file system operations. Status expires after a
* configurable period, by default 10 minutes.
*/
object PathCache {

  val CacheDurationProperty = SystemProperty("geomesa.fs.file.cache.duration", "10 minutes")

  private val duration = CacheDurationProperty.toDuration.get.toMillis

  // All three caches share the same expire-after-write policy.
  private def expiring = Caffeine.newBuilder().expireAfterWrite(duration, TimeUnit.MILLISECONDS)

  // existence checks, keyed by (file system handle, path)
  private val pathCache = expiring.build(
    new CacheLoader[(FileContext, Path), java.lang.Boolean]() {
      override def load(key: (FileContext, Path)): java.lang.Boolean = key._1.util.exists(key._2)
    }
  )

  // per-file status lookups
  private val statusCache = expiring.build(
    new CacheLoader[(FileContext, Path), FileStatus]() {
      override def load(key: (FileContext, Path)): FileStatus = key._1.getFileStatus(key._2)
    }
  )

  // directory listings, materialized as a Stream
  private val listCache = expiring.build(
    new CacheLoader[(FileContext, Path), Stream[FileStatus]]() {
      override def load(key: (FileContext, Path)): Stream[FileStatus] =
        RemoteIterator(key._1.listStatus(key._2)).toStream
    }
  )

  /**
   * Register a path as existing, optionally seeding its status and directory
   * listing so the first read does not hit the file system.
   *
   * @param fc file context
   * @param path path
   * @param status file status, if available
   * @param list directory contents, if available
   */
  def register(fc: FileContext,
               path: Path,
               status: Option[FileStatus] = None,
               list: Option[Stream[FileStatus]] = None): Unit = {
    val key = (fc, path)
    pathCache.put(key, java.lang.Boolean.TRUE)
    status.foreach(statusCache.put(key, _))
    list.foreach(listCache.put(key, _))
  }

  /** Check whether a path exists, loading through the cache on first access. */
  def exists(fc: FileContext, path: Path): Boolean = pathCache.get((fc, path)).booleanValue()

  /** File status for a path, cached. */
  def status(fc: FileContext, path: Path): FileStatus = statusCache.get((fc, path))

  /** Children of a directory, cached. */
  def list(fc: FileContext, dir: Path): Iterator[FileStatus] = listCache.get((fc, dir)).iterator

  /** Drop any cached values for the path; they will be re-loaded on next access. */
  def invalidate(fc: FileContext, path: Path): Unit = {
    val key = (fc, path)
    pathCache.invalidate(key)
    statusCache.invalidate(key)
    listCache.invalidate(key)
  }
}
| ddseapy/geomesa | geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-common/src/main/scala/org/locationtech/geomesa/fs/storage/common/utils/PathCache.scala | Scala | apache-2.0 | 3,721 |
/* (c) 2009-2010 Regents of the University of California */
package org.cdlib.was.weari.tests;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.scalatest._;
import org.scalatest.matchers._;
import org.cdlib.was.weari._;
class WeariTest extends FunSpec with BeforeAndAfter {
  // NOTE(review): the only test case is commented out (it downloads a remote
  // ARC file), so this suite currently verifies nothing.
  describe("weari") {
    // it("should parse arcs") {
    //   val config = new Config {};
    //   val weari = new Weari(config);
    //   weari.parseArcsLocal(List[String]("http://archive.org/download/ExampleArcAndWarcFiles/IAH-20080430204825-00000-blackbook.arc.gz"));
    // }
  }
}
| cdlib/weari | src/test/scala/org/cdlib/was/weari/tests/WeariTest.scala | Scala | bsd-3-clause | 633 |
package cn.gridx.scala.lang.datetime
import org.joda.time.DateTime
/**
* Created by tao on 4/12/16.
*/
object Converters {

  /** Milliseconds in one hour. */
  private val MillisPerHour = 3600L * 1000L

  // Small demo of the conversions below.
  def main(args: Array[String]): Unit = {
    val milli = 1460449019992L // 2016-04-12T16:16:59.992+08:00
    val t1 = toPreSharpHour(milli)
    val t2 = toNextSharyHour(milli)
    println(t1)
    println(t2)
    if (t1.plusHours(1).equals(t2))
      println("Equals")
    else
      println("Not equals")
    if (t1.isBefore(t2))
      println("t1 is before t2")
  }

  /**
   * 将milli对应的时间点转为上一个整小时
   * 例如: 2016-06-06 18:28:30 -> 2016-06-06 18:00:00
   *
   * Uses floorMod so pre-epoch (negative) instants also round down to the
   * previous sharp hour; plain `%` would round toward zero instead.
   * */
  def toPreSharpHour(milli: Long): DateTime = {
    val remainder = Math.floorMod(milli, MillisPerHour)
    new DateTime(milli - remainder)
  }

  /**
   * 将milli对应的时间点转化为下一个整小时
   * 例如: 2016-06-06 18:28:30 -> 2016-06-06 19:00:00
   * */
  def toNextSharyHour(milli: Long): DateTime = {
    toPreSharpHour(milli).plusHours(1)
  }

  /**
   * 计算t1和t2之包含了多少个hour,
   * 例如 18点与22点之间包含了 5个hours (18点, 19点, 20点, 21点, 22点)
   *
   * Counts the sharp-hour marks in [t1, t2], inclusive on both ends.
   * Assumes t1 is not after t2 — TODO confirm callers guarantee the ordering.
   * */
  def hoursCount(t1: DateTime, t2: DateTime): Int = {
    val first = toPreSharpHour(t1.getMillis).getMillis
    val second = toPreSharpHour(t2.getMillis).getMillis
    (((second - first) / MillisPerHour) + 1).toInt
  }
}
| TaoXiao/Scala | lang/src/main/scala/cn/gridx/scala/lang/datetime/Converters.scala | Scala | apache-2.0 | 1,205 |
package expr
import Element.Element
class ExprFormatter {

  // Operator groups in increasing precedence order; "/" is absent because
  // fractions are rendered vertically and handled as a special case.
  private val binaryOpGroups = Array(
    Set("|", "||"),
    Set("&", "&&"),
    Set("^"),
    Set("==", "!="),
    Set("<", "<=", ">", ">="),
    Set("+", "-"),
    Set("*", "%") // divide is missing because we're formatting that vertically
  )

  // Operator -> precedence (index of its group above).
  private val binaryOpPrecedence = (
    for {
      i <- binaryOpGroups.indices
      op <- binaryOpGroups(i)
    } yield op -> i
  ).toMap

  // Unary operators bind tighter than every binary operator.
  private val unaryOpPrecedence = binaryOpGroups.length

  // Sentinel precedence marking "directly inside a vertical fraction".
  private val fractionPrecedence = -1

  /**
   * Renders `expr` as a two-dimensional [[Element]], adding parentheses only
   * when the operator binds less tightly than the enclosing context.
   */
  private def format(expr: Expr, enclosingPrecedence: Int): Element = expr match {
    case Var(name) => Element.elem(name)
    case Number(num) =>
      // Render whole doubles without the trailing ".0".
      def stripDot(s: String) = if (s endsWith ".0") s.substring(0, s.length - 2) else s
      Element.elem(stripDot(num.toString))
    case UnOp(op, arg) => Element.elem(op) beside format(arg, unaryOpPrecedence)
    case BinOp("/", left, right) =>
      val top = format(left, fractionPrecedence)
      val bottom = format(right, fractionPrecedence)
      val line = Element.elem('-', 1, top.width max bottom.width)
      val fraction = top above line above bottom
      // A fraction nested directly inside another fraction gets horizontal
      // padding so its bar is distinguishable from the enclosing bar.
      if (enclosingPrecedence != fractionPrecedence) fraction
      else Element.elem(" ") beside fraction beside Element.elem(" ")
    case BinOp(op, left, right) =>
      val operationPrecedence = binaryOpPrecedence(op)
      val leftElem = format(left, operationPrecedence)
      // BUG FIX: the right operand must be formatted at precedence + 1 so that
      // right-nested same-precedence operands stay parenthesised, e.g.
      // a - (b - c) previously printed as the ambiguous "a - b - c".
      val rightElem = format(right, operationPrecedence + 1)
      val operation = leftElem beside Element.elem(" " + op + " ") beside rightElem
      if (enclosingPrecedence <= operationPrecedence) operation
      else Element.elem("(") beside operation beside Element.elem(")")
  }

  /** Formats an expression at top level (no enclosing operator). */
  def format(expr: Expr): Element = format(expr, 0)

  /**
   * Structurally simplifies an expression: double negation, repeated abs,
   * additive/multiplicative identities (on either side), and the x+x, x-x,
   * x/x patterns.
   */
  def simplify(expr: Expr): Expr = expr match {
    case UnOp("-", UnOp("-", e)) => simplify(e)
    case UnOp("abs", e@UnOp("abs", _)) => simplify(e)
    case UnOp(op, e) => UnOp(op, simplify(e))
    case BinOp("+", e, Number(0)) => simplify(e)
    case BinOp("+", Number(0), e) => simplify(e) // left-side identity, previously missed
    case BinOp("-", e, Number(0)) => simplify(e)
    case BinOp("*", e, Number(1)) => simplify(e)
    case BinOp("*", Number(1), e) => simplify(e) // left-side identity, previously missed
    case BinOp("/", e, Number(1)) => simplify(e)
    case BinOp(op, l, r) =>
      // Simplify each operand exactly once; the original re-ran simplify in
      // every guard and again when building the result.
      val (sl, sr) = (simplify(l), simplify(r))
      op match {
        case "+" if sl == sr => BinOp("*", sl, Number(2))
        case "-" if sl == sr => Number(0)
        case "/" if sl == sr => Number(1)
        case _ => BinOp(op, sl, sr)
      }
    case _ => expr
  }
}
| mhotchen/programming-in-scala | src/expr/ExprFormatter.scala | Scala | apache-2.0 | 2,489 |
package org.gark87.yajom.base
import java.util
trait CollectionCreator {

  /** A fresh, empty mutable list backed by an ArrayList. */
  def createList[T](): util.List[T] = new util.ArrayList[T]()

  /** A fresh, empty mutable set backed by a HashSet. */
  def createSet[T](): util.Set[T] = new util.HashSet[T]()

  /**
   * Converts each element of `from` with the implicit `wrap` and collects the
   * results into a HashSet (duplicates after wrapping collapse). A null input
   * yields null, mirroring Java-style null propagation.
   */
  implicit def toCollection[F, T](from: util.Collection[F])(implicit wrap: F => T): util.Set[T] =
    if (from == null) {
      null
    } else {
      val converted = new util.HashSet[T]()
      val elements = from.iterator()
      while (elements.hasNext) {
        converted.add(wrap(elements.next()))
      }
      converted
    }
}
| gark87/yajom | yajom-macros/src/main/scala/org/gark87/yajom/base/CollectionCreator.scala | Scala | mit | 525 |
/*
* (c) Copyright 2020 Micro Focus, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.google.utils.action
object Outputs {

  // Output names for Google Cloud SQL database-instance operations.
  object SQLDatabaseInstance {
    final val CONNECTION_NAME = "connectionName"
    final val PUBLIC_IP_ADDRESS = "publicIPAddress"
    final val PRIVATE_IP_ADDRESS = "privateIPAddress"
    final val SELF_LINK = "selfLink"
  }

  // Output names for Google Cloud Storage bucket operations.
  object StorageBucketOutputs {
    final val ACCESS_CONTROL = "accessControl"
    final val DEFAULT_EVENT_BASED_HOLD_ENABLED = "defaultEventBasedHoldEnabled"
    final val VERSIONING_ENABLED = "versioningEnabled"
    final val LOCATION = "location"
    final val LOCATION_TYPE = "locationType"
  }
}
| CloudSlang/cs-actions | cs-google/src/main/scala/io/cloudslang/content/google/utils/action/Outputs.scala | Scala | apache-2.0 | 1,226 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.dataSet
import org.apache.calcite.plan.{RelOptRule, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.dataset.DataSetValues
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalValues
class DataSetValuesRule
  extends ConverterRule(
    classOf[FlinkLogicalValues],
    FlinkConventions.LOGICAL,
    FlinkConventions.DATASET,
    "DataSetValuesRule")
{

  /**
   * Rewrites a logical VALUES node into its DataSet equivalent, keeping the
   * row type and tuples and swapping only the convention trait.
   */
  def convert(rel: RelNode): RelNode = {
    val logicalValues: FlinkLogicalValues = rel.asInstanceOf[FlinkLogicalValues]
    val dataSetTraits: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.DATASET)

    new DataSetValues(
      rel.getCluster,
      dataSetTraits,
      rel.getRowType,
      logicalValues.getTuples,
      "DataSetValuesRule")
  }
}
object DataSetValuesRule {
  // Singleton instance used when registering the rule with the planner.
  val INSTANCE: RelOptRule = new DataSetValuesRule
}
| jinglining/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/dataSet/DataSetValuesRule.scala | Scala | apache-2.0 | 1,784 |
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.general.fns
/**
* The Gaussian function is a Radial Basis Function that describes the typical "bell curve", or "normal distribution".
* <p/>
* The Gaussian function requires paramaters that specify the width (over all dimensions), as well as the
* centers of each dimension. So a 3d Gaussian would have the parameters lined up as follows:
* <p/>
* params[0] = width (of all dimensions),
* <p/>
* params[1] = center of dimension 0,
* <p/>
* params[2] = center of dimension 1,
* <p/>
* params[3] = center of dimension 3
* <p/>
* http://en.wikipedia.org/wiki/Gaussian_function
*
* @param theDimensions The number of dimensions.
* @param theParams A vector to hold the parameters.
* @param theIndex The index into the params vector. You can store multiple RBF's in a vector.
*/
class GaussianFunction(theDimensions: Int, theParams: Array[Double], theIndex: Int)
extends AbstractRBF(theDimensions, theParams, theIndex) {
override def evaluate(x: Array[Double]): Double = {
var value = 0.0
val width = getWidth.abs
for(i <- 0 until getDimensions) {
val center = this.getCenter(i)
value += Math.pow(x(i) - center, 2) / (2.0 * width * width)
}
Math.exp(-value)
}
} | PeterLauris/aifh | vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/general/fns/GaussianFunction.scala | Scala | apache-2.0 | 2,201 |
package sample.test
import org.specs._
// Mixin spec: requires the host class to be a Specification that also mixes
// in SeleniumSetupAndTearDown (which presumably provides `browser`).
trait SeleniumExampleSpec { _: Specification with SeleniumSetupAndTearDown =>
  //extends Specification with SeleniumSetupAndTearDown {
  "/testkit/ajax" should {
    import SeleniumTestClient._
    "replace the button with text when clicked" in {
      browser.open("/testkit/ajax")
      browser.click("clickme")
      // Poll (up to 1000 ms) until the ajax response has rewritten the button.
      browser.waitForCondition("""
        selenium.browserbot
          .getCurrentWindow().document
          .getElementById('ajax_button')
          .innerHTML == 'Clicked'""",
        "1000")
      browser.isTextPresent("Clicked") mustBe true
    }
  }
}
| timperrett/lift-in-action | chapter-14/src/test/scala/SeleniumExampleSpec.scala | Scala | apache-2.0 | 612 |
package monocle.function
import monocle.{Setter, Traversal}
import scala.annotation.implicitNotFound
import cats.{Applicative, Monad, Traverse}
import cats.data.State
import cats.instances.int._
import cats.instances.lazyList._
import cats.syntax.flatMap._
/** [[Plated]] is a type-class for types which can extract their immediate self-similar children.
*
* @tparam A
* the parent and child type of a [[Plated]]
*/
@implicitNotFound(
  "Could not find an instance of Plated[${A}], please check Monocle instance location policy to " + "find out which import is necessary"
)
abstract class Plated[A] extends Serializable { self =>
  // The single operation: a Traversal focusing every immediate
  // self-similar child of an A.
  def plate: Traversal[A, A]
}
trait CommonPlatedFunctions {

  /** [[Traversal]] of immediate self-similar children, summoned from the implicit instance */
  def plate[A](implicit P: Plated[A]): Traversal[A, A] = P.plate
}
trait PlatedFunctions extends CommonPlatedFunctions {
  /** get the immediate self-similar children of a target */
  def children[A: Plated](a: A): List[A] = plate[A].getAll(a)
  /** rewrite a target by applying a rule as often as possible until it reaches a fixpoint (this is an infinite loop if
    * there is no fixpoint)
    */
  def rewrite[A: Plated](f: A => Option[A])(a: A): A =
    rewriteOf(plate[A].asSetter)(f)(a)
  /** rewrite a target by applying a rule within a [[Setter]], as often as possible until it reaches a fixpoint (this is
    * an infinite loop if there is no fixpoint)
    */
  def rewriteOf[A](l: Setter[A, A])(f: A => Option[A])(a: A): A = {
    // Rewrite the whole subtree first, then retry the rule on the result;
    // recursion stops once f returns None, i.e. the fixpoint is reached.
    def go(b: A): A = {
      val c = transformOf(l)(go)(b)
      f(c).fold(c)(go)
    }
    go(a)
  }
  /** transform every element */
  def transform[A: Plated](f: A => A)(a: A): A =
    transformOf(plate[A].asSetter)(f)(a)
  /** transform every element by applying a [[Setter]] */
  def transformOf[A](l: Setter[A, A])(f: A => A)(a: A): A =
    // Each child is rewritten with f, then we recurse into the rewritten child.
    l.modify(b => transformOf(l)(f)(f(b)))(a)
  /** transforming counting changes */
  def transformCounting[A: Plated](f: A => Option[A])(a: A): (Int, A) =
    // Threads an Int counter through the State monad, incrementing once per
    // successful application of f; runEmpty starts the counter at 0.
    transformM[A, State[Int, *]] { b =>
      f(b)
        .map(c => State((i: Int) => (i + 1, c)))
        .getOrElse(State.pure(b))
    }(a).runEmpty.value
  /** transforming every element using monadic transformation */
  def transformM[A: Plated, M[_]: Monad](f: A => M[A])(a: A): M[A] = {
    val l = plate[A]
    // Effectful analogue of transformOf: f runs on each child, then we recurse
    // into the result, sequencing effects through M.
    def go(c: A): M[A] =
      l.modifyA[M](b => f(b).flatMap(go))(c)
    go(a)
  }
  /** get all transitive self-similar elements of a target, including itself */
  def universe[A: Plated](a: A): LazyList[A] = {
    val fold = plate[A].asFold
    // The target itself, lazily followed by the universes of its children.
    def go(b: A): LazyList[A] = b #:: fold.foldMap[LazyList[A]](go)(b)
    go(a)
  }
}
object Plated extends PlatedFunctions {

  /** Build a [[Plated]] whose `plate` is the supplied traversal over immediate children. */
  def apply[A](traversal: Traversal[A, A]): Plated[A] =
    new Plated[A] {
      override val plate: Traversal[A, A] = traversal
    }

  // -------------------------------------------------------------------------
  // Standard library instances
  // -------------------------------------------------------------------------

  /** The single immediate child of a non-empty `List` is its tail. */
  implicit def listPlated[A]: Plated[List[A]] =
    Plated(
      new Traversal[List[A], List[A]] {
        def modifyA[F[_]: Applicative](f: List[A] => F[List[A]])(s: List[A]): F[List[A]] =
          s match {
            case head :: rest => Applicative[F].map(f(rest))(head :: _)
            case Nil          => Applicative[F].pure(Nil)
          }
      }
    )

  /** The single immediate child of a non-empty `LazyList` is its tail. */
  implicit def lazyListPlated[A]: Plated[LazyList[A]] =
    Plated(
      new Traversal[LazyList[A], LazyList[A]] {
        def modifyA[F[_]: Applicative](f: LazyList[A] => F[LazyList[A]])(s: LazyList[A]): F[LazyList[A]] =
          s match {
            case head #:: rest => Applicative[F].map(f(rest))(head #:: _)
            case _             => Applicative[F].pure(LazyList.empty)
          }
      }
    )

  /** The single immediate child of a non-empty `String` is everything after its first character. */
  implicit val stringPlated: Plated[String] = Plated(
    new Traversal[String, String] {
      def modifyA[F[_]: Applicative](f: String => F[String])(s: String): F[String] =
        if (s.isEmpty) Applicative[F].pure("")
        else Applicative[F].map(f(s.tail))(s.head.toString ++ _)
    }
  )

  /** The single immediate child of a non-empty `Vector` is its tail. */
  implicit def vectorPlated[A]: Plated[Vector[A]] =
    Plated(
      new Traversal[Vector[A], Vector[A]] {
        def modifyA[F[_]: Applicative](f: Vector[A] => F[Vector[A]])(s: Vector[A]): F[Vector[A]] =
          if (s.isEmpty) Applicative[F].pure(Vector.empty)
          else Applicative[F].map(f(s.tail))(s.head +: _)
      }
    )

  // -------------------------------------------------------------------------
  // Cats instances
  // -------------------------------------------------------------------------
  import cats.Now
  import cats.data.Chain
  import cats.free.{Cofree, Free}

  /** The single immediate child of a non-empty `Chain` is everything after its first element. */
  implicit def chainPlated[A]: Plated[Chain[A]] =
    Plated(
      new Traversal[Chain[A], Chain[A]] {
        def modifyA[F[_]: Applicative](f: Chain[A] => F[Chain[A]])(s: Chain[A]): F[Chain[A]] =
          s.uncons match {
            case Some((head, rest)) => Applicative[F].map(f(rest))(_.prepend(head))
            case None               => Applicative[F].pure(Chain.empty)
          }
      }
    )

  /** The immediate children of a `Cofree` node are the sub-trees held in its tail. */
  implicit def cofreePlated[S[_]: Traverse, A]: Plated[Cofree[S, A]] =
    Plated(
      new Traversal[Cofree[S, A], Cofree[S, A]] {
        def modifyA[F[_]: Applicative](f: Cofree[S, A] => F[Cofree[S, A]])(s: Cofree[S, A]): F[Cofree[S, A]] =
          Applicative[F].map(Traverse[S].traverse(s.tail.value)(f))(newTail => Cofree(s.head, Now(newTail)))
      }
    )

  /** The immediate children of a `Free` are the computations under its outermost suspension. */
  implicit def freePlated[S[_]: Traverse, A]: Plated[Free[S, A]] =
    Plated(
      new Traversal[Free[S, A], Free[S, A]] {
        def modifyA[F[_]: Applicative](f: Free[S, A] => F[Free[S, A]])(s: Free[S, A]): F[Free[S, A]] =
          s.resume.fold(
            layer => Applicative[F].map(Traverse[S].traverse(layer)(f))(Free.roll),
            result => Applicative[F].pure(Free.pure(result))
          )
      }
    )
}
| julien-truffaut/Monocle | core/shared/src/main/scala/monocle/function/Plated.scala | Scala | mit | 6,122 |
import leon.lang._
import leon.annotation._
object Benchmark {
//-----------------------------------------------------------------------------
// Axioms
//-----------------------------------------------------------------------------
sealed abstract class Nat
case class succ(pred:Nat) extends Nat
case class zero() extends Nat
def pred(n:Nat): Nat = {
require(n!=zero())
n match {
case succ(p) => p
}
}
sealed abstract class Lst
case class cons(head:Nat,tail:Lst) extends Lst
case class nil() extends Lst
def head(l:Lst): Nat = {
require(l!=nil())
l match {
case cons(a,_) => a
}
}
def tail(l:Lst): Lst = {
require(l!=nil())
l match {
case cons(_,a) => a
}
}
sealed abstract class Pair
case class mkpair(first:Nat,second:Nat) extends Pair
def first(p:Pair): Nat = {
p match {
case mkpair(r,_) => r
}
}
def second(p:Pair): Nat = {
p match {
case mkpair(_,r) => r
}
}
sealed abstract class ZLst
case class zcons(zhead:Pair,ztail:ZLst) extends ZLst
case class znil() extends ZLst
def zhead(l:ZLst): Pair = {
require(l!=znil())
l match {
case zcons(a,_) => a
}
}
def ztail(l:ZLst): ZLst = {
require(l!=znil())
l match {
case zcons(_,a) => a
}
}
sealed abstract class Tree
case class node(data:Nat,left:Tree,right:Tree) extends Tree
case class leaf() extends Tree
def data(t:Tree): Nat = {
require(t!=leaf())
t match {
case node(r,_,_) => r
}
}
def left(t:Tree): Tree = {
require(t!=leaf())
t match {
case node(_,r,_) => r
}
}
def right(t:Tree): Tree = {
require(t!=leaf())
t match {
case node(_,_,r) => r
}
}
//def P(n:Nat): Boolean = {???}
//def f(n:Nat): Nat = {???}
  // Strict less-than on Peano naturals, by simultaneous structural recursion.
  // NOTE: definitions in this object are intentionally naive; they serve as
  // induction targets for the Leon verifier (see the @induct goals below).
  def less(x:Nat,y:Nat): Boolean = {
    (x,y) match {
      case (_,zero()) => false
      case (zero(),succ(_)) => true
      case (succ(a),succ(b)) => less(a,b)
    }
  }
  // Less-than-or-equal, defined as equality or strict less-than.
  def leq(x:Nat,y:Nat): Boolean = {
    x==y || less(x,y)
  }
  // Addition, by recursion on the first argument.
  def plus(x:Nat,y:Nat): Nat = {
    (x,y) match {
      case (zero(),n) => n
      case (succ(n),m) => succ(plus(n,m))
    }
  }
  // Truncated subtraction: returns zero() whenever y >= x.
  def minus(x:Nat,y:Nat): Nat = {
    (x,y) match {
      case (zero(),n) => zero()
      case (n,zero()) => n
      case (succ(n),succ(m)) => minus(n,m)
    }
  }
  // Multiplication, defined in terms of plus.
  def mult(x:Nat,y:Nat): Nat = {
    (x,y) match {
      case (zero(),n) => zero()
      case (succ(n),m) => plus(mult(n,m),m)
    }
  }
  // Maximum of two naturals.
  def nmax(n:Nat,m:Nat): Nat = {
    if(less(n,m)) m else n
  }
  // Minimum of two naturals.
  def nmin(n:Nat,m:Nat): Nat = {
    if(less(n,m)) n else m
  }
def append(l1:Lst,l2:Lst): Lst = {
(l1,l2) match {
case (nil(),x) => x
case (cons(x,y),z) => cons(x,append(y,z))
}
}
def len(l:Lst): Nat = {
(l) match {
case (nil()) => zero()
case (cons(_,y)) => succ(len(y))
}
}
def drop(n:Nat,l:Lst): Lst = {
(n,l) match {
case (_,nil()) => nil()
case (zero(),x) => x
case (succ(x),cons(_,z)) => drop(x,z)
}
}
def take(n:Nat,l:Lst): Lst = {
(n,l) match {
case (_,nil()) => nil()
case (zero(),x) => nil()
case (succ(x),cons(y,z)) => cons(y,take(x,z))
}
}
def count(n:Nat,l:Lst): Nat = {
(n,l) match {
case (_,nil()) => zero()
case (x,cons(y,z)) => if (x == y) succ(count(x,z)) else count(x,z)
}
}
def last(l:Lst): Nat = {
(l) match {
case (nil()) => zero()
case (cons(x,y)) => if (y==nil()) x else last(y)
}
}
def butlast(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => if (y==nil()) nil() else cons(x,butlast(y))
}
}
def mem(n:Nat,l:Lst): Boolean = {
(n,l) match {
case (_,nil()) => false
case (x,cons(y,z)) => (x==y) || (mem(x,z))
}
}
def delete(n:Nat,l:Lst): Lst = {
(n,l) match {
case (_,nil()) => nil()
case (x,cons(y,z)) => if (x==y) delete(x,z) else cons(y,delete(x,z))
}
}
def rev(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => append(rev(y),cons(x,nil()))
}
}
/*
def lmap(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => cons(f(x),lmap(y))
}
}
def filter(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => if(P(x)) cons(x,filter(y)) else filter(y)
}
}
def dropWhile(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => if(P(x)) dropWhile(y) else cons(x,y)
}
}
def takeWhile(l:Lst): Lst = {
(l) match {
case (nil()) => nil()
case (cons(x,y)) => if(P(x)) cons(x,takeWhile(y)) else nil()
}
}
*/
  // Set-like insert: adds i only if no equal element is already present.
  def ins1(n:Nat,l:Lst): Lst = {
    (n,l) match {
      case (i,nil()) => cons(i,nil())
      case (i,cons(x,y)) => if (i==x) cons(x,y) else cons(x,ins1(i,y))
    }
  }
  // Ordered insertion: places i before the first element it is less than.
  def insort(n:Nat,l:Lst): Lst = {
    (n,l) match {
      case (i,nil()) => cons(i,nil())
      case (i,cons(x,y)) => if (less(i,x)) cons(i,cons(x,y)) else cons(x,insort(i,y))
    }
  }
  // True iff the list is sorted in non-decreasing order.
  def sorted(l:Lst): Boolean = {
    (l) match {
      case (nil()) => true
      case (cons(_,nil())) => true
      case (cons(x,cons(z,y))) => sorted(cons(z,y)) && leq(x,z)
    }
  }
  // Insertion sort built on insort (see goals G79/G80 for its correctness).
  def sort(l:Lst): Lst = {
    (l) match {
      case (nil()) => nil()
      case (cons(x,y)) => insort(x,sort(y))
    }
  }
def zip(l1:Lst,l2:Lst): ZLst = {
(l1,l2) match {
case (nil(),_) => znil()
case (_,nil()) => znil()
case (cons(x,y),cons(z,w)) => zcons(mkpair(x,z),zip(y,w))
}
}
def zappend(l1:ZLst,l2:ZLst): ZLst = {
(l1,l2) match {
case (znil(),x) => x
case (zcons(x,y),z) => zcons(x,zappend(y,z))
}
}
def zdrop(n:Nat,l:ZLst): ZLst = {
(n,l) match {
case (_,znil()) => znil()
case (zero(),x) => x
case (succ(x),zcons(_,z)) => zdrop(x,z)
}
}
def ztake(n:Nat,l:ZLst): ZLst = {
(n,l) match {
case (_,znil()) => znil()
case (zero(),_) => znil()
case (succ(x),zcons(y,z)) => zcons(y,ztake(x,z))
}
}
def zrev(l:ZLst): ZLst = {
(l) match {
case (znil()) => znil()
case (zcons(x,y)) => zappend(zrev(y),zcons(x,znil()))
}
}
def mirror(t:Tree): Tree = {
(t) match {
case (leaf()) => leaf()
case (node(x,y,z)) => node(x,mirror(z),mirror(y))
}
}
def height(t:Tree): Nat = {
(t) match {
case (leaf()) => zero()
case (node(x,y,z)) => succ(nmax(height(y),height(z)))
}
}
//-----------------------------------------------------------------------------
// GOALS
//-----------------------------------------------------------------------------
@induct
def G1(n:Nat, xs:Lst): Boolean = { (append(take(n, xs), drop(n, xs)) == xs) }.holds
@induct
def G2(n:Nat, l:Lst, m:Lst): Boolean = { (plus(count(n, l), count(n, m)) == count(n, append(l, m))) }.holds
@induct
def G3(n:Nat, l:Lst, m:Lst): Boolean = { leq(count(n, l), count(n, append(l, m))) }.holds
@induct
def G4(n:Nat, l:Lst): Boolean = { (plus(succ(zero()), count(n, l)) == count(n, cons(n, l))) }.holds
@induct
def G5(n:Nat, x:Nat, l:Lst): Boolean = { (!(n == x) || (plus(succ(zero()), count(n, l)) == count(n, cons(x, l)))) }.holds
@induct
def G6(n:Nat, m:Nat): Boolean = { (minus(n, plus(n, m)) == zero()) }.holds
@induct
def G7(n:Nat, m:Nat): Boolean = { (minus(plus(n, m), n) == m) }.holds
@induct
def G8(k:Nat, n:Nat, m:Nat): Boolean = { (minus(plus(k, m), plus(k, n)) == minus(m, n)) }.holds
@induct
def G9(i:Nat, j:Nat, k:Nat): Boolean = { (minus(minus(i, j), k) == minus(i, plus(j, k))) }.holds
@induct
def G10(m:Nat): Boolean = { (minus(m, m) == zero()) }.holds
@induct
def G11(xs:Lst): Boolean = { (drop(zero(), xs) == xs) }.holds
//def G12(n:Nat, xs:Lst): Boolean = { (drop(n, lmap(xs)) == lmap(drop(n, xs))) }.holds
@induct
def G13(n:Nat, x:Nat, xs:Lst): Boolean = { (drop(succ(n), cons(x, xs)) == drop(n, xs)) }.holds
//def G14(xs:Lst, ys:Lst): Boolean = { (filter(append(xs, ys)) == append(filter(xs), filter(ys))) }.holds
@induct
def G15(x:Nat, l:Lst): Boolean = { (len(insort(x, l)) == succ(len(l))) }.holds
@induct
def G16(xs:Lst, x:Nat): Boolean = { (!(xs == nil()) || (last(cons(x, xs)) == x)) }.holds
@induct
def G17(n:Nat): Boolean = { (leq(n, zero()) == (n == zero())) }.holds
@induct
def G18(i:Nat, m:Nat): Boolean = { less(i, succ(plus(i, m))) }.holds
@induct
def G19(n:Nat, xs:Lst): Boolean = { (len(drop(n, xs)) == minus(len(xs), n)) }.holds
@induct
def G20(l:Lst): Boolean = { (len(sort(l)) == len(l)) }.holds
@induct
def G21(n:Nat, m:Nat): Boolean = { leq(n, plus(n, m)) }.holds
@induct
def G22(a:Nat, b:Nat, c:Nat): Boolean = { (nmax(nmax(a, b), c) == nmax(a, nmax(b, c))) }.holds
@induct
def G23(a:Nat, b:Nat): Boolean = { (nmax(a, b) == nmax(b, a)) }.holds
@induct
def G24(a:Nat, b:Nat): Boolean = { ((nmax(a, b) == a) == leq(b, a)) }.holds
@induct
def G25(a:Nat, b:Nat): Boolean = { ((nmax(a, b) == b) == leq(a, b)) }.holds
@induct
def G26(x:Nat, l:Lst, t:Lst): Boolean = { (!mem(x, l) || mem(x, append(l, t))) }.holds
@induct
def G27(x:Nat, l:Lst, t:Lst): Boolean = { (!mem(x, t) || mem(x, append(l, t))) }.holds
@induct
def G28(x:Nat, l:Lst): Boolean = { mem(x, append(l, cons(x, nil()))) }.holds
@induct
def G29(x:Nat, l:Lst): Boolean = { mem(x, ins1(x, l)) }.holds
@induct
def G30(x:Nat, l:Lst): Boolean = { mem(x, insort(x, l)) }.holds
@induct
def G31(a:Nat, b:Nat, c:Nat): Boolean = { (nmin(nmin(a, b), c) == nmin(a, nmin(b, c))) }.holds
@induct
def G32(a:Nat, b:Nat): Boolean = { (nmin(a, b) == nmin(b, a)) }.holds
@induct
def G33(a:Nat, b:Nat): Boolean = { ((nmin(a, b) == a) == leq(a, b)) }.holds
@induct
def G34(a:Nat, b:Nat): Boolean = { ((nmin(a, b) == b) == leq(b, a)) }.holds
//def G35(xs:Lst): Boolean = { (!(???) || (dropWhile(xs) == xs)) }.holds
//def G36(xs:Lst): Boolean = { (!(???) || (takeWhile(xs) == xs)) }.holds
@induct
def G37(x:Nat, l:Lst): Boolean = { (!mem(x, delete(x, l))) }.holds
@induct
def G38(n:Nat, x:Lst): Boolean = { (count(n, append(x, cons(n, nil()))) == succ(count(n, x))) }.holds
@induct
def G39(n:Nat, h:Nat, t:Lst): Boolean = { (plus(count(n, cons(h, nil())), count(n, t)) == count(n, cons(h, t))) }.holds
@induct
def G40(xs:Lst): Boolean = { (take(zero(), xs) == nil()) }.holds
//def G41(n:Nat, xs:Lst): Boolean = { (take(n, lmap(xs)) == lmap(take(n, xs))) }.holds
@induct
def G42(n:Nat, x:Nat, xs:Lst): Boolean = { (take(succ(n), cons(x, xs)) == cons(x, take(n, xs))) }.holds
//def G43(xs:Lst): Boolean = { (append(takeWhile(xs), dropWhile(xs)) == xs) }.holds
@induct
def G44(x:Nat, xs:Lst, ys:Lst): Boolean = { (zip(cons(x, xs), ys) == (if ((ys == nil())) znil() else zcons(mkpair(x, head(ys)), zip(xs, tail(ys))))) }.holds
@induct
def G45(x:Nat, xs:Lst, y:Nat, ys:Lst): Boolean = { (zip(cons(x, xs), cons(y, ys)) == zcons(mkpair(x, y), zip(xs, ys))) }.holds
@induct
def G46(ys:Lst): Boolean = { (zip(nil(), ys) == znil()) }.holds
@induct
def G47(a:Tree): Boolean = { (height(mirror(a)) == height(a)) }.holds
@induct
def G48(xs:Lst): Boolean = { (xs == nil() || (butlast(append(xs, cons(last(xs), nil()))) == xs)) }.holds
@induct
def G49(xs:Lst, ys:Lst): Boolean = { (butlast(append(xs, ys)) == (if ((ys == nil())) butlast(xs) else append(xs, butlast(ys)))) }.holds
@induct
def G50(xs:Lst): Boolean = { (butlast(xs) == take(minus(len(xs), succ(zero())), xs)) }.holds
@induct
def G51(xs:Lst, x:Nat): Boolean = { (butlast(append(xs, cons(x, nil()))) == xs) }.holds
@induct
def G52(n:Nat, l:Lst): Boolean = { (count(n, l) == count(n, rev(l))) }.holds
@induct
def G53(x:Nat, l:Lst): Boolean = { (count(x, l) == count(x, sort(l))) }.holds
@induct
def G54(m:Nat, n:Nat): Boolean = { (minus(plus(m, n), n) == m) }.holds
@induct
def G55(i:Nat, j:Nat, k:Nat): Boolean = { (minus(minus(i, j), k) == minus(minus(i, k), j)) }.holds
@induct
def G56(n:Nat, xs:Lst, ys:Lst): Boolean = { (drop(n, append(xs, ys)) == append(drop(n, xs), drop(minus(n, len(xs)), ys))) }.holds
@induct
def G57(n:Nat, m:Nat, xs:Lst): Boolean = { (drop(n, drop(m, xs)) == drop(plus(n, m), xs)) }.holds
@induct
def G58(n:Nat, m:Nat, xs:Lst): Boolean = { (drop(n, take(m, xs)) == take(minus(m, n), drop(n, xs))) }.holds
@induct
def G59(n:Nat, xs:Lst, ys:Lst): Boolean = { (zdrop(n, zip(xs, ys)) == zip(drop(n, xs), drop(n, ys))) }.holds
@induct
def G60(xs:Lst, ys:Lst): Boolean = { (!(ys == nil()) || (last(append(xs, ys)) == last(xs))) }.holds
@induct
def G61(xs:Lst, ys:Lst): Boolean = { (ys == nil() || (last(append(xs, ys)) == last(ys))) }.holds
@induct
def G62(xs:Lst, ys:Lst): Boolean = { (last(append(xs, ys)) == (if ((ys == nil())) last(xs) else last(ys))) }.holds
@induct
def G63(x:Nat, xs:Lst): Boolean = { (xs == nil() || (last(cons(x, xs)) == last(xs))) }.holds
@induct
def G64(n:Nat, xs:Lst): Boolean = { (!less(n, len(xs)) || (last(drop(n, xs)) == last(xs))) }.holds
@induct
def G65(x:Nat, xs:Lst): Boolean = { (last(append(xs, cons(x, nil()))) == x) }.holds
@induct
def G66(i:Nat, m:Nat): Boolean = { less(i, succ(plus(m, i))) }.holds
//def G67(xs:Lst): Boolean = { leq(len(filter(xs)), len(xs)) }.holds
@induct
def G68(xs:Lst): Boolean = { (len(butlast(xs)) == minus(len(xs), succ(zero()))) }.holds
@induct
def G69(x:Nat, l:Lst): Boolean = { leq(len(delete(x, l)), len(l)) }.holds
@induct
def G70(n:Nat, m:Nat): Boolean = { leq(n, plus(m, n)) }.holds
@induct
def G71(n:Nat, m:Nat): Boolean = { (!leq(m, n) || leq(m, succ(n))) }.holds
@induct
def G72(x:Nat, y:Nat, l:Lst): Boolean = { (!less(x, y) || (mem(x, insort(y, l)) == mem(x, l))) }.holds
@induct
def G73(x:Nat, y:Nat, l:Lst): Boolean = { (x == y || (mem(x, insort(y, l)) == mem(x, l))) }.holds
@induct
def G74(i:Nat, xs:Lst): Boolean = { (rev(drop(i, xs)) == take(minus(len(xs), i), rev(xs))) }.holds
//def G75(xs:Lst): Boolean = { (rev(filter(xs)) == filter(rev(xs))) }.holds
@induct
def G76(i:Nat, xs:Lst): Boolean = { (rev(take(i, xs)) == drop(minus(len(xs), i), rev(xs))) }.holds
@induct
def G77(n:Nat, h:Nat, x:Lst): Boolean = { (n == h || (count(n, append(x, cons(h, nil()))) == count(n, x))) }.holds
@induct
def G78(n:Nat, h:Nat, t:Lst): Boolean = { (plus(count(n, t), count(n, cons(h, nil()))) == count(n, cons(h, t))) }.holds
@induct
def G79(x:Nat, l:Lst): Boolean = { (!sorted(l) || sorted(insort(x, l))) }.holds
@induct
def G80(l:Lst): Boolean = { sorted(sort(l)) }.holds
@induct
def G81(m:Nat, n:Nat, k:Nat): Boolean = { (minus(minus(succ(m), n), succ(k)) == minus(minus(m, n), k)) }.holds
@induct
def G82(n:Nat, xs:Lst, ys:Lst): Boolean = { (take(n, append(xs, ys)) == append(take(n, xs), take(minus(n, len(xs)), ys))) }.holds
@induct
def G83(n:Nat, m:Nat, xs:Lst): Boolean = { (take(n, drop(m, xs)) == drop(m, take(plus(n, m), xs))) }.holds
@induct
def G84(n:Nat, xs:Lst, ys:Lst): Boolean = { (ztake(n, zip(xs, ys)) == zip(take(n, xs), take(n, ys))) }.holds
@induct
def G85(xs:Lst, ys:Lst, zs:Lst): Boolean = { (zip(append(xs, ys), zs) == zappend(zip(xs, take(len(xs), zs)), zip(ys, drop(len(xs), zs)))) }.holds
@induct
def G86(xs:Lst, ys:Lst, zs:Lst): Boolean = { (zip(xs, append(ys, zs)) == zappend(zip(take(len(ys), xs), ys), zip(drop(len(ys), xs), zs))) }.holds
@induct
def G87(xs:Lst, ys:Lst): Boolean = { (!(len(xs) == len(ys)) || (zip(rev(xs), rev(ys)) == zrev(zip(xs, ys)))) }.holds
}
| ericpony/scala-examples | testcases/verification/proof-goals/Goals01_induct.scala | Scala | mit | 16,516 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.python
import java.io._
import java.net._
import java.nio.charset.StandardCharsets
import java.util.{ArrayList => JArrayList, List => JList, Map => JMap}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.language.existentials
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.mapred.{InputFormat, JobConf, OutputFormat}
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, OutputFormat => NewOutputFormat}
import org.apache.spark._
import org.apache.spark.api.java.{JavaPairRDD, JavaRDD, JavaSparkContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.input.PortableDataStream
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.util._
/**
 * An RDD whose compute() pipes each partition of `parent` through a Python
 * worker running `func`, yielding the raw byte arrays the worker sends back.
 */
private[spark] class PythonRDD(
    parent: RDD[_],
    func: PythonFunction,
    preservePartitoning: Boolean)
  extends RDD[Array[Byte]](parent) {
  // Buffer size for the streams connected to the Python worker socket.
  val bufferSize = conf.getInt("spark.buffer.size", 65536)
  // Whether idle Python workers may be kept around and reused across tasks.
  val reuse_worker = conf.getBoolean("spark.python.worker.reuse", true)
  override def getPartitions: Array[Partition] = firstParent.partitions
  // Only propagate the parent's partitioner when explicitly requested.
  override val partitioner: Option[Partitioner] = {
    if (preservePartitoning) firstParent.partitioner else None
  }
  val asJavaRDD: JavaRDD[Array[Byte]] = JavaRDD.fromRDD(this)
  override def compute(split: Partition, context: TaskContext): Iterator[Array[Byte]] = {
    val runner = PythonRunner(func, bufferSize, reuse_worker)
    runner.compute(firstParent.iterator(split, context), split.index, context)
  }
}
/**
 * A wrapper for a Python function, contains all necessary context to run the function in Python
 * runner.
 *
 * @param command serialized command bytes shipped to the worker to run
 * @param envVars environment variables set for the Python worker process
 * @param pythonIncludes Python includes (*.zip and *.egg files) sent to workers
 * @param pythonExec the Python executable used to launch worker processes
 * @param pythonVer the Python version of the driver, written to the worker stream
 * @param broadcastVars broadcast variables referenced by the function
 * @param accumulator accumulator collecting the updates workers send back
 */
private[spark] case class PythonFunction(
    command: Array[Byte],
    envVars: JMap[String, String],
    pythonIncludes: JList[String],
    pythonExec: String,
    pythonVer: String,
    broadcastVars: JList[Broadcast[PythonBroadcast]],
    accumulator: PythonAccumulatorV2)
/**
 * A wrapper for chained Python functions (from bottom to top).
 * @param funcs the chain of functions to apply, innermost (bottom) first
 */
private[spark] case class ChainedPythonFunctions(funcs: Seq[PythonFunction])
private[spark] object PythonRunner {
  /**
   * Creates a runner for a single non-UDF Python function, with a trivial
   * single-chain function list and argument-offset table.
   */
  def apply(func: PythonFunction, bufferSize: Int, reuse_worker: Boolean): PythonRunner = {
    new PythonRunner(
      Seq(ChainedPythonFunctions(Seq(func))), bufferSize, reuse_worker, false, Array(Array(0)))
  }
}
/**
* A helper class to run Python mapPartition/UDFs in Spark.
*
* funcs is a list of independent Python functions, each one of them is a list of chained Python
* functions (from bottom to top).
*/
private[spark] class PythonRunner(
funcs: Seq[ChainedPythonFunctions],
bufferSize: Int,
reuse_worker: Boolean,
isUDF: Boolean,
argOffsets: Array[Array[Int]])
extends Logging {
require(funcs.length == argOffsets.length, "argOffsets should have the same length as funcs")
// All the Python functions should have the same exec, version and envvars.
private val envVars = funcs.head.funcs.head.envVars
private val pythonExec = funcs.head.funcs.head.pythonExec
private val pythonVer = funcs.head.funcs.head.pythonVer
// TODO: support accumulator in multiple UDF
private val accumulator = funcs.head.funcs.head.accumulator
def compute(
inputIterator: Iterator[_],
partitionIndex: Int,
context: TaskContext): Iterator[Array[Byte]] = {
val startTime = System.currentTimeMillis
val env = SparkEnv.get
val localdir = env.blockManager.diskBlockManager.localDirs.map(f => f.getPath()).mkString(",")
envVars.put("SPARK_LOCAL_DIRS", localdir) // it's also used in monitor thread
if (reuse_worker) {
envVars.put("SPARK_REUSE_WORKER", "1")
}
val worker: Socket = env.createPythonWorker(pythonExec, envVars.asScala.toMap)
// Whether is the worker released into idle pool
@volatile var released = false
// Start a thread to feed the process input from our parent's iterator
val writerThread = new WriterThread(env, worker, inputIterator, partitionIndex, context)
context.addTaskCompletionListener { context =>
writerThread.shutdownOnTaskCompletion()
if (!reuse_worker || !released) {
try {
worker.close()
} catch {
case e: Exception =>
logWarning("Failed to close worker socket", e)
}
}
}
writerThread.start()
new MonitorThread(env, worker, context).start()
// Return an iterator that read lines from the process's stdout
val stream = new DataInputStream(new BufferedInputStream(worker.getInputStream, bufferSize))
val stdoutIterator = new Iterator[Array[Byte]] {
override def next(): Array[Byte] = {
val obj = _nextObj
if (hasNext) {
_nextObj = read()
}
obj
}
private def read(): Array[Byte] = {
if (writerThread.exception.isDefined) {
throw writerThread.exception.get
}
try {
stream.readInt() match {
case length if length > 0 =>
val obj = new Array[Byte](length)
stream.readFully(obj)
obj
case 0 => Array.empty[Byte]
case SpecialLengths.TIMING_DATA =>
// Timing data from worker
val bootTime = stream.readLong()
val initTime = stream.readLong()
val finishTime = stream.readLong()
val boot = bootTime - startTime
val init = initTime - bootTime
val finish = finishTime - initTime
val total = finishTime - startTime
logInfo("Times: total = %s, boot = %s, init = %s, finish = %s".format(total, boot,
init, finish))
val memoryBytesSpilled = stream.readLong()
val diskBytesSpilled = stream.readLong()
context.taskMetrics.incMemoryBytesSpilled(memoryBytesSpilled)
context.taskMetrics.incDiskBytesSpilled(diskBytesSpilled)
read()
case SpecialLengths.PYTHON_EXCEPTION_THROWN =>
// Signals that an exception has been thrown in python
val exLength = stream.readInt()
val obj = new Array[Byte](exLength)
stream.readFully(obj)
throw new PythonException(new String(obj, StandardCharsets.UTF_8),
writerThread.exception.getOrElse(null))
case SpecialLengths.END_OF_DATA_SECTION =>
// We've finished the data section of the output, but we can still
// read some accumulator updates:
val numAccumulatorUpdates = stream.readInt()
(1 to numAccumulatorUpdates).foreach { _ =>
val updateLen = stream.readInt()
val update = new Array[Byte](updateLen)
stream.readFully(update)
accumulator.add(update)
}
// Check whether the worker is ready to be re-used.
if (stream.readInt() == SpecialLengths.END_OF_STREAM) {
if (reuse_worker) {
env.releasePythonWorker(pythonExec, envVars.asScala.toMap, worker)
released = true
}
}
null
}
} catch {
case e: Exception if context.isInterrupted =>
logDebug("Exception thrown after task interruption", e)
throw new TaskKilledException(context.getKillReason().getOrElse("unknown reason"))
case e: Exception if env.isStopped =>
logDebug("Exception thrown after context is stopped", e)
null // exit silently
case e: Exception if writerThread.exception.isDefined =>
logError("Python worker exited unexpectedly (crashed)", e)
logError("This may have been caused by a prior exception:", writerThread.exception.get)
throw writerThread.exception.get
case eof: EOFException =>
throw new SparkException("Python worker exited unexpectedly (crashed)", eof)
}
}
var _nextObj = read()
override def hasNext: Boolean = _nextObj != null
}
new InterruptibleIterator(context, stdoutIterator)
}
/**
* The thread responsible for writing the data from the PythonRDD's parent iterator to the
* Python process.
*/
class WriterThread(
env: SparkEnv,
worker: Socket,
inputIterator: Iterator[_],
partitionIndex: Int,
context: TaskContext)
extends Thread(s"stdout writer for $pythonExec") {
@volatile private var _exception: Exception = null
private val pythonIncludes = funcs.flatMap(_.funcs.flatMap(_.pythonIncludes.asScala)).toSet
private val broadcastVars = funcs.flatMap(_.funcs.flatMap(_.broadcastVars.asScala))
setDaemon(true)
/** Contains the exception thrown while writing the parent iterator to the Python process. */
def exception: Option[Exception] = Option(_exception)
/** Terminates the writer thread, ignoring any exceptions that may occur due to cleanup. */
def shutdownOnTaskCompletion() {
assert(context.isCompleted)
this.interrupt()
}
override def run(): Unit = Utils.logUncaughtExceptions {
try {
TaskContext.setTaskContext(context)
val stream = new BufferedOutputStream(worker.getOutputStream, bufferSize)
val dataOut = new DataOutputStream(stream)
// Partition index
dataOut.writeInt(partitionIndex)
// Python version of driver
PythonRDD.writeUTF(pythonVer, dataOut)
// Write out the TaskContextInfo
dataOut.writeInt(context.stageId())
dataOut.writeInt(context.partitionId())
dataOut.writeInt(context.attemptNumber())
dataOut.writeLong(context.taskAttemptId())
// sparkFilesDir
PythonRDD.writeUTF(SparkFiles.getRootDirectory(), dataOut)
// Python includes (*.zip and *.egg files)
dataOut.writeInt(pythonIncludes.size)
for (include <- pythonIncludes) {
PythonRDD.writeUTF(include, dataOut)
}
// Broadcast variables
val oldBids = PythonRDD.getWorkerBroadcasts(worker)
val newBids = broadcastVars.map(_.id).toSet
// number of different broadcasts
val toRemove = oldBids.diff(newBids)
val cnt = toRemove.size + newBids.diff(oldBids).size
dataOut.writeInt(cnt)
for (bid <- toRemove) {
// remove the broadcast from worker
dataOut.writeLong(- bid - 1) // bid >= 0
oldBids.remove(bid)
}
for (broadcast <- broadcastVars) {
if (!oldBids.contains(broadcast.id)) {
// send new broadcast
dataOut.writeLong(broadcast.id)
PythonRDD.writeUTF(broadcast.value.path, dataOut)
oldBids.add(broadcast.id)
}
}
dataOut.flush()
// Serialized command:
if (isUDF) {
dataOut.writeInt(1)
dataOut.writeInt(funcs.length)
funcs.zip(argOffsets).foreach { case (chained, offsets) =>
dataOut.writeInt(offsets.length)
offsets.foreach { offset =>
dataOut.writeInt(offset)
}
dataOut.writeInt(chained.funcs.length)
chained.funcs.foreach { f =>
dataOut.writeInt(f.command.length)
dataOut.write(f.command)
}
}
} else {
dataOut.writeInt(0)
val command = funcs.head.funcs.head.command
dataOut.writeInt(command.length)
dataOut.write(command)
}
// Data values
PythonRDD.writeIteratorToStream(inputIterator, dataOut)
dataOut.writeInt(SpecialLengths.END_OF_DATA_SECTION)
dataOut.writeInt(SpecialLengths.END_OF_STREAM)
dataOut.flush()
} catch {
case e: Exception if context.isCompleted || context.isInterrupted =>
logDebug("Exception thrown after task completion (likely due to cleanup)", e)
if (!worker.isClosed) {
Utils.tryLog(worker.shutdownOutput())
}
case e: Exception =>
// We must avoid throwing exceptions here, because the thread uncaught exception handler
// will kill the whole executor (see org.apache.spark.executor.Executor).
_exception = e
if (!worker.isClosed) {
Utils.tryLog(worker.shutdownOutput())
}
}
}
}
  /**
   * It is necessary to have a monitor thread for python workers if the user cancels with
   * interrupts disabled. In that case we will need to explicitly kill the worker, otherwise the
   * threads can block indefinitely.
   */
  class MonitorThread(env: SparkEnv, worker: Socket, context: TaskContext)
    extends Thread(s"Worker Monitor for $pythonExec") {
    setDaemon(true) // daemon: must not keep the JVM alive on its own
    override def run() {
      // Kill the worker if it is interrupted, checking until task completion.
      // TODO: This has a race condition if interruption occurs, as completed may still become true.
      // Poll every 2 seconds rather than blocking on a notification.
      while (!context.isInterrupted && !context.isCompleted) {
        Thread.sleep(2000)
      }
      if (!context.isCompleted) {
        try {
          logWarning("Incomplete task interrupted: Attempting to kill Python Worker")
          env.destroyPythonWorker(pythonExec, envVars.asScala.toMap, worker)
        } catch {
          case e: Exception =>
            logError("Exception when trying to kill worker", e)
        }
      }
    }
  }
}
/** Thrown for exceptions in user Python code. */
private class PythonException(msg: String, cause: Exception) extends RuntimeException(msg, cause)
/**
 * Form an RDD[(Array[Byte], Array[Byte])] from key-value pairs returned from Python.
 * This is used by PySpark's shuffle operations.
 */
private class PairwiseRDD(prev: RDD[Array[Byte]]) extends RDD[(Long, Array[Byte])](prev) {
  override def getPartitions: Array[Partition] = prev.partitions
  override val partitioner: Option[Partitioner] = prev.partitioner
  // The parent stream alternates key/value byte arrays: pair them up and
  // deserialize each key into a Long. An odd-length stream is a protocol
  // error and raises SparkException.
  override def compute(split: Partition, context: TaskContext): Iterator[(Long, Array[Byte])] =
    prev.iterator(split, context).grouped(2).map {
      case Seq(a, b) => (Utils.deserializeLongValue(a), b)
      case x => throw new SparkException("PairwiseRDD: unexpected value: " + x)
    }
  val asJavaPairRDD : JavaPairRDD[Long, Array[Byte]] = JavaPairRDD.fromRDD(this)
}
/**
 * Negative length markers embedded in the framed stream between the JVM and the
 * Python worker. A normal frame starts with a non-negative payload length; these
 * values signal control events instead. Must stay in sync with the Python side.
 */
private object SpecialLengths {
  val END_OF_DATA_SECTION = -1
  val PYTHON_EXCEPTION_THROWN = -2
  val TIMING_DATA = -3
  val END_OF_STREAM = -4
  val NULL = -5
}
private[spark] object PythonRDD extends Logging {

  // remember the broadcasts sent to each worker
  private val workerBroadcasts = new mutable.WeakHashMap[Socket, mutable.Set[Long]]()

  /** Returns (creating if absent) the set of broadcast ids already sent to `worker`. */
  def getWorkerBroadcasts(worker: Socket): mutable.Set[Long] = {
    synchronized {
      workerBroadcasts.getOrElseUpdate(worker, new mutable.HashSet[Long]())
    }
  }

  /**
   * Return an RDD of values from an RDD of (Long, Array[Byte]), with preservePartitions=true
   *
   * This is useful for PySpark to have the partitioner after partitionBy()
   */
  def valueOfPair(pair: JavaPairRDD[Long, Array[Byte]]): JavaRDD[Array[Byte]] = {
    pair.rdd.mapPartitions(it => it.map(_._2), true)
  }

  /**
   * Adapter for calling SparkContext#runJob from Python.
   *
   * This method will serve an iterator of an array that contains all elements in the RDD
   * (effectively a collect()), but allows you to run on a certain subset of partitions,
   * or to enable local execution.
   *
   * @return the port number of a local socket which serves the data collected from this job.
   */
  def runJob(
      sc: SparkContext,
      rdd: JavaRDD[Array[Byte]],
      partitions: JArrayList[Int]): Int = {
    type ByteArray = Array[Byte]
    type UnrolledPartition = Array[ByteArray]
    val allPartitions: Array[UnrolledPartition] =
      sc.runJob(rdd, (x: Iterator[ByteArray]) => x.toArray, partitions.asScala)
    val flattenedPartition: UnrolledPartition = Array.concat(allPartitions: _*)
    serveIterator(flattenedPartition.iterator,
      s"serve RDD ${rdd.id} with partitions ${partitions.asScala.mkString(",")}")
  }

  /**
   * A helper function to collect an RDD as an iterator, then serve it via socket.
   *
   * @return the port number of a local socket which serves the data collected from this job.
   */
  def collectAndServe[T](rdd: RDD[T]): Int = {
    serveIterator(rdd.collect().iterator, s"serve RDD ${rdd.id}")
  }

  /**
   * Serves the RDD through `toLocalIterator` (one partition at a time) instead of a
   * full collect, bounding driver memory usage.
   *
   * @return the port number of the local socket serving the data.
   */
  def toLocalIteratorAndServe[T](rdd: RDD[T]): Int = {
    serveIterator(rdd.toLocalIterator, s"serve toLocalIterator")
  }

  /**
   * Reads length-prefixed byte records (int length, then that many bytes) written by
   * Python into `filename` and parallelizes them into a JavaRDD.
   */
  def readRDDFromFile(sc: JavaSparkContext, filename: String, parallelism: Int):
      JavaRDD[Array[Byte]] = {
    val file = new DataInputStream(new FileInputStream(filename))
    try {
      val objs = new mutable.ArrayBuffer[Array[Byte]]
      try {
        while (true) {
          val length = file.readInt()
          val obj = new Array[Byte](length)
          file.readFully(obj)
          objs += obj
        }
      } catch {
        // EOF is the normal loop-exit condition: the record stream has no trailer.
        case eof: EOFException => // No-op
      }
      JavaRDD.fromRDD(sc.sc.parallelize(objs, parallelism))
    } finally {
      file.close()
    }
  }

  /** Wraps a file written by Python as a [[PythonBroadcast]] broadcast variable. */
  def readBroadcastFromFile(sc: JavaSparkContext, path: String): Broadcast[PythonBroadcast] = {
    sc.broadcast(new PythonBroadcast(path))
  }

  /**
   * Writes the elements of `iter` to `dataOut` in the framed format understood by the
   * Python worker: a NULL marker, length-prefixed byte arrays, UTF-8 strings, and
   * (key, value) tuples written as two consecutive values.
   */
  def writeIteratorToStream[T](iter: Iterator[T], dataOut: DataOutputStream) {
    def write(obj: Any): Unit = obj match {
      case null =>
        dataOut.writeInt(SpecialLengths.NULL)
      case arr: Array[Byte] =>
        dataOut.writeInt(arr.length)
        dataOut.write(arr)
      case str: String =>
        writeUTF(str, dataOut)
      case stream: PortableDataStream =>
        // Materialize the stream and recurse so it is framed as a byte array.
        write(stream.toArray())
      case (key, value) =>
        write(key)
        write(value)
      case other =>
        throw new SparkException("Unexpected element type " + other.getClass)
    }
    iter.foreach(write)
  }

  /**
   * Create an RDD from a path using [[org.apache.hadoop.mapred.SequenceFileInputFormat]],
   * key and value class.
   * A key and/or value converter class can optionally be passed in
   * (see [[org.apache.spark.api.python.Converter]])
   */
  def sequenceFile[K, V](
      sc: JavaSparkContext,
      path: String,
      keyClassMaybeNull: String,
      valueClassMaybeNull: String,
      keyConverterClass: String,
      valueConverterClass: String,
      minSplits: Int,
      batchSize: Int): JavaRDD[Array[Byte]] = {
    // Text is the default key/value type when the caller passes null from Python.
    val keyClass = Option(keyClassMaybeNull).getOrElse("org.apache.hadoop.io.Text")
    val valueClass = Option(valueClassMaybeNull).getOrElse("org.apache.hadoop.io.Text")
    val kc = Utils.classForName(keyClass).asInstanceOf[Class[K]]
    val vc = Utils.classForName(valueClass).asInstanceOf[Class[V]]
    val rdd = sc.sc.sequenceFile[K, V](path, kc, vc, minSplits)
    // Broadcast the Hadoop conf so each task can construct converters without re-serializing it.
    val confBroadcasted = sc.sc.broadcast(new SerializableConfiguration(sc.hadoopConfiguration()))
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new WritableToJavaConverter(confBroadcasted))
    JavaRDD.fromRDD(SerDeUtil.pairRDDToPython(converted, batchSize))
  }

  /**
   * Create an RDD from a file path, using an arbitrary [[org.apache.hadoop.mapreduce.InputFormat]],
   * key and value class.
   * A key and/or value converter class can optionally be passed in
   * (see [[org.apache.spark.api.python.Converter]])
   */
  def newAPIHadoopFile[K, V, F <: NewInputFormat[K, V]](
      sc: JavaSparkContext,
      path: String,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String],
      batchSize: Int): JavaRDD[Array[Byte]] = {
    val mergedConf = getMergedConf(confAsMap, sc.hadoopConfiguration())
    val rdd =
      newAPIHadoopRDDFromClassNames[K, V, F](sc,
        Some(path), inputFormatClass, keyClass, valueClass, mergedConf)
    val confBroadcasted = sc.sc.broadcast(new SerializableConfiguration(mergedConf))
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new WritableToJavaConverter(confBroadcasted))
    JavaRDD.fromRDD(SerDeUtil.pairRDDToPython(converted, batchSize))
  }

  /**
   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map that is
   * passed in from Python, using an arbitrary [[org.apache.hadoop.mapreduce.InputFormat]],
   * key and value class.
   * A key and/or value converter class can optionally be passed in
   * (see [[org.apache.spark.api.python.Converter]])
   */
  def newAPIHadoopRDD[K, V, F <: NewInputFormat[K, V]](
      sc: JavaSparkContext,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String],
      batchSize: Int): JavaRDD[Array[Byte]] = {
    val conf = PythonHadoopUtil.mapToConf(confAsMap)
    val rdd =
      newAPIHadoopRDDFromClassNames[K, V, F](sc,
        None, inputFormatClass, keyClass, valueClass, conf)
    val confBroadcasted = sc.sc.broadcast(new SerializableConfiguration(conf))
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new WritableToJavaConverter(confBroadcasted))
    JavaRDD.fromRDD(SerDeUtil.pairRDDToPython(converted, batchSize))
  }

  /**
   * Common path for the new-API (mapreduce package) Hadoop readers above: resolves the
   * class names and dispatches to the file- or conf-based SparkContext constructor,
   * depending on whether `path` is supplied.
   */
  private def newAPIHadoopRDDFromClassNames[K, V, F <: NewInputFormat[K, V]](
      sc: JavaSparkContext,
      path: Option[String] = None,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      conf: Configuration): RDD[(K, V)] = {
    val kc = Utils.classForName(keyClass).asInstanceOf[Class[K]]
    val vc = Utils.classForName(valueClass).asInstanceOf[Class[V]]
    val fc = Utils.classForName(inputFormatClass).asInstanceOf[Class[F]]
    if (path.isDefined) {
      sc.sc.newAPIHadoopFile[K, V, F](path.get, fc, kc, vc, conf)
    } else {
      sc.sc.newAPIHadoopRDD[K, V, F](conf, fc, kc, vc)
    }
  }

  /**
   * Create an RDD from a file path, using an arbitrary [[org.apache.hadoop.mapred.InputFormat]],
   * key and value class.
   * A key and/or value converter class can optionally be passed in
   * (see [[org.apache.spark.api.python.Converter]])
   */
  def hadoopFile[K, V, F <: InputFormat[K, V]](
      sc: JavaSparkContext,
      path: String,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String],
      batchSize: Int): JavaRDD[Array[Byte]] = {
    val mergedConf = getMergedConf(confAsMap, sc.hadoopConfiguration())
    val rdd =
      hadoopRDDFromClassNames[K, V, F](sc,
        Some(path), inputFormatClass, keyClass, valueClass, mergedConf)
    val confBroadcasted = sc.sc.broadcast(new SerializableConfiguration(mergedConf))
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new WritableToJavaConverter(confBroadcasted))
    JavaRDD.fromRDD(SerDeUtil.pairRDDToPython(converted, batchSize))
  }

  /**
   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map
   * that is passed in from Python, using an arbitrary [[org.apache.hadoop.mapred.InputFormat]],
   * key and value class
   * A key and/or value converter class can optionally be passed in
   * (see [[org.apache.spark.api.python.Converter]])
   */
  def hadoopRDD[K, V, F <: InputFormat[K, V]](
      sc: JavaSparkContext,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String],
      batchSize: Int): JavaRDD[Array[Byte]] = {
    val conf = PythonHadoopUtil.mapToConf(confAsMap)
    val rdd =
      hadoopRDDFromClassNames[K, V, F](sc,
        None, inputFormatClass, keyClass, valueClass, conf)
    val confBroadcasted = sc.sc.broadcast(new SerializableConfiguration(conf))
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new WritableToJavaConverter(confBroadcasted))
    JavaRDD.fromRDD(SerDeUtil.pairRDDToPython(converted, batchSize))
  }

  /**
   * Common path for the old-API (mapred package) Hadoop readers above; mirrors
   * [[newAPIHadoopRDDFromClassNames]].
   */
  private def hadoopRDDFromClassNames[K, V, F <: InputFormat[K, V]](
      sc: JavaSparkContext,
      path: Option[String] = None,
      inputFormatClass: String,
      keyClass: String,
      valueClass: String,
      conf: Configuration) = {
    val kc = Utils.classForName(keyClass).asInstanceOf[Class[K]]
    val vc = Utils.classForName(valueClass).asInstanceOf[Class[V]]
    val fc = Utils.classForName(inputFormatClass).asInstanceOf[Class[F]]
    if (path.isDefined) {
      sc.sc.hadoopFile(path.get, fc, kc, vc)
    } else {
      sc.sc.hadoopRDD(new JobConf(conf), fc, kc, vc)
    }
  }

  /** Writes `str` as a length-prefixed UTF-8 byte sequence (the framing used by the worker). */
  def writeUTF(str: String, dataOut: DataOutputStream) {
    val bytes = str.getBytes(StandardCharsets.UTF_8)
    dataOut.writeInt(bytes.length)
    dataOut.write(bytes)
  }

  /**
   * Create a socket server and a background thread to serve the data in `items`,
   *
   * The socket server can only accept one connection, or close if no connection
   * in 3 seconds.
   *
   * Once a connection comes in, it tries to serialize all the data in `items`
   * and send them into this connection.
   *
   * The thread will terminate after all the data are sent or any exceptions happen.
   */
  def serveIterator[T](items: Iterator[T], threadName: String): Int = {
    // Bind to an ephemeral port on loopback only; backlog of 1 since a single client connects.
    val serverSocket = new ServerSocket(0, 1, InetAddress.getByName("localhost"))
    // Close the socket if no connection in 3 seconds
    serverSocket.setSoTimeout(3000)
    new Thread(threadName) {
      setDaemon(true)
      override def run() {
        try {
          val sock = serverSocket.accept()
          val out = new DataOutputStream(new BufferedOutputStream(sock.getOutputStream))
          Utils.tryWithSafeFinally {
            writeIteratorToStream(items, out)
          } {
            out.close()
          }
        } catch {
          case NonFatal(e) =>
            logError(s"Error while sending iterator", e)
        } finally {
          serverSocket.close()
        }
      }
    }.start()
    // Returned to Python so it knows where to connect.
    serverSocket.getLocalPort
  }

  /** Overlays the entries from `confAsMap` (sent from Python) onto `baseConf`. */
  private def getMergedConf(confAsMap: java.util.HashMap[String, String],
      baseConf: Configuration): Configuration = {
    val conf = PythonHadoopUtil.mapToConf(confAsMap)
    PythonHadoopUtil.mergeConfs(baseConf, conf)
  }

  /**
   * Infers post-conversion key/value classes by converting one sample element locally.
   *
   * @throws java.lang.UnsupportedOperationException (from `first()`) if `rdd` is empty —
   *         TODO confirm; inference requires at least one element.
   */
  private def inferKeyValueTypes[K, V](rdd: RDD[(K, V)], keyConverterClass: String = null,
      valueConverterClass: String = null): (Class[_], Class[_]) = {
    // Peek at an element to figure out key/value types. Since Writables are not serializable,
    // we cannot call first() on the converted RDD. Instead, we call first() on the original RDD
    // and then convert locally.
    val (key, value) = rdd.first()
    val (kc, vc) = getKeyValueConverters(keyConverterClass, valueConverterClass,
      new JavaToWritableConverter)
    (kc.convert(key).getClass, vc.convert(value).getClass)
  }

  /** Resolves both class names, or None if either is null (caller then falls back to inference). */
  private def getKeyValueTypes(keyClass: String, valueClass: String):
      Option[(Class[_], Class[_])] = {
    for {
      k <- Option(keyClass)
      v <- Option(valueClass)
    } yield (Utils.classForName(k), Utils.classForName(v))
  }

  /** Instantiates the named converters, substituting `defaultConverter` for null names. */
  private def getKeyValueConverters(keyConverterClass: String, valueConverterClass: String,
      defaultConverter: Converter[Any, Any]): (Converter[Any, Any], Converter[Any, Any]) = {
    val keyConverter = Converter.getInstance(Option(keyConverterClass), defaultConverter)
    val valueConverter = Converter.getInstance(Option(valueConverterClass), defaultConverter)
    (keyConverter, valueConverter)
  }

  /**
   * Convert an RDD of key-value pairs from internal types to serializable types suitable for
   * output, or vice versa.
   */
  private def convertRDD[K, V](rdd: RDD[(K, V)],
      keyConverterClass: String,
      valueConverterClass: String,
      defaultConverter: Converter[Any, Any]): RDD[(Any, Any)] = {
    val (kc, vc) = getKeyValueConverters(keyConverterClass, valueConverterClass,
      defaultConverter)
    PythonHadoopUtil.convertRDD(rdd, kc, vc)
  }

  /**
   * Output a Python RDD of key-value pairs as a Hadoop SequenceFile using the Writable types
   * we convert from the RDD's key and value types. Note that keys and values can't be
   * [[org.apache.hadoop.io.Writable]] types already, since Writables are not Java
   * `Serializable` and we can't peek at them. The `path` can be on any Hadoop file system.
   */
  def saveAsSequenceFile[K, V, C <: CompressionCodec](
      pyRDD: JavaRDD[Array[Byte]],
      batchSerialized: Boolean,
      path: String,
      compressionCodecClass: String): Unit = {
    // Delegates to saveAsHadoopFile with the SequenceFile output format and inferred types.
    saveAsHadoopFile(
      pyRDD, batchSerialized, path, "org.apache.hadoop.mapred.SequenceFileOutputFormat",
      null, null, null, null, new java.util.HashMap(), compressionCodecClass)
  }

  /**
   * Output a Python RDD of key-value pairs to any Hadoop file system, using old Hadoop
   * `OutputFormat` in mapred package. Keys and values are converted to suitable output
   * types using either user specified converters or, if not specified,
   * [[org.apache.spark.api.python.JavaToWritableConverter]]. Post-conversion types
   * `keyClass` and `valueClass` are automatically inferred if not specified. The passed-in
   * `confAsMap` is merged with the default Hadoop conf associated with the SparkContext of
   * this RDD.
   */
  def saveAsHadoopFile[K, V, F <: OutputFormat[_, _], C <: CompressionCodec](
      pyRDD: JavaRDD[Array[Byte]],
      batchSerialized: Boolean,
      path: String,
      outputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String],
      compressionCodecClass: String): Unit = {
    val rdd = SerDeUtil.pythonToPairRDD(pyRDD, batchSerialized)
    val (kc, vc) = getKeyValueTypes(keyClass, valueClass).getOrElse(
      inferKeyValueTypes(rdd, keyConverterClass, valueConverterClass))
    val mergedConf = getMergedConf(confAsMap, pyRDD.context.hadoopConfiguration)
    val codec = Option(compressionCodecClass).map(Utils.classForName(_).asInstanceOf[Class[C]])
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new JavaToWritableConverter)
    val fc = Utils.classForName(outputFormatClass).asInstanceOf[Class[F]]
    converted.saveAsHadoopFile(path, kc, vc, fc, new JobConf(mergedConf), codec = codec)
  }

  /**
   * Output a Python RDD of key-value pairs to any Hadoop file system, using new Hadoop
   * `OutputFormat` in mapreduce package. Keys and values are converted to suitable output
   * types using either user specified converters or, if not specified,
   * [[org.apache.spark.api.python.JavaToWritableConverter]]. Post-conversion types
   * `keyClass` and `valueClass` are automatically inferred if not specified. The passed-in
   * `confAsMap` is merged with the default Hadoop conf associated with the SparkContext of
   * this RDD.
   */
  def saveAsNewAPIHadoopFile[K, V, F <: NewOutputFormat[_, _]](
      pyRDD: JavaRDD[Array[Byte]],
      batchSerialized: Boolean,
      path: String,
      outputFormatClass: String,
      keyClass: String,
      valueClass: String,
      keyConverterClass: String,
      valueConverterClass: String,
      confAsMap: java.util.HashMap[String, String]): Unit = {
    val rdd = SerDeUtil.pythonToPairRDD(pyRDD, batchSerialized)
    val (kc, vc) = getKeyValueTypes(keyClass, valueClass).getOrElse(
      inferKeyValueTypes(rdd, keyConverterClass, valueConverterClass))
    val mergedConf = getMergedConf(confAsMap, pyRDD.context.hadoopConfiguration)
    val converted = convertRDD(rdd, keyConverterClass, valueConverterClass,
      new JavaToWritableConverter)
    val fc = Utils.classForName(outputFormatClass).asInstanceOf[Class[F]]
    converted.saveAsNewAPIHadoopFile(path, kc, vc, fc, mergedConf)
  }

  /**
   * Output a Python RDD of key-value pairs to any Hadoop file system, using a Hadoop conf
   * converted from the passed-in `confAsMap`. The conf should set relevant output params (
   * e.g., output path, output format, etc), in the same way as it would be configured for
   * a Hadoop MapReduce job. Both old and new Hadoop OutputFormat APIs are supported
   * (mapred vs. mapreduce). Keys/values are converted for output using either user specified
   * converters or, by default, [[org.apache.spark.api.python.JavaToWritableConverter]].
   */
  def saveAsHadoopDataset[K, V](
      pyRDD: JavaRDD[Array[Byte]],
      batchSerialized: Boolean,
      confAsMap: java.util.HashMap[String, String],
      keyConverterClass: String,
      valueConverterClass: String,
      useNewAPI: Boolean): Unit = {
    val conf = PythonHadoopUtil.mapToConf(confAsMap)
    val converted = convertRDD(SerDeUtil.pythonToPairRDD(pyRDD, batchSerialized),
      keyConverterClass, valueConverterClass, new JavaToWritableConverter)
    if (useNewAPI) {
      converted.saveAsNewAPIHadoopDataset(conf)
    } else {
      converted.saveAsHadoopDataset(new JobConf(conf))
    }
  }
}
/** Java-API function that decodes a UTF-8 byte array back into a String. */
private
class BytesToString extends org.apache.spark.api.java.function.Function[Array[Byte], String] {
  override def call(arr: Array[Byte]) : String = new String(arr, StandardCharsets.UTF_8)
}
/**
 * Internal class that acts as an `AccumulatorV2` for Python accumulators. Inside, it
 * collects a list of pickled strings that we pass to Python through a socket.
 *
 * @param serverHost host of the Python accumulator server (null on executors, where the
 *                   field is transient and not serialized)
 * @param serverPort port of the Python accumulator server
 */
private[spark] class PythonAccumulatorV2(
    @transient private val serverHost: String,
    private val serverPort: Int)
  extends CollectionAccumulator[Array[Byte]] {

  Utils.checkHost(serverHost, "Expected hostname")

  // Buffer size for the socket's output stream, configurable via spark.buffer.size.
  val bufferSize = SparkEnv.get.conf.getInt("spark.buffer.size", 65536)

  /**
   * We try to reuse a single Socket to transfer accumulator updates, as they are all added
   * by the DAGScheduler's single-threaded RpcEndpoint anyway.
   */
  @transient private var socket: Socket = _

  // Lazily (re)opens the connection to the Python server; replaces a closed socket.
  private def openSocket(): Socket = synchronized {
    if (socket == null || socket.isClosed) {
      socket = new Socket(serverHost, serverPort)
    }
    socket
  }

  // Need to override so the types match with PythonFunction
  override def copyAndReset(): PythonAccumulatorV2 = new PythonAccumulatorV2(serverHost, serverPort)

  override def merge(other: AccumulatorV2[Array[Byte], JList[Array[Byte]]]): Unit = synchronized {
    val otherPythonAccumulator = other.asInstanceOf[PythonAccumulatorV2]
    // This conditional isn't strictly speaking needed - merging only currently happens on the
    // driver program - but that isn't guaranteed, so we keep it in case this changes.
    if (serverHost == null) {
      // We are on the worker
      super.merge(otherPythonAccumulator)
    } else {
      // This happens on the master, where we pass the updates to Python through a socket
      val socket = openSocket()
      val in = socket.getInputStream
      val out = new DataOutputStream(new BufferedOutputStream(socket.getOutputStream, bufferSize))
      val values = other.value
      // Protocol: count of arrays, then each array length-prefixed.
      out.writeInt(values.size)
      for (array <- values.asScala) {
        out.writeInt(array.length)
        out.write(array)
      }
      out.flush()
      // Wait for a byte from the Python side as an acknowledgement
      val byteRead = in.read()
      if (byteRead == -1) {
        throw new SparkException("EOF reached before Python server acknowledged")
      }
    }
  }
}
/**
 * A Wrapper for Python Broadcast, which is written into disk by Python. It also will
 * write the data into disk after deserialization, then Python can read it from disks.
 *
 * @param path location of the broadcast data file on local disk; transient because the
 *             path is rewritten on each deserializing JVM.
 */
// scalastyle:off no.finalize
private[spark] class PythonBroadcast(@transient var path: String) extends Serializable
  with Logging {

  /**
   * Read data from disks, then copy it to `out`
   */
  private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException {
    val in = new FileInputStream(new File(path))
    try {
      Utils.copyStream(in, out)
    } finally {
      in.close()
    }
  }

  /**
   * Write data into disk, using randomly generated name.
   */
  private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
    val dir = new File(Utils.getLocalDir(SparkEnv.get.conf))
    val file = File.createTempFile("broadcast", "", dir)
    // Update the path so Python (and writeObject) read from the local copy.
    path = file.getAbsolutePath
    val out = new FileOutputStream(file)
    Utils.tryWithSafeFinally {
      Utils.copyStream(in, out)
    } {
      out.close()
    }
  }

  /**
   * Delete the file once the object is GCed.
   */
  override def finalize() {
    if (!path.isEmpty) {
      val file = new File(path)
      if (file.exists()) {
        if (!file.delete()) {
          logWarning(s"Error deleting ${file.getPath}")
        }
      }
    }
  }
}
// scalastyle:on no.finalize
| milliman/spark | core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala | Scala | apache-2.0 | 37,918 |
import org.scalatest.{FunSuite, Matchers}

/** Verifies that `BubbleSort.bubbleSort` orders integer arrays in ascending order. */
class BubbleSortTest extends FunSuite with Matchers {

  test("Bubble Sort should sort") {
    val sorter = new BubbleSort
    // Unsorted input must come back ascending.
    sorter.bubbleSort(Array(11, 1, 4, 22)) should be(Array(1, 4, 11, 22))
    // Already-sorted input must be returned unchanged.
    sorter.bubbleSort(Array(1, 2, 3, 4, 5)) should be(Array(1, 2, 3, 4, 5))
  }
}
| aayushKumarJarvis/Code-Snippets-JAVA-SCALA | src/test/scala/BubbleSortTest.scala | Scala | mit | 348 |
// scalac: -Xlint:implicit-not-found -Xfatal-warnings
// Negative test: invalid type-variable references in @implicitNotFound messages must be reported.
package test

import scala.annotation.implicitNotFound

@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.")
trait Meh[-From, +To]

@implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.")
trait Meh2[-From, +To]

class C[T]
trait T {
  def m[Aaa](implicit @implicitNotFound("I see no C[${Uuh}]") theC: C[Aaa]) = ???
  def n[Aaa](implicit @implicitNotFound("I see no C[${Aaa}]") theC: C[Aaa]) = ???
}
trait U[X, Y[_], Z[_, ZZ]] {
  class I[R] {
    def m[S](implicit @implicitNotFound("${X} ${Y} ${ Z } ${R} ${S} -- ${XX} ${ZZ} ${ Nix }") i: Int) = ???
  }
}
| scala/scala | test/files/neg/t2462b.scala | Scala | apache-2.0 | 727 |
package tastytest

/** TASTy round-trip test for path-dependent `this`-prefixed type members. */
object ThisTypes {

  abstract class Wrap[T] {
    // Structural upper bound on the abstract member type.
    type Base[A] <: { // if not resolved to Sub.this.Base then reflective calls will be needed
      def x: A
    }
    final type Res = Option[Base[T]]
    def doTest: Res
  }

  class Sub extends Wrap[Int] {
    class BaseImpl[A](a: A) {
      def x: A = a
    }
    // Concrete alias: makes Base[A] a nominal type, so `x` is a direct call.
    override type Base[A] = BaseImpl[A]
    def doTest: Res = Some(new BaseImpl(23))
  }

  // Variant where Base is a concrete inner class rather than an abstract type member.
  abstract class Wrap2[T] {
    class Base[A](a: A) {
      def x: A = a
    }
    final type Res = Option[Base[T]]
    def doTest: Res
  }

  class Sub2 extends Wrap2[Int] {
    def doTest: Res = Some(new Base(23))
  }
}
| scala/scala | test/tasty/run/src-3/tastytest/ThisTypes.scala | Scala | apache-2.0 | 644 |
package parser

import scala.util.parsing.combinator._

/**
 * Small reusable token parsers shared by the project's combinator parsers.
 *
 * Note: the redundant `^^ { _.toString }` mappings on the regex parsers were removed —
 * a `Regex` is implicitly converted to a `Parser[String]` by `RegexParsers`, so mapping
 * the result with `toString` was an identity operation.
 */
trait BaseParser extends JavaTokenParsers {

  /** One or more word characters (letters, digits, underscore). */
  val str: Parser[String] = """[a-zA-Z0-9_]+""".r

  /** One or more ASCII letters. */
  val word: Parser[String] = """[a-zA-Z]+""".r

  /** Whole-number literal parsed as an Int (may overflow on very large input). */
  val int: Parser[Int] = wholeNumber ^^ { _.toInt }

  /** Whole-number literal parsed as a Long. */
  val long: Parser[Long] = wholeNumber ^^ { _.toLong }

  /** Literal dot separator. */
  val dot: Parser[Any] = "."

  /** Literal hyphen separator (member name kept as-is for source compatibility). */
  val hiphen: Parser[Any] = "-"
}
| jmarin/scale-geocode | src/main/scala/parser/BaseParser.scala | Scala | apache-2.0 | 410 |
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql

import org.apache.spark.sql.test.SharedSQLContext

// Test suite for SQLConf: programmatic get/set and the SQL SET command.
class SQLConfSuite extends QueryTest with SharedSQLContext {
  private val testKey = "test.key.0"
  private val testVal = "test.val.0"

  test("propagate from spark conf") { // settings propagate from the underlying SparkConf
    // We create a new context here to avoid order dependence with other tests that might call
    // clear().
    val newContext = new SQLContext(ctx.sparkContext)
    assert(newContext.getConf("spark.sql.testkey", "false") === "true")
  }

  test("programmatic ways of basic setting and getting") { // setConf/getConf round-trips
    ctx.conf.clear()
    assert(ctx.getAllConfs.size === 0)
    ctx.setConf(testKey, testVal)
    assert(ctx.getConf(testKey) === testVal)
    assert(ctx.getConf(testKey, testVal + "_") === testVal)
    assert(ctx.getAllConfs.contains(testKey))
    // Tests SQLConf as accessed from a SQLContext is mutable after
    // the latter is initialized, unlike SparkConf inside a SparkContext.
    assert(ctx.getConf(testKey) == testVal)
    assert(ctx.getConf(testKey, testVal + "_") === testVal)
    assert(ctx.getAllConfs.contains(testKey))
    ctx.conf.clear()
  }

  test("parse SQL set commands") { // SET statements update the conf, with/without spaces
    ctx.conf.clear()
    sql(s"set $testKey=$testVal")
    assert(ctx.getConf(testKey, testVal + "_") === testVal)
    assert(ctx.getConf(testKey, testVal + "_") === testVal)
    sql("set some.property=20")
    assert(ctx.getConf("some.property", "0") === "20")
    sql("set some.property = 40")
    assert(ctx.getConf("some.property", "0") === "40")
    val key = "spark.sql.key"
    val vs = "val0,val_1,val2.3,my_table"
    sql(s"set $key=$vs")
    assert(ctx.getConf(key, "0") === vs)
    sql(s"set $key=")
    assert(ctx.getConf(key, "0") === "")
    ctx.conf.clear()
  }

  test("deprecated property") { // legacy mapred key maps onto numShufflePartitions
    ctx.conf.clear()
    sql(s"set ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS}=10")
    assert(ctx.conf.numShufflePartitions === 10)
  }

  test("invalid conf value") { // non-boolean value for a boolean conf must be rejected
    ctx.conf.clear()
    val e = intercept[IllegalArgumentException] {
      sql(s"set ${SQLConf.CASE_SENSITIVE.key}=10")
    }
    assert(e.getMessage === s"${SQLConf.CASE_SENSITIVE.key} should be boolean, but was 10")
  }
}
| tophua/spark1.52 | sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala | Scala | apache-2.0 | 3,271 |
package nexus.typelevel

import shapeless._

/**
 * Typelevel function that inserts type [[U]] at the [[I]]-th position in [[A]].
 * @author Tongfei Chen
 */
trait InsertAt[A, I <: Nat, U] extends DepFn2[A, U] {
  type Out
  // Value-level index corresponding to the type-level position I.
  def index: Int
  // Witness that removing at position I undoes this insertion.
  def inverse: RemoveAt.Aux[Out, I, A]
}

object InsertAt {

  /** Summons the unique instance for (A, I, U), refining Out. */
  def apply[A, I <: Nat, U](implicit o: InsertAt[A, I, U]): Aux[A, I, U, o.Out] = o

  type Aux[A, I <: Nat, U, Out0] = InsertAt[A, I, U] { type Out = Out0 }

  // Base case: inserting at position 0 prepends U to the HList.
  implicit def case0[At <: HList, Ah]: Aux[At, _0, Ah, Ah :: At] =
    new InsertAt[At, _0, Ah] { ix =>
      type Out = Ah :: At
      def apply(t: At, h: Ah): Ah :: At = h :: t
      def index = 0
      def inverse: RemoveAt.Aux[Ah :: At, _0, At] = RemoveAt.case0
    }

  // Inductive case: keep the head, insert at position P in the tail (index shifted by 1).
  implicit def caseN[At <: HList, Ah, P <: Nat, U, Bt <: HList]
    (implicit pix: InsertAt.Aux[At, P, U, Bt]): Aux[Ah :: At, Succ[P], U, Ah :: Bt] =
    new InsertAt[Ah :: At, Succ[P], U] { ix =>
      type Out = Ah :: Bt
      def apply(t: Ah :: At, x: U): Ah :: Bt = t.head :: pix(t.tail, x)
      def index = pix.index + 1
      def inverse: RemoveAt.Aux[Ah :: Bt, Succ[P], Ah :: At] = RemoveAt.caseN(pix.inverse)
    }

  // Tuple support: convert to an HList, insert, convert back.
  implicit def tuple[A, Al <: HList, I <: Nat, U, Bl <: HList, B]
    (implicit al: ToHList.Aux[A, Al], ix: InsertAt.Aux[Al, I, U, Bl], bl: FromHList.Aux[Bl, B]): Aux[A, I, U, B] =
    new InsertAt[A, I, U] {
      type Out = B
      def apply(t: A, x: U): B = bl(ix(al(t), x))
      def index = ix.index
      def inverse: RemoveAt.Aux[B, I, A] = RemoveAt.tuple(bl.inverse, ix.inverse, al.inverse)
    }
}
| ctongfei/nexus | tensor/src/main/scala/nexus/typelevel/InsertAt.scala | Scala | mit | 1,561 |
package scalariform.formatter

import scalariform.parser.{CompilationUnit, ScalaParser}
import scalariform.formatter.preferences.{FormattingPreferences, NewlineAtEndOfFile}

/** Tests that top-level parses add a newline when NewlineAtEndOfFile is set. */
class NewlineAtEndOfFileFormatterTest extends AbstractFormatterTest {

  type Result = CompilationUnit

  // Must parse as a full script to verify the newline formatting.
  def parse(parser: ScalaParser): Result = parser.scriptBody()

  def format(formatter: ScalaFormatter, result: Result): FormatResult =
    formatter.format(result)(FormatterState())

  override val debug = false

  // Cases with the preference enabled.
  {
    implicit val formattingPreferences: FormattingPreferences =
      FormattingPreferences.setPreference(NewlineAtEndOfFile, true)

    // No newline; should have one added.
    """import foo.bar
|
|class SettingOn {
|}""" ==>
    """import foo.bar
|
|class SettingOn {
|}
|"""

    // Has newline; should stay the same.
    """import foo.bar
|class SettingOn {
|}
|""" ==>
    """import foo.bar
|class SettingOn {
|}
|"""
  }

  // Cases with the preference disabled.
  {
    implicit val formattingPreferences: FormattingPreferences =
      FormattingPreferences.setPreference(NewlineAtEndOfFile, false)

    // No newline; should stay the same.
    """import foo.bar
|
|class SettingOff {
|}""" ==>
    """import foo.bar
|
|class SettingOff {
|}"""

    // Has newline; should stay the same (preference off doesn't strip newlines that exist).
    """import foo.bar
|class SettingOff {
|}
|""" ==>
    """import foo.bar
|class SettingOff {
|}
|"""
  }
}
| scala-ide/scalariform | scalariform/src/test/scala/scalariform/formatter/NewlineAtEndOfFileFormatterTest.scala | Scala | mit | 1,577 |
package com.wavesplatform.state.diffs.ci

import com.wavesplatform.db.WithDomain
import com.wavesplatform.db.WithState.AddrWithBalance
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.lang.directives.values.V4
import com.wavesplatform.lang.script.Script
import com.wavesplatform.lang.v1.compiler.TestCompiler
import com.wavesplatform.settings.TestFunctionalitySettings
import com.wavesplatform.state.diffs.ENOUGH_AMT
import com.wavesplatform.test.PropSpec
import com.wavesplatform.transaction.Asset.IssuedAsset
import com.wavesplatform.transaction.TxHelpers

/**
 * Checks that a ScriptTransfer with an Alias recipient is passed to the asset script
 * as an Alias (not pre-resolved to an Address) once SynchronousCalls is activated.
 */
class ScriptTransferByAliasTest extends PropSpec with WithDomain {

  // Height at which the SynchronousCalls feature flag becomes active in this test.
  private val activationHeight = 4

  // All prerequisite features pre-activated at genesis; SynchronousCalls at activationHeight.
  private val fsWithV5 = TestFunctionalitySettings.Enabled.copy(
    preActivatedFeatures = Map(
      BlockchainFeatures.SmartAccounts.id    -> 0,
      BlockchainFeatures.SmartAssets.id      -> 0,
      BlockchainFeatures.Ride4DApps.id       -> 0,
      BlockchainFeatures.FeeSponsorship.id   -> 0,
      BlockchainFeatures.DataTransaction.id  -> 0,
      BlockchainFeatures.BlockReward.id      -> 0,
      BlockchainFeatures.BlockV5.id          -> 0,
      BlockchainFeatures.SynchronousCalls.id -> activationHeight
    ),
    estimatorPreCheckHeight = Int.MaxValue
  )

  // Asset script that only allows transfers whose recipient is an Alias.
  private val verifier: Script =
    TestCompiler(V4).compileExpression(
      s"""
         | {-# STDLIB_VERSION 4 #-}
         | {-# CONTENT_TYPE EXPRESSION #-}
         |
         | match tx {
         |   case t: TransferTransaction =>
         |     match t.recipient {
         |       case alias: Alias => true
         |       case address: Address => throw("alias expected!")
         |     }
         |   case _ => true
         | }
       """.stripMargin
    )

  private val transferAmount = 123
  private val alias          = "alias"

  // dApp whose callable transfers the scripted asset to the alias recipient.
  private def dApp(asset: IssuedAsset): Script = {
    TestCompiler(V4).compileContract(
      s"""
         | {-# STDLIB_VERSION 4 #-}
         | {-# CONTENT_TYPE DAPP #-}
         | {-# SCRIPT_TYPE ACCOUNT #-}
         |
         | @Callable(i)
         | func default() =
         |  [
         |    ScriptTransfer(Alias("$alias"), $transferAmount, base58'$asset')
         |  ]
       """.stripMargin
    )
  }

  property(s"ScriptTransfer alias recipient is mapped correctly after ${BlockchainFeatures.SynchronousCalls} activation") {
    val dAppAcc  = TxHelpers.signer(0)
    val invoker  = TxHelpers.signer(1)
    val receiver = TxHelpers.signer(2)

    val balances = AddrWithBalance.enoughBalances(dAppAcc, invoker, receiver)

    // Set up the alias, the scripted asset, and the dApp account.
    val createAlias  = TxHelpers.createAlias(alias, receiver)
    val issue        = TxHelpers.issue(dAppAcc, ENOUGH_AMT, script = Some(verifier))
    val asset        = IssuedAsset(issue.id())
    val setDApp      = TxHelpers.setScript(dAppAcc, dApp(asset))
    val preparingTxs = Seq(createAlias, issue, setDApp)

    val invoke1 = TxHelpers.invoke(dAppAcc.toAddress, func = None, invoker = invoker, fee = TxHelpers.ciFee(sc = 1))
    val invoke2 = TxHelpers.invoke(dAppAcc.toAddress, func = None, invoker = invoker, fee = TxHelpers.ciFee(sc = 1))

    withDomain(domainSettingsWithFS(fsWithV5), balances) { d =>
      d.appendBlock(preparingTxs: _*)

      // Before activation the recipient reaches the asset script as an Address and is rejected.
      d.appendBlock(invoke1)
      d.blockchain.bestLiquidDiff.get.errorMessage(invoke1.id()).get.text should include(
        s"Transaction is not allowed by script of the asset $asset: alias expected!"
      )

      d.appendBlock()
      d.blockchainUpdater.height shouldBe activationHeight

      // After activation the Alias is preserved, the script passes, and the transfer lands.
      d.appendBlock(invoke2)
      d.blockchain.bestLiquidDiff.get.errorMessage(invoke2.id()) shouldBe None
      d.balance(receiver.toAddress, asset) shouldBe transferAmount
    }
  }
}
| wavesplatform/Waves | node/src/test/scala/com/wavesplatform/state/diffs/ci/ScriptTransferByAliasTest.scala | Scala | mit | 3,719 |
/*******************************************************************************
Copyright (c) 2012-2015, KAIST, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.clone_detector.vgen
import java.io.BufferedWriter
import java.io.File
import java.io.FileWriter
import java.util.Vector
import java.util.regex.Pattern
import kr.ac.kaist.jsaf.compiler.Parser
import kr.ac.kaist.jsaf.nodes.ASTNode
import kr.ac.kaist.jsaf.nodes_util.JSFromHTML
import kr.ac.kaist.jsaf.nodes_util.Span
/**
 * Computes clone-detection characteristic vectors for a single JavaScript file
 * (or for scripts embedded in an HTML file) and appends them to the vector
 * database file "<vec_dir>/vdb_<minTokens>_<stride>".
 *
 * All work is performed eagerly in the constructor; the resulting merged vector
 * spans are available afterwards via [[getMergedVectors]].
 *
 * @param file           path of the .js or HTML input file
 * @param minT           minimum number of tokens a vector must contain
 * @param sliding_stride stride of the sliding merge window (0 disables merging)
 * @param vec_dir        directory of the output vector database
 * @param isJS           true if `file` is plain JavaScript (affects file-name output)
 */
class VectorGenerator(file: String, minT: Int, sliding_stride: Int, vec_dir: String, isJS: Boolean) {
  def this(file: String, minT: Int, sliding_stride: Int, vec_dir: String) = this(file, minT, sliding_stride, vec_dir, true)
  val SEP = File.separator
  val minTokens = minT
  val stride = sliding_stride
  val vector_dir = vec_dir

  // Parse the input: plain JavaScript directly, otherwise extract scripts from HTML.
  val pgm = if (file.endsWith(".js")) Parser.parseFileConvertExn(new File(file), true) else new JSFromHTML(file).parseScripts

  // Annotate every AST node's span with its characteristic vector.
  new JSAstVectorGenerator(pgm, minTokens).doit

  // Serialize the AST into a flat node sequence, then drop nodes whose vectors
  // are not mergeable.
  val serializedTree = new Vector[ASTNode]
  new JSAstSerializer(pgm, serializedTree, minTokens).doit
  val mergedVectors = new Vector[Span]
  var itr = serializedTree.iterator
  while (itr.hasNext) {
    val node = itr.next
    if (!node.getInfo.getSpan.getCharVector.isMergeable) {
      itr.remove
    }
  }

  // Slide a window [front, back] over the serialized nodes and record the merged
  // span of each window that contains at least `minTokens` tokens.
  if (!serializedTree.isEmpty) {
    if (stride != 0) {
      var step = 0
      var front = 0
      var back = front + 1
      do {
        var span = serializedTree.elementAt(front).getInfo.getSpan
        if (serializedTree.size > 1)
          // Re-accumulate the current window by merging every element after `front`.
          // BUG FIX: the original merged elementAt(back) on every loop iteration
          // (the loop variable `i` was unused), which skipped the characteristic
          // vectors of all intermediate nodes whenever the window was wider than 2.
          for (i <- front + 1 to back)
            span = new Span(span, serializedTree.elementAt(i).getInfo.getSpan)
        // Grow the window to the right until it holds enough tokens (or we hit the end).
        while (back != serializedTree.size - 1 && !span.getCharVector.containsEnoughTokens(minTokens)) {
          back += 1
          span = new Span(span, serializedTree.elementAt(back).getInfo.getSpan)
        }
        // Emit only every `stride`-th window, and skip windows that duplicate the
        // previous one's location unless they carry more tokens.
        if (step % stride == 0) {
          if (!mergedVectors.isEmpty) {
            val prev = mergedVectors.elementAt(mergedVectors.size - 1)
            if (prev.begin.getLine != span.begin.getLine || prev.end.getLine != span.end.getLine || prev.getCharVector.getNumOfTokens < span.getCharVector.getNumOfTokens)
              mergedVectors.add(span)
          } else mergedVectors.add(span)
        }
        front += 1
        step += 1
      } while (front < serializedTree.size)
    } else {
      // Stride 0: no merging, take every node's span as-is.
      for (i <- 0 to serializedTree.size - 1)
        mergedVectors.add(serializedTree.elementAt(i).getInfo.getSpan)
    }
  }

  // Remove later vectors that cover exactly the same (file, begin, end) location
  // as the one kept before them.
  var mv_itr = mergedVectors.iterator
  if (mv_itr.hasNext()) {
    var cur = mv_itr.next
    while (mv_itr.hasNext) {
      val node = mv_itr.next
      if (cur.getFileName.equals(node.getFileName) &&
          cur.getBegin.getLine == node.getBegin.getLine &&
          cur.getEnd.getLine == node.getEnd.getLine) mv_itr.remove
      else cur = node
    }
  }

  // Append the vectors of this file to the shared vector database (append mode).
  val filename = vector_dir + SEP + "vdb_" + minTokens + "_" + stride
  val fstream = new FileWriter(filename, true)
  val out = new BufferedWriter(fstream)
  // NOTE(review): inside a character class `|` is a literal, so "[p|s|x]" also
  // matches a literal '|'; the intent is "[psx]?htm l?(...)" — harmless for real
  // file names, left unchanged to preserve matching behavior.
  val js_pattern = Pattern.compile(".[p|s|x]{0,1}htm[l]{0,1}(.[0-9]+_[0-9]+.js)")
  try {
    for (i <- 0 to mergedVectors.size - 1) {
      val s = mergedVectors.elementAt(i)
      val node_kind = s.getCharVector.getNodeKind
      // For scripts extracted from HTML, strip the synthetic ".<n>_<m>.js" suffix
      // that JSFromHTML appends, so vectors are reported against the HTML file.
      val sFileName = if (isJS) s.getFileName else {
        val matcher = js_pattern.matcher(s.getFileName)
        val target = if (matcher.find) matcher.group(1) else ""
        s.getFileName.replace(target, "")
      }
      out.write("# FILE:" + sFileName + ", LINE:" + s.getBegin.getLine + ", OFFSET:" + s.getEnd.getLine + ", NODE_KIND:" + node_kind + ", CONTEXT_KIND:0, NEIGHBOR_KIND:0, NUM_NODE:" + s.getCharVector.getNumOfTokens + ", NUM_DECL:0, NUM_STMT:0, NUM_EXPR:0,")
      out.newLine
      out.write(s.getCharVector.toString)
      out.newLine
    }
  } finally {
    // FIX: the original never released the writer on exception; closing the
    // BufferedWriter also closes the underlying FileWriter.
    out.close
  }

  def getMergedVectors = mergedVectors
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/clone_detector/vgen/VectorGenerator.scala | Scala | bsd-3-clause | 5,045 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.calcite
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.planner.calcite.FlinkTypeFactory._
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.plan.nodes.calcite._
import org.apache.flink.table.planner.plan.schema.TimeIndicatorRelDataType
import org.apache.flink.table.planner.plan.utils.TemporalJoinUtil
import org.apache.flink.table.types.logical.TimestampType
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.hint.RelHint
import org.apache.calcite.rel.logical._
import org.apache.calcite.rel.{RelNode, RelShuttle}
import org.apache.calcite.rex._
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.sql.fun.SqlStdOperatorTable.FINAL
import java.util.{Collections => JCollections}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
 * Traverses a [[RelNode]] tree and converts fields with [[TimeIndicatorRelDataType]] type. If a
 * time attribute is accessed for a calculation, it will be materialized. Forwarding is allowed in
 * some cases, but not all.
 */
class RelTimeIndicatorConverter(rexBuilder: RexBuilder) extends RelShuttle {

  // Regular TIMESTAMP(3) type used to replace a time indicator once it is materialized.
  private def timestamp(isNullable: Boolean): RelDataType = rexBuilder
    .getTypeFactory
    .asInstanceOf[FlinkTypeFactory]
    .createFieldTypeFromLogicalType(new TimestampType(isNullable, 3))

  val materializerUtils = new RexTimeIndicatorMaterializerUtils(rexBuilder)

  // Set operations all funnel into visitSetOp, which validates that indicator
  // types line up across inputs.
  override def visit(intersect: LogicalIntersect): RelNode = visitSetOp(intersect)

  override def visit(union: LogicalUnion): RelNode = visitSetOp(union)

  override def visit(aggregate: LogicalAggregate): RelNode = convertAggregate(aggregate)

  override def visit(minus: LogicalMinus): RelNode = visitSetOp(minus)

  override def visit(sort: LogicalSort): RelNode = {
    val input = sort.getInput.accept(this)
    LogicalSort.create(input, sort.collation, sort.offset, sort.fetch)
  }

  override def visit(matchRel: LogicalMatch): RelNode = {
    // visit children and update inputs
    val input = matchRel.getInput.accept(this)
    val rowType = matchRel.getInput.getRowType
    val materializer = new RexTimeIndicatorMaterializer(
      rexBuilder,
      rowType.getFieldList.map(_.getType))
    // update input expressions
    val patternDefs = matchRel.getPatternDefinitions.mapValues(_.accept(materializer))
    val measures = matchRel.getMeasures
      .mapValues(_.accept(materializer))
    val interval = if (matchRel.getInterval != null) {
      matchRel.getInterval.accept(materializer)
    } else {
      null
    }
    // a measure is no longer a time indicator if materialization changed its type
    val isNoLongerTimeIndicator : String => Boolean = fieldName =>
      measures.get(fieldName).exists(r => !FlinkTypeFactory.isTimeIndicatorType(r.getType))
    // materialize all output types
    val outputType = materializerUtils.getRowTypeWithoutIndicators(
      matchRel.getRowType,
      isNoLongerTimeIndicator)
    LogicalMatch.create(
      input,
      outputType,
      matchRel.getPattern,
      matchRel.isStrictStart,
      matchRel.isStrictEnd,
      patternDefs,
      measures,
      matchRel.getAfter,
      matchRel.getSubsets.asInstanceOf[java.util.Map[String, java.util.TreeSet[String]]],
      matchRel.isAllRows,
      matchRel.getPartitionKeys,
      matchRel.getOrderKeys,
      interval)
  }

  // Fallback dispatch for node kinds without a dedicated overload in RelShuttle.
  // Unknown operators are rejected explicitly in the final case.
  override def visit(other: RelNode): RelNode = other match {
    case collect: Collect =>
      collect

    case uncollect: Uncollect =>
      // visit children and update inputs
      val input = uncollect.getInput.accept(this)
      Uncollect.create(uncollect.getTraitSet, input, uncollect.withOrdinality,
        JCollections.emptyList())

    case scan: LogicalTableFunctionScan =>
      scan

    case aggregate: LogicalWindowAggregate =>
      val convAggregate = convertAggregate(aggregate)
      LogicalWindowAggregate.create(
        aggregate.getWindow,
        aggregate.getNamedProperties,
        convAggregate)

    case windowTableAggregate: LogicalWindowTableAggregate =>
      // convert via an equivalent window aggregate, then re-wrap as table aggregate
      val correspondingAggregate = new LogicalWindowAggregate(
        windowTableAggregate.getCluster,
        windowTableAggregate.getTraitSet,
        windowTableAggregate.getInput,
        windowTableAggregate.getGroupSet,
        windowTableAggregate.getAggCallList,
        windowTableAggregate.getWindow,
        windowTableAggregate.getNamedProperties)
      val convAggregate = convertAggregate(correspondingAggregate)
      LogicalWindowTableAggregate.create(
        windowTableAggregate.getWindow,
        windowTableAggregate.getNamedProperties,
        convAggregate)

    case tableAggregate: LogicalTableAggregate =>
      // convert via an equivalent plain aggregate, then re-wrap as table aggregate
      val correspondingAggregate = LogicalAggregate.create(
        tableAggregate.getInput,
        tableAggregate.getGroupSet,
        tableAggregate.getGroupSets,
        tableAggregate.getAggCallList)
      val convAggregate = convertAggregate(correspondingAggregate)
      LogicalTableAggregate.create(convAggregate)

    case watermarkAssigner: LogicalWatermarkAssigner =>
      watermarkAssigner

    case snapshot: LogicalSnapshot =>
      val input = snapshot.getInput.accept(this)
      snapshot.copy(snapshot.getTraitSet, input, snapshot.getPeriod)

    case rank: LogicalRank =>
      val input = rank.getInput.accept(this)
      rank.copy(rank.getTraitSet, JCollections.singletonList(input))

    case sink: LogicalSink =>
      // sinks materialize any remaining proctime indicators in their input
      val newInput = convertSinkInput(sink.getInput)
      new LogicalSink(
        sink.getCluster,
        sink.getTraitSet,
        newInput,
        sink.tableIdentifier,
        sink.catalogTable,
        sink.tableSink,
        sink.staticPartitions)

    case sink: LogicalLegacySink =>
      val newInput = convertSinkInput(sink.getInput)
      new LogicalLegacySink(
        sink.getCluster,
        sink.getTraitSet,
        newInput,
        sink.sink,
        sink.sinkName,
        sink.catalogTable,
        sink.staticPartitions)

    case _ =>
      throw new TableException(s"Unsupported logical operator: ${other.getClass.getSimpleName}")
  }

  override def visit(exchange: LogicalExchange): RelNode =
    throw new TableException("Logical exchange in a stream environment is not supported yet.")

  override def visit(scan: TableScan): RelNode = scan

  override def visit(scan: TableFunctionScan): RelNode =
    throw new TableException("Table function scan in a stream environment is not supported yet.")

  override def visit(values: LogicalValues): RelNode = values

  override def visit(filter: LogicalFilter): RelNode = {
    // visit children and update inputs
    val input = filter.getInput.accept(this)
    // check if input field contains time indicator type
    // materialize field if no time indicator is present anymore
    // if input field is already materialized, change to timestamp type
    val materializer = new RexTimeIndicatorMaterializer(
      rexBuilder,
      input.getRowType.getFieldList.map(_.getType))
    // materialize condition due to filter will validate condition type
    val newCondition = filter.getCondition.accept(materializer)
    LogicalFilter.create(input, newCondition)
  }

  override def visit(project: LogicalProject): RelNode = {
    // visit children and update inputs
    val input = project.getInput.accept(this)
    // check if input field contains time indicator type
    // materialize field if no time indicator is present anymore
    // if input field is already materialized, change to timestamp type
    val materializer = new RexTimeIndicatorMaterializer(
      rexBuilder,
      input.getRowType.getFieldList.map(_.getType))
    val projects = project.getProjects.map(_.accept(materializer))
    val fieldNames = project.getRowType.getFieldNames
    LogicalProject.create(input, JCollections.emptyList[RelHint](), projects, fieldNames)
  }

  override def visit(join: LogicalJoin): RelNode = {
    val left = join.getLeft.accept(this)
    val right = join.getRight.accept(this)
    // temporal table join
    if (TemporalJoinUtil.containsTemporalJoinCondition(join.getCondition)) {
      val rewrittenTemporalJoin = join.copy(join.getTraitSet, List(left, right))
      // Materialize all of the time attributes from the right side of temporal join
      val indicesToMaterialize = (left.getRowType.getFieldCount until
        rewrittenTemporalJoin.getRowType.getFieldCount).toSet
      materializerUtils.projectAndMaterializeFields(rewrittenTemporalJoin, indicesToMaterialize)
    } else {
      // regular join: refresh input refs of time indicators so their types match
      // the (possibly rewritten) inputs
      val newCondition = join.getCondition.accept(new RexShuttle {
        private val leftFieldCount = left.getRowType.getFieldCount
        private val leftFields = left.getRowType.getFieldList.toList
        private val leftRightFields =
          (left.getRowType.getFieldList ++ right.getRowType.getFieldList).toList

        override def visitInputRef(inputRef: RexInputRef): RexNode = {
          if (isTimeIndicatorType(inputRef.getType)) {
            // indices below leftFieldCount reference the left input,
            // indices above reference the combined left+right row
            val fields = if (inputRef.getIndex < leftFieldCount) {
              leftFields
            } else {
              leftRightFields
            }
            RexInputRef.of(inputRef.getIndex, fields)
          } else {
            super.visitInputRef(inputRef)
          }
        }
      })
      LogicalJoin.create(left, right, JCollections.emptyList(),
        newCondition, join.getVariablesSet, join.getJoinType)
    }
  }

  override def visit(correlate: LogicalCorrelate): RelNode = {
    // visit children and update inputs
    val inputs = correlate.getInputs.map(_.accept(this))
    val right = inputs(1) match {
      case scan: LogicalTableFunctionScan =>
        // visit children and update inputs
        val scanInputs = scan.getInputs.map(_.accept(this))
        // check if input field contains time indicator type
        // materialize field if no time indicator is present anymore
        // if input field is already materialized, change to timestamp type
        val materializer = new RexTimeIndicatorMaterializer(
          rexBuilder,
          inputs.head.getRowType.getFieldList.map(_.getType))
        val call = scan.getCall.accept(materializer)
        LogicalTableFunctionScan.create(
          scan.getCluster,
          scanInputs,
          call,
          scan.getElementType,
          scan.getRowType,
          scan.getColumnMappings)
      case _ =>
        inputs(1)
    }
    LogicalCorrelate.create(
      inputs.head,
      right,
      correlate.getCorrelationId,
      correlate.getRequiredColumns,
      correlate.getJoinType)
  }

  // Shared handling for UNION / INTERSECT / MINUS: all inputs must agree on which
  // fields are time indicators and on the rowtime/proctime flavor of each.
  def visitSetOp(setOp: SetOp): RelNode = {
    // visit children and update inputs
    val inputs = setOp.getInputs.map(_.accept(this))
    // make sure that time indicator types match
    val inputTypes = inputs.map(_.getRowType)
    val head = inputTypes.head.getFieldList.map(_.getType)
    val isValid = inputTypes.forall { t =>
      val fieldTypes = t.getFieldList.map(_.getType)
      fieldTypes.zip(head).forall { case (l, r) =>
        // check if time indicators match
        if (isTimeIndicatorType(l) && isTimeIndicatorType(r)) {
          val leftTime = l.asInstanceOf[TimeIndicatorRelDataType].isEventTime
          val rightTime = r.asInstanceOf[TimeIndicatorRelDataType].isEventTime
          leftTime == rightTime
        }
        // one side is not an indicator
        else if (isTimeIndicatorType(l) || isTimeIndicatorType(r)) {
          false
        }
        // uninteresting types
        else {
          true
        }
      }
    }
    if (!isValid) {
      throw new ValidationException(
        "Union fields with time attributes have different types.")
    }
    setOp.copy(setOp.getTraitSet, inputs, setOp.all)
  }

  // Collects the input field indices an aggregate reads (agg-call arguments and
  // grouping keys); for COUNT(*) every input field is considered accessed.
  private def gatherIndicesToMaterialize(aggregate: Aggregate, input: RelNode): Set[Int] = {
    val indicesToMaterialize = mutable.Set[Int]()
    // check arguments of agg calls
    aggregate.getAggCallList.foreach(call => if (call.getArgList.size() == 0) {
      // count(*) has an empty argument list
      (0 until input.getRowType.getFieldCount).foreach(indicesToMaterialize.add)
    } else {
      // for other aggregations
      call.getArgList.map(_.asInstanceOf[Int]).foreach(indicesToMaterialize.add)
    })
    // check grouping sets
    aggregate.getGroupSets.foreach(set =>
      set.asList().map(_.asInstanceOf[Int]).foreach(indicesToMaterialize.add)
    )
    indicesToMaterialize.toSet
  }

  // NOTE(review): currently not referenced anywhere in this class — candidate for removal.
  private def hasRowtimeAttribute(rowType: RelDataType): Boolean = {
    rowType.getFieldList.exists(field => isRowtimeIndicatorType(field.getType))
  }

  // Converts a sink's input and, if any proctime indicator remains in its output,
  // wraps it into a projection that calls PROCTIME_MATERIALIZE on those fields.
  private def convertSinkInput(sinkInput: RelNode): RelNode = {
    var newInput = sinkInput.accept(this)
    var needsConversion = false
    val projects = newInput.getRowType.getFieldList.map { field =>
      if (isProctimeIndicatorType(field.getType)) {
        needsConversion = true
        rexBuilder.makeCall(FlinkSqlOperatorTable.PROCTIME_MATERIALIZE,
          new RexInputRef(field.getIndex, field.getType))
      } else {
        new RexInputRef(field.getIndex, field.getType)
      }
    }
    // add final conversion if necessary
    if (needsConversion) {
      LogicalProject.create(newInput, JCollections.emptyList[RelHint](),
        projects, newInput.getRowType.getFieldNames)
    } else {
      newInput
    }
  }

  // Converts an aggregate: materializes accessed time attributes in front of the
  // aggregate (merging with an existing project when possible) and strips
  // indicator types from agg-call return types.
  private def convertAggregate(aggregate: Aggregate): LogicalAggregate = {
    // visit children and update inputs
    val input = aggregate.getInput.accept(this)

    // add a project to materialize aggregation arguments/grouping keys
    val refIndices = gatherIndicesToMaterialize(aggregate, input)

    val needsMaterialization = refIndices.exists(idx =>
      isTimeIndicatorType(input.getRowType.getFieldList.get(idx).getType))

    // create project if necessary
    val projectedInput = if (needsMaterialization) {

      // insert or merge with input project if
      // a time attribute is accessed and needs to be materialized
      input match {

        // merge
        case lp: LogicalProject =>
          val projects = lp.getProjects.zipWithIndex.map { case (expr, idx) =>
            if (isTimeIndicatorType(expr.getType) && refIndices.contains(idx)) {
              if (isRowtimeIndicatorType(expr.getType)) {
                // cast rowtime indicator to regular timestamp
                rexBuilder.makeAbstractCast(timestamp(expr.getType.isNullable), expr)
              } else {
                // generate proctime access
                rexBuilder.makeCall(FlinkSqlOperatorTable.PROCTIME_MATERIALIZE, expr)
              }
            } else {
              expr
            }
          }

          LogicalProject.create(
            lp.getInput,
            JCollections.emptyList[RelHint](),
            projects,
            input.getRowType.getFieldNames)

        // new project
        case _ =>
          val projects = input.getRowType.getFieldList.map { field =>
            if (isTimeIndicatorType(field.getType) && refIndices.contains(field.getIndex)) {
              if (isRowtimeIndicatorType(field.getType)) {
                // cast rowtime indicator to regular timestamp
                rexBuilder.makeAbstractCast(
                  timestamp(field.getType.isNullable),
                  new RexInputRef(field.getIndex, field.getType))
              } else {
                // generate proctime access
                rexBuilder.makeCall(
                  FlinkSqlOperatorTable.PROCTIME_MATERIALIZE,
                  new RexInputRef(field.getIndex, field.getType))
              }
            } else {
              new RexInputRef(field.getIndex, field.getType)
            }
          }

          LogicalProject.create(
            input,
            JCollections.emptyList[RelHint](),
            projects,
            input.getRowType.getFieldNames)
      }
    } else {
      // no project necessary
      input
    }

    // remove time indicator type as agg call return type
    val updatedAggCalls = aggregate.getAggCallList.map { call =>
      val callType = if (isTimeIndicatorType(call.getType)) {
        timestamp(call.getType.isNullable)
      } else {
        call.getType
      }
      AggregateCall.create(
        call.getAggregation,
        call.isDistinct,
        call.getArgList,
        call.filterArg,
        callType,
        call.name)
    }

    LogicalAggregate.create(
      projectedInput,
      aggregate.indicator,
      aggregate.getGroupSet,
      aggregate.getGroupSets,
      updatedAggCalls)
  }

  override def visit(modify: LogicalTableModify): RelNode = {
    val input = modify.getInput.accept(this)
    modify.copy(modify.getTraitSet, JCollections.singletonList(input))
  }

  override def visit(calc: LogicalCalc): RelNode = {
    calc // Do nothing for Calc now.
  }
}
object RelTimeIndicatorConverter {

  /**
   * Converts the whole plan rooted at `rootRel` and, unless the root is a
   * [[LogicalLegacySink]] (already handled by the converter itself) or the caller
   * opts out via `needFinalTimeIndicatorConversion`, appends a final projection
   * that materializes any proctime indicators remaining in the root's row type.
   */
  def convert(
      rootRel: RelNode,
      rexBuilder: RexBuilder,
      needFinalTimeIndicatorConversion: Boolean): RelNode = {
    val converter = new RelTimeIndicatorConverter(rexBuilder)
    val convertedRoot = rootRel.accept(converter)

    // the LogicalSink is converted in RelTimeIndicatorConverter before
    if (rootRel.isInstanceOf[LogicalLegacySink] || !needFinalTimeIndicatorConversion) {
      return convertedRoot
    }
    var needsConversion = false

    // materialize remaining proctime indicators
    val projects = convertedRoot.getRowType.getFieldList.map(field =>
      if (isProctimeIndicatorType(field.getType)) {
        needsConversion = true
        rexBuilder.makeCall(
          FlinkSqlOperatorTable.PROCTIME_MATERIALIZE,
          new RexInputRef(field.getIndex, field.getType))
      } else {
        new RexInputRef(field.getIndex, field.getType)
      }
    )

    // add final conversion if necessary
    if (needsConversion) {
      LogicalProject.create(
        convertedRoot,
        JCollections.emptyList[RelHint](),
        projects,
        convertedRoot.getRowType.getFieldNames)
    } else {
      convertedRoot
    }
  }

  /**
   * Materializes time indicator accesses in an expression.
   *
   * @param expr The expression in which time indicators are materialized.
   * @param rowType The input schema of the expression.
   * @param rexBuilder A RexBuilder.
   * @return The expression with materialized time indicators.
   */
  def convertExpression(expr: RexNode, rowType: RelDataType, rexBuilder: RexBuilder): RexNode = {
    val materializer = new RexTimeIndicatorMaterializer(
      rexBuilder,
      rowType.getFieldList.map(_.getType))
    expr.accept(materializer)
  }

  /**
   * Checks if the given call is a materialization call for either proctime or rowtime.
   */
  def isMaterializationCall(call: RexCall): Boolean = {
    // proctime materialization: PROCTIME_MATERIALIZE(proctime-indicator)
    val isProctimeCall: Boolean = {
      call.getOperator == FlinkSqlOperatorTable.PROCTIME_MATERIALIZE &&
        call.getOperands.size() == 1 &&
        isProctimeIndicatorType(call.getOperands.get(0).getType)
    }
    // rowtime materialization: CAST(rowtime-indicator AS TIMESTAMP)
    val isRowtimeCall: Boolean = {
      call.getOperator == SqlStdOperatorTable.CAST &&
        call.getOperands.size() == 1 &&
        isRowtimeIndicatorType(call.getOperands.get(0).getType) &&
        call.getType.getSqlTypeName == SqlTypeName.TIMESTAMP
    }
    isProctimeCall || isRowtimeCall
  }
}
/**
 * Takes `newResolvedInput` types of the [[RexNode]] and if those types have changed rewrites
 * the [[RexNode]] to make it consistent with new type.
 */
class RexTimeIndicatorMaterializer(
    private val rexBuilder: RexBuilder,
    private val input: Seq[RelDataType])
  extends RexShuttle {

  // Regular TIMESTAMP(3) type used to replace a materialized time indicator.
  private def timestamp(isNullable: Boolean): RelDataType = rexBuilder
    .getTypeFactory
    .asInstanceOf[FlinkTypeFactory]
    .createFieldTypeFromLogicalType(new TimestampType(isNullable, 3))

  override def visitInputRef(inputRef: RexInputRef): RexNode = {
    // reference is interesting
    if (isTimeIndicatorType(inputRef.getType)) {
      val resolvedRefType = input(inputRef.getIndex)
      // input is a valid time indicator
      if (isTimeIndicatorType(resolvedRefType)) {
        inputRef
      }
      // input has been materialized: re-type the reference to the resolved type
      else {
        new RexInputRef(inputRef.getIndex, resolvedRefType)
      }
    }
    // reference is a regular field
    else {
      super.visitInputRef(inputRef)
    }
  }

  // True if `call` is a MATCH_PROCTIME()/MATCH_ROWTIME() time-attribute access.
  private def isMatchTimeIndicator(call: RexNode): Boolean = {
    call match {
      case operand: RexCall if
          operand.getOperator == FlinkSqlOperatorTable.MATCH_PROCTIME ||
          operand.getOperator == FlinkSqlOperatorTable.MATCH_ROWTIME =>
        true
      case _ =>
        false
    }
  }

  override def visitCall(call: RexCall): RexNode = {
    val updatedCall = super.visitCall(call).asInstanceOf[RexCall]

    // materialize operands with time indicators
    val materializedOperands = updatedCall.getOperator match {

      // skip materialization for special operators
      case FlinkSqlOperatorTable.SESSION_OLD |
           FlinkSqlOperatorTable.HOP_OLD |
           FlinkSqlOperatorTable.TUMBLE_OLD =>
        updatedCall.getOperands.toList

      case _ =>
        updatedCall.getOperands.map { o =>
          if (isTimeIndicatorType(o.getType)) {
            if (isRowtimeIndicatorType(o.getType)) {
              // cast rowtime indicator to regular timestamp
              rexBuilder.makeAbstractCast(timestamp(o.getType.isNullable), o)
            } else {
              // generate proctime access
              rexBuilder.makeCall(FlinkSqlOperatorTable.PROCTIME_MATERIALIZE, o)
            }
          } else {
            o
          }
        }
    }

    // remove time indicator return type
    updatedCall.getOperator match {

      // we do not modify AS if operand has not been materialized
      case SqlStdOperatorTable.AS if
          isTimeIndicatorType(updatedCall.getOperands.get(0).getType) =>
        updatedCall

      // All calls in MEASURES and DEFINE are wrapped with FINAL/RUNNING, therefore
      // we should treat FINAL(MATCH_ROWTIME) and FINAL(MATCH_PROCTIME) as a time attribute
      // extraction
      case FINAL if updatedCall.getOperands.size() == 1
          && isMatchTimeIndicator(updatedCall.getOperands.get(0)) =>
        updatedCall

      // do not modify window time attributes
      case FlinkSqlOperatorTable.TUMBLE_ROWTIME |
           FlinkSqlOperatorTable.TUMBLE_PROCTIME |
           FlinkSqlOperatorTable.HOP_ROWTIME |
           FlinkSqlOperatorTable.HOP_PROCTIME |
           FlinkSqlOperatorTable.SESSION_ROWTIME |
           FlinkSqlOperatorTable.SESSION_PROCTIME |
           FlinkSqlOperatorTable.MATCH_ROWTIME |
           FlinkSqlOperatorTable.MATCH_PROCTIME
        // since we materialize groupings on time indicators,
        // we cannot check the operands anymore but the return type at least
        if isTimeIndicatorType(updatedCall.getType) =>
        updatedCall

      // materialize function's result and operands
      case _ if isTimeIndicatorType(updatedCall.getType) =>
        if (updatedCall.getOperator == FlinkSqlOperatorTable.PROCTIME) {
          updatedCall
        } else {
          updatedCall.clone(timestamp(updatedCall.getType.isNullable), materializedOperands)
        }

      // materialize function's operands only
      case _ =>
        updatedCall.clone(updatedCall.getType, materializedOperands)
    }
  }
}
/**
 * Helper class for shared logic of materializing time attributes in [[RelNode]] and [[RexNode]].
 */
class RexTimeIndicatorMaterializerUtils(rexBuilder: RexBuilder) {

  /** Regular TIMESTAMP(3) type that replaces a materialized time indicator. */
  private def timestamp(isNullable: Boolean): RelDataType = {
    val typeFactory = rexBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory]
    typeFactory.createFieldTypeFromLogicalType(new TimestampType(isNullable, 3))
  }

  /**
   * Wraps `input` into a projection of all its fields, materializing exactly
   * those whose index appears in `indicesToMaterialize`.
   */
  def projectAndMaterializeFields(input: RelNode, indicesToMaterialize: Set[Int]): RelNode = {
    val fieldExprs = input.getRowType.getFieldList.map { field =>
      val fieldRef = new RexInputRef(field.getIndex, field.getType)
      materializeIfContains(fieldRef, field.getIndex, indicesToMaterialize)
    }
    LogicalProject.create(
      input,
      JCollections.emptyList[RelHint](),
      fieldExprs,
      input.getRowType.getFieldNames)
  }

  /**
   * Copies `relType`, replacing every time-indicator field selected by
   * `shouldMaterialize` (keyed by field name) with a plain TIMESTAMP(3).
   */
  def getRowTypeWithoutIndicators(
      relType: RelDataType,
      shouldMaterialize: String => Boolean): RelDataType = {
    val outputTypeBuilder = rexBuilder
      .getTypeFactory
      .asInstanceOf[FlinkTypeFactory]
      .builder()
    for (field <- relType.getFieldList.asScala) {
      val resultType =
        if (isTimeIndicatorType(field.getType) && shouldMaterialize(field.getName)) {
          timestamp(field.getType.isNullable)
        } else {
          field.getType
        }
      outputTypeBuilder.add(field.getName, resultType)
    }
    outputTypeBuilder.build()
  }

  /** Materializes `expr` only when `index` is among the requested indices. */
  def materializeIfContains(expr: RexNode, index: Int, indicesToMaterialize: Set[Int]): RexNode =
    if (indicesToMaterialize.contains(index)) materialize(expr) else expr

  /** Turns a time indicator into a regular timestamp value; other expressions pass through. */
  def materialize(expr: RexNode): RexNode =
    if (isRowtimeIndicatorType(expr.getType)) {
      // cast rowtime indicator to regular timestamp
      rexBuilder.makeAbstractCast(timestamp(expr.getType.isNullable), expr)
    } else if (isTimeIndicatorType(expr.getType)) {
      // proctime indicator: generate an explicit proctime access
      rexBuilder.makeCall(FlinkSqlOperatorTable.PROCTIME_MATERIALIZE, expr)
    } else {
      expr
    }
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/calcite/RelTimeIndicatorConverter.scala | Scala | apache-2.0 | 26,358 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.File
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql.SparkSession.Builder
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.hive.execution.command.CarbonSetCommand
import org.apache.spark.sql.internal.{SessionState, SharedState}
import org.apache.spark.sql.optimizer.CarbonFilters
import org.apache.spark.sql.profiler.{Profiler, SQLStart}
import org.apache.spark.util.{CarbonReflectionUtils, Utils}
import org.apache.carbondata.common.annotations.InterfaceAudience
import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.{CarbonProperties, CarbonSessionInfo, ThreadLocalSessionInfo}
import org.apache.carbondata.store.SparkCarbonStore
import org.apache.carbondata.streaming.CarbonStreamingQueryListener
/**
* Session implementation for {org.apache.spark.sql.SparkSession}
* Implemented this class only to use our own SQL DDL commands.
* User needs to use {CarbonSession.getOrCreateCarbon} to create Carbon session.
*/
class CarbonSession(@transient val sc: SparkContext,
@transient private val existingSharedState: Option[SharedState],
@transient private val useHiveMetaStore: Boolean = true
) extends SparkSession(sc) { self =>
  // Convenience constructor: no pre-existing shared state (a fresh one is built lazily).
  def this(sc: SparkContext) {
    this(sc, None)
  }
  // Session state is created reflectively so the Carbon build can target multiple
  // Spark versions; `useHiveMetaStore` selects the catalog implementation.
  @transient
  override lazy val sessionState: SessionState = {
    CarbonReflectionUtils.getSessionState(sparkContext, this, useHiveMetaStore)
      .asInstanceOf[SessionState]
  }
/**
* State shared across sessions, including the `SparkContext`, cached data, listener,
* and a catalog that interacts with external systems.
*/
@transient
override lazy val sharedState: SharedState = {
existingSharedState match {
case Some(_) =>
val ss = existingSharedState.get
if (ss == null) {
new SharedState(sparkContext)
} else {
ss
}
case None =>
new SharedState(sparkContext)
}
}
  // Derives a sibling session that shares this session's SharedState and settings.
  override def newSession(): SparkSession = {
    new CarbonSession(sparkContext, Some(sharedState), useHiveMetaStore)
  }
/**
* Run search mode if enabled, otherwise run SparkSQL
*/
override def sql(sqlText: String): DataFrame = {
withProfiler(
sqlText,
(qe, sse) => {
if (isSearchModeEnabled) {
try {
trySearchMode(qe, sse)
} catch {
case e: Exception =>
log.error(String.format(
"Exception when executing search mode: %s", e.getMessage))
throw e;
}
} else {
new Dataset[Row](self, qe, RowEncoder(qe.analyzed.schema))
}
}
)
}
/**
* Return true if the specified sql statement will hit the datamap
* This API is for test purpose only
*/
@InterfaceAudience.Developer(Array("DataMap"))
def isDataMapHit(sqlStatement: String, dataMapName: String): Boolean = {
val message = sql(s"EXPLAIN $sqlStatement").collect()
message(0).getString(0).contains(dataMapName)
}
  // Search mode is considered active exactly while the search-mode store exists
  // (set in startSearchMode, cleared in stopSearchMode).
  def isSearchModeEnabled: Boolean = carbonStore != null
/**
* Run SparkSQL directly
*/
def sparkSql(sqlText: String): DataFrame = {
withProfiler(
sqlText,
(qe, sse) => new Dataset[Row](self, qe, RowEncoder(qe.analyzed.schema))
)
}
  /**
   * Parses and analyzes `sqlText`, recording profiler timestamps for each phase,
   * then delegates DataFrame construction to `generateDF`. Profiling messages are
   * emitted in the finally block even when parsing/analysis throws.
   */
  private def withProfiler(
      sqlText: String,
      generateDF: (QueryExecution, SQLStart) => DataFrame): DataFrame = {
    // One SQLStart record per statement, identified by a process-wide counter.
    val sse = SQLStart(sqlText, CarbonSession.statementId.getAndIncrement())
    CarbonSession.threadStatementId.set(sse.statementId)
    sse.startTime = System.currentTimeMillis()
    try {
      val logicalPlan = sessionState.sqlParser.parsePlan(sqlText)
      sse.parseEnd = System.currentTimeMillis()
      val qe = sessionState.executePlan(logicalPlan)
      qe.assertAnalyzed()
      // Commands (and unions made up solely of commands) run eagerly, so they
      // are finished — and fully profiled — by the time this method returns.
      sse.isCommand = qe.analyzed match {
        case c: Command => true
        case u @ Union(children) if children.forall(_.isInstanceOf[Command]) => true
        case _ => false
      }
      sse.analyzerEnd = System.currentTimeMillis()
      generateDF(qe, sse)
    } finally {
      Profiler.invokeIfEnable {
        if (sse.isCommand) {
          // Eagerly executed statement: close the record and send it now.
          sse.endTime = System.currentTimeMillis()
          Profiler.send(sse)
        } else {
          // Lazy query: keep the record so later execution can complete it.
          Profiler.addStatementMessage(sse.statementId, sse)
        }
      }
    }
  }
  /**
   * If the query is a simple query with filter, we will try to use Search Mode,
   * otherwise execute in SparkSQL
   *
   * Supported shapes: Project(Filter(carbon relation)) with an optional
   * GlobalLimit/LocalLimit wrapper; anything else falls back to SparkSQL.
   */
  private def trySearchMode(qe: QueryExecution, sse: SQLStart): DataFrame = {
    val analyzed = qe.analyzed
    val LOG: LogService = LogServiceFactory.getLogService(this.getClass.getName)
    analyzed match {
      // plain filter query on a Carbon relation
      case _@Project(columns, _@Filter(expr, s: SubqueryAlias))
        if s.child.isInstanceOf[LogicalRelation] &&
           s.child.asInstanceOf[LogicalRelation].relation
             .isInstanceOf[CarbonDatasourceHadoopRelation] =>
        LOG.info(s"Search service started and supports filter: ${sse.sqlText}")
        runSearch(analyzed, columns, expr, s.child.asInstanceOf[LogicalRelation])
      // filter query with LIMIT: forward both the global and local row limits
      case gl@GlobalLimit(_, ll@LocalLimit(_, p@Project(columns, _@Filter(expr, s: SubqueryAlias))))
        if s.child.isInstanceOf[LogicalRelation] &&
           s.child.asInstanceOf[LogicalRelation].relation
             .isInstanceOf[CarbonDatasourceHadoopRelation] =>
        val logicalRelation = s.child.asInstanceOf[LogicalRelation]
        LOG.info(s"Search service started and supports limit: ${sse.sqlText}")
        runSearch(analyzed, columns, expr, logicalRelation, gl.maxRows, ll.maxRows)
      // unsupported plan shape: execute through regular SparkSQL
      case _ =>
        LOG.info(s"Search service started, but don't support: ${sse.sqlText}," +
                 s" and will run it with SparkSQL")
        new Dataset[Row](self, qe, RowEncoder(qe.analyzed.schema))
    }
  }
  // Handle to the search-mode service; null whenever search mode is stopped.
  @transient private var carbonStore: SparkCarbonStore = _
def startSearchMode(): Unit = {
CarbonProperties.enableSearchMode(true)
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_VECTOR_READER, "false")
if (carbonStore == null) {
carbonStore = new SparkCarbonStore(this)
carbonStore.startSearchMode()
}
}
  /**
   * Turns search mode off: resets the carbon properties, re-enables the
   * vector reader, and stops the running SparkCarbonStore, if any.
   */
  def stopSearchMode(): Unit = {
    CarbonProperties.enableSearchMode(false)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_VECTOR_READER, "true")
    if (carbonStore != null) {
      try {
        carbonStore.stopSearchMode()
        carbonStore = null
      } catch {
        // NOTE(review): only RuntimeException is caught, and on failure
        // carbonStore stays non-null, so isSearchModeEnabled still reports
        // true — confirm this is intended.
        case e: RuntimeException =>
          LogServiceFactory.getLogService(this.getClass.getCanonicalName)
            .error(s"Stop search mode failed: ${e.getMessage}")
      }
    }
  }
private def runSearch(
logicalPlan: LogicalPlan,
columns: Seq[NamedExpression],
expr: Expression,
relation: LogicalRelation,
maxRows: Option[Long] = None,
localMaxRows: Option[Long] = None): DataFrame = {
val rows = carbonStore.search(
relation.relation.asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable,
columns.map(_.name).toArray,
if (expr != null) CarbonFilters.transformExpression(expr) else null,
maxRows.getOrElse(Long.MaxValue),
localMaxRows.getOrElse(Long.MaxValue))
val output = new java.util.ArrayList[Row]()
while (rows.hasNext) {
val row = rows.next()
output.add(Row.fromSeq(row.getData))
}
createDataFrame(output, logicalPlan.schema)
}
}
object CarbonSession {
  // Monotonically increasing id assigned to each SQL statement for profiling.
  private val statementId = new AtomicLong(0)
  // When true, sessions created through CarbonBuilder use Spark's in-memory
  // catalog instead of enabling Hive support.
  // NOTE(review): name misspells "Catalog" ("Catlog").
  private var enableInMemCatlog: Boolean = false
  // Statement id of the SQL statement currently executing on this thread.
  private[sql] val threadStatementId = new ThreadLocal[Long]()

  /**
   * Adds CarbonSession factory methods to SparkSession's Builder.
   */
  implicit class CarbonBuilder(builder: Builder) {

    /** Requests the in-memory catalog for sessions created afterwards. */
    def enableInMemoryCatalog(): Builder = {
      enableInMemCatlog = true
      builder
    }

    /** Creates/reuses a CarbonSession with default store and metastore paths. */
    def getOrCreateCarbonSession(): SparkSession = {
      getOrCreateCarbonSession(null, null)
    }

    /**
     * Creates/reuses a CarbonSession with the given store path and the
     * default metastore location.
     */
    def getOrCreateCarbonSession(storePath: String): SparkSession = {
      getOrCreateCarbonSession(
        storePath,
        new File(CarbonCommonConstants.METASTORE_LOCATION_DEFAULT_VAL).getCanonicalPath)
    }

    /**
     * Creates a new CarbonSession, or returns the thread-active or global
     * default session when a usable CarbonSession already exists. Mirrors
     * SparkSession.Builder.getOrCreate, additionally wiring up the carbon
     * store location, an embedded Derby Hive metastore (when no connection
     * URL is configured), the profiler, a Spark listener, and a streaming
     * query listener.
     *
     * @param storePath     carbon store location; when null, falls back to
     *                      carbon.properties and then the warehouse path
     * @param metaStorePath directory for the embedded Derby metastore, or
     *                      null to skip metastore configuration
     */
    def getOrCreateCarbonSession(storePath: String,
        metaStorePath: String): SparkSession = synchronized {
      if (!enableInMemCatlog) {
        builder.enableHiveSupport()
      }
      // Reflectively read the builder's private state (see getValue below).
      val options =
        getValue("options", builder).asInstanceOf[scala.collection.mutable.HashMap[String, String]]
      val userSuppliedContext: Option[SparkContext] =
        getValue("userSuppliedContext", builder).asInstanceOf[Option[SparkContext]]
      if (metaStorePath != null) {
        val hadoopConf = new Configuration()
        val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
        if (configFile != null) {
          hadoopConf.addResource(configFile)
        }
        // Only fall back to an embedded Derby metastore when no Hive
        // connection URL is configured in either options or hive-site.xml.
        if (options.get(CarbonCommonConstants.HIVE_CONNECTION_URL).isEmpty &&
            hadoopConf.get(CarbonCommonConstants.HIVE_CONNECTION_URL) == null) {
          val metaStorePathAbsolute = new File(metaStorePath).getCanonicalPath
          val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
          options ++= Map[String, String]((CarbonCommonConstants.HIVE_CONNECTION_URL,
            s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"))
        }
      }
      // Get the session from current thread's active session.
      var session: SparkSession = SparkSession.getActiveSession match {
        case Some(sparkSession: CarbonSession) =>
          if ((sparkSession ne null) && !sparkSession.sparkContext.isStopped) {
            options.foreach { case (k, v) => sparkSession.sessionState.conf.setConfString(k, v) }
            sparkSession
          } else {
            null
          }
        case _ => null
      }
      if (session ne null) {
        return session
      }
      // Global synchronization so we will only set the default session once.
      SparkSession.synchronized {
        // If the current thread does not have an active session, get it from the global session.
        session = SparkSession.getDefaultSession match {
          case Some(sparkSession: CarbonSession) =>
            if ((sparkSession ne null) && !sparkSession.sparkContext.isStopped) {
              options.foreach { case (k, v) => sparkSession.sessionState.conf.setConfString(k, v) }
              sparkSession
            } else {
              null
            }
          case _ => null
        }
        if (session ne null) {
          return session
        }
        // No active nor global default session. Create a new one.
        val sparkContext = userSuppliedContext.getOrElse {
          // set app name if not given
          val randomAppName = java.util.UUID.randomUUID().toString
          val sparkConf = new SparkConf()
          options.foreach { case (k, v) => sparkConf.set(k, v) }
          if (!sparkConf.contains("spark.app.name")) {
            sparkConf.setAppName(randomAppName)
          }
          val sc = SparkContext.getOrCreate(sparkConf)
          // maybe this is an existing SparkContext, update its SparkConf which maybe used
          // by SparkSession
          options.foreach { case (k, v) => sc.conf.set(k, v) }
          if (!sc.conf.contains("spark.app.name")) {
            sc.conf.setAppName(randomAppName)
          }
          sc
        }
        session = new CarbonSession(sparkContext, None, !enableInMemCatlog)
        val carbonProperties = CarbonProperties.getInstance()
        // Store-location precedence: explicit argument, then any value
        // already present in carbon.properties, then the warehouse path.
        if (storePath != null) {
          carbonProperties.addProperty(CarbonCommonConstants.STORE_LOCATION, storePath)
          // In case if it is in carbon.properties for backward compatible
        } else if (carbonProperties.getProperty(CarbonCommonConstants.STORE_LOCATION) == null) {
          carbonProperties.addProperty(CarbonCommonConstants.STORE_LOCATION,
            session.sessionState.conf.warehousePath)
        }
        options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
        SparkSession.setDefaultSession(session)
        // Setup monitor end point and register CarbonMonitorListener
        Profiler.initialize(sparkContext)
        // Register a successfully instantiated context to the singleton. This should be at the
        // end of the class definition so that the singleton is updated only if there is no
        // exception in the construction of the instance.
        CarbonToSparkAdapater.addSparkListener(sparkContext)
        session.streams.addListener(new CarbonStreamingQueryListener(session))
      }
      session
    }

    /**
     * It is a hack to get the private field from class.
     */
    def getValue(name: String, builder: Builder): Any = {
      val currentMirror = scala.reflect.runtime.currentMirror
      val instanceMirror = currentMirror.reflect(builder)
      val m = currentMirror.classSymbol(builder.getClass).
        toType.members.find { p =>
          p.name.toString.equals(name)
        }.get.asTerm
      instanceMirror.reflectField(m).get
    }
  }

  /**
   * Sets a validated carbon property on a clone of the thread-local
   * CarbonSessionInfo, so the setting affects only the current thread.
   */
  def threadSet(key: String, value: String): Unit = {
    var currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
    if (currentThreadSessionInfo == null) {
      currentThreadSessionInfo = new CarbonSessionInfo()
    }
    else {
      // Clone so concurrent readers of the old info are unaffected.
      currentThreadSessionInfo = currentThreadSessionInfo.clone()
    }
    val threadParams = currentThreadSessionInfo.getThreadParams
    CarbonSetCommand.validateAndSetValue(threadParams, key, value)
    ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfo)
  }

  /**
   * Stores an arbitrary object as thread-local extra info. Unlike the
   * String overload, the value is not validated.
   */
  def threadSet(key: String, value: Object): Unit = {
    var currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
    if (currentThreadSessionInfo == null) {
      currentThreadSessionInfo = new CarbonSessionInfo()
    }
    else {
      currentThreadSessionInfo = currentThreadSessionInfo.clone()
    }
    currentThreadSessionInfo.getThreadParams.setExtraInfo(key, value)
    ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfo)
  }

  /** Removes a thread-local property and its extra info, if any info is set. */
  def threadUnset(key: String): Unit = {
    val currentThreadSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
    if (currentThreadSessionInfo != null) {
      val currentThreadSessionInfoClone = currentThreadSessionInfo.clone()
      val threadParams = currentThreadSessionInfoClone.getThreadParams
      CarbonSetCommand.unsetValue(threadParams, key)
      threadParams.removeExtraInfo(key)
      ThreadLocalSessionInfo.setCarbonSessionInfo(currentThreadSessionInfoClone)
    }
  }

  /**
   * Copies the session's CarbonSessionInfo — overlaid with any existing
   * thread parameters — plus the session's Hadoop configuration into the
   * current thread's thread-locals.
   */
  def updateSessionInfoToCurrentThread(sparkSession: SparkSession): Unit = {
    val carbonSessionInfo = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.clone()
    val currentThreadSessionInfoOrig = ThreadLocalSessionInfo.getCarbonSessionInfo
    if (currentThreadSessionInfoOrig != null) {
      val currentThreadSessionInfo = currentThreadSessionInfoOrig.clone()
      // copy all the thread parameters to apply to session parameters
      currentThreadSessionInfo.getThreadParams.getAll.asScala
        .foreach(entry => carbonSessionInfo.getSessionParams.addProperty(entry._1, entry._2))
      carbonSessionInfo.setThreadParams(currentThreadSessionInfo.getThreadParams)
    }
    // preserve thread parameters across call
    ThreadLocalSessionInfo.setCarbonSessionInfo(carbonSessionInfo)
    ThreadLocalSessionInfo
      .setConfigurationToCurrentThread(sparkSession.sessionState.newHadoopConf())
  }
}
| sgururajshetty/carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala | Scala | apache-2.0 | 16,816 |
package chapter.nine
// Placeholder for "Scala for the Impatient" chapter 9, exercise 7 — no
// solution has been written yet.
object ExerciseSeven extends App {
}
| deekim/impatient-scala | src/main/scala/chapter/nine/ExerciseSeven.scala | Scala | apache-2.0 | 61 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import junit.framework.Assert._
import org.junit.Test
/**
 * Unit tests for CommandLineUtils.parseKeyValueArgs: parsing "key=value"
 * command-line arguments into a Properties object.
 */
class CommandLineUtilsTest {

  // JUnit's assertEquals(message, expected, actual) takes the expected value
  // BEFORE the actual one; the original tests had them swapped, which made
  // failure messages report the values backwards.

  @Test
  def testParseEmptyArg() {
    val argArray = Array("my.empty.property=")
    val props = CommandLineUtils.parseKeyValueArgs(argArray)
    assertEquals("Empty value should be equal to empty string", "", props.getProperty("my.empty.property"))
  }

  @Test
  def testParseSingleArg() {
    val argArray = Array("my.property=value")
    val props = CommandLineUtils.parseKeyValueArgs(argArray)
    assertEquals("Value of a single property should be 'value' ", "value", props.getProperty("my.property"))
  }

  @Test
  def testParseArgs() {
    val argArray = Array("first.property=first", "second.property=second")
    val props = CommandLineUtils.parseKeyValueArgs(argArray)
    assertEquals("Value of first property should be 'first'", "first", props.getProperty("first.property"))
    assertEquals("Value of second property should be 'second'", "second", props.getProperty("second.property"))
  }
}
| tempbottle/kafka | core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala | Scala | apache-2.0 | 1,811 |
package org.jetbrains.plugins.scala.debugger
import com.intellij.debugger.NoDataException
import com.intellij.debugger.engine.{ExtraSteppingFilter, SuspendContext}
import com.intellij.psi.PsiElement
import com.sun.jdi.Location
import com.sun.jdi.request.StepRequest
import org.jetbrains.plugins.scala.ScalaLanguage
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaEvaluatorBuilderUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
/**
 * @author Nikolay.Tropin
 *
 * Debugger stepping filter that requests another step-into whenever the
 * current location is a compiler-generated method with no counterpart in
 * the Scala source, so the user does not stop inside synthetic code.
 */
class ScalaSyntheticSteppingFilter extends ExtraSteppingFilter {

  // The filter applies when the current frame's method is synthetic.
  override def isApplicable(context: SuspendContext): Boolean = {
    val debugProcess = context.getDebugProcess
    val positionManager = new ScalaPositionManager(debugProcess)
    val location = context.getFrameProxy.location()
    isSyntheticMethod(location, positionManager)
  }

  // When the filter applies, keep stepping INTO until real code is reached.
  override def getStepRequestDepth(context: SuspendContext): Int = StepRequest.STEP_INTO

  /**
   * Decides whether the JVM method at `location` is synthetic, i.e. has no
   * matching function in the Scala source it was generated from.
   */
  private def isSyntheticMethod(location: Location, positionManager: ScalaPositionManager): Boolean = {
    val name = location.method().name()
    if (name.endsWith("$lzycompute")) return false //should step into the body of a lazy val
    if (positionManager.shouldSkip(location)) return true
    // apply* methods and constructors are always treated as user code.
    if (name.startsWith("apply")) return false
    if (name == "<init>") return false
    // NOTE(review): the `return`s below occur inside the inReadAction
    // closure — they rely on nonlocal return; confirm that is intended.
    inReadAction {
      val sourcePosition =
        try {
          positionManager.getSourcePosition(location)
        } catch {
          // No source mapping available: treat as non-synthetic.
          case _: NoDataException => return false
        }
      if (!sourcePosition.getFile.getLanguage.is(ScalaLanguage.Instance)) return false
      val classInSource = ScalaPositionManager.findGeneratingClassOrMethodParent(sourcePosition.getElementAt)
      if (classInSource == null) return false
      // Synthetic iff no source function in the generating class matches.
      !existsInSource(name, classInSource)
    }
  }

  /**
   * True if `td` (or a nested non-generated element) declares a function
   * whose Java-encoded name matches `name`. Local functions are matched by
   * substring — presumably because their JVM names carry enclosing-scope
   * mangling; verify against the name-encoding scheme.
   */
  private def existsInSource(name: String, td: PsiElement): Boolean = {
    td.depthFirst(elem => elem == td || !ScalaEvaluatorBuilderUtil.isGenerateClass(elem)).exists {
      case fun: ScFunction if fun.isLocal => name.contains(ScalaNamesUtil.toJavaName(fun.name))
      case fun: ScFunction => ScalaNamesUtil.toJavaName(fun.name) == name
      case _ => false
    }
  }
}
| SergeevPavel/intellij-scala | src/org/jetbrains/plugins/scala/debugger/ScalaSyntheticSteppingFilter.scala | Scala | apache-2.0 | 2,314 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate
package rest
import collection.immutable.Map
import java.io.File
import support.DefaultTemplatePackage
/**
 * Verifies that DefaultTemplatePackage automatically makes the `it`
 * attribute available to templates rendered for a model object.
 */
class DefaultTemplatePackageTest extends TemplateTestSupport {

  // Render the SSP template with an `it` attribute and check the output.
  test("template using 'it' attribute auto defined by ScalatePackage") {
    val attributes = Map("it" -> SomeObject("James", "Mells"))
    assertUriOutputContains("/org/fusesource/scalate/rest/SomeObject.index.ssp", attributes,
      "name: James town: Mells")
  }

  // Point the template engine at the test resources before any test runs.
  override protected def beforeAll(configMap: Map[String, Any]) = {
    super.beforeAll(configMap)
    engine.sourceDirectories = List(new File(baseDir, "src/test/resources"))
  }
}
/** Simple immutable fixture passed as the `it` attribute in template tests. */
case class SomeObject(name: String, town: String)
class SamplePackage extends DefaultTemplatePackage {
} | dnatic09/scalate | scalate-core/src/test/scala/org/fusesource/scalate/rest/DefaultTemplatePackageTest.scala | Scala | apache-2.0 | 1,498 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers.dsl
import org.scalatest._
import Matchers._
class StartWithWordSpec extends FreeSpec with FileMocks {
"StartWithWord " - {
"should have pretty toString" in {
startWith.toString should be ("startWith")
}
"apply(String) method returns Matcher" - {
val mt = startWith ("Pr")
"should have pretty toString" in {
mt.toString should be ("startWith (\\"Pr\\")")
}
val mr = mt("Programmer")
"should have correct MatcherResult" in {
mr.matches shouldBe true
mr.failureMessage shouldBe "\\"Programmer\\" did not start with substring \\"Pr\\""
mr.negatedFailureMessage shouldBe "\\"Programmer\\" started with substring \\"Pr\\""
mr.midSentenceFailureMessage shouldBe "\\"Programmer\\" did not start with substring \\"Pr\\""
mr.midSentenceNegatedFailureMessage shouldBe "\\"Programmer\\" started with substring \\"Pr\\""
mr.rawFailureMessage shouldBe "{0} did not start with substring {1}"
mr.rawNegatedFailureMessage shouldBe "{0} started with substring {1}"
mr.rawMidSentenceFailureMessage shouldBe "{0} did not start with substring {1}"
mr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with substring {1}"
mr.failureMessageArgs shouldBe Vector("Programmer", "Pr")
mr.negatedFailureMessageArgs shouldBe Vector("Programmer", "Pr")
mr.midSentenceFailureMessageArgs shouldBe Vector("Programmer", "Pr")
mr.midSentenceNegatedFailureMessageArgs shouldBe Vector("Programmer", "Pr")
}
val nmr = mr.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe false
nmr.failureMessage shouldBe "\\"Programmer\\" started with substring \\"Pr\\""
nmr.negatedFailureMessage shouldBe "\\"Programmer\\" did not start with substring \\"Pr\\""
nmr.midSentenceFailureMessage shouldBe "\\"Programmer\\" started with substring \\"Pr\\""
nmr.midSentenceNegatedFailureMessage shouldBe "\\"Programmer\\" did not start with substring \\"Pr\\""
nmr.rawFailureMessage shouldBe "{0} started with substring {1}"
nmr.rawNegatedFailureMessage shouldBe "{0} did not start with substring {1}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with substring {1}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} did not start with substring {1}"
nmr.failureMessageArgs shouldBe Vector("Programmer", "Pr")
nmr.negatedFailureMessageArgs shouldBe Vector("Programmer", "Pr")
nmr.midSentenceFailureMessageArgs shouldBe Vector("Programmer", "Pr")
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("Programmer", "Pr")
}
}
"regex(String) method returns Matcher" - {
val decimal = """(-)?(\\d+)(\\.\\d*)?"""
val mt = startWith regex decimal
"should have pretty toString" in {
mt.toString should be ("startWith regex " + decimal)
}
val mr = mt("2.7b")
"should have correct MatcherResult" in {
mr.matches shouldBe true
mr.failureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
mr.negatedFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
mr.midSentenceFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
mr.midSentenceNegatedFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
mr.rawFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
mr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
mr.rawMidSentenceFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
mr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
mr.failureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.negatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.midSentenceFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.midSentenceNegatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
}
val nmr = mr.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe false
nmr.failureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
nmr.negatedFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
nmr.midSentenceFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
nmr.midSentenceNegatedFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
nmr.rawNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
nmr.failureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.negatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.midSentenceFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
}
}
"regex(Regex) method returns Matcher" - {
val decimal = """(-)?(\\d+)(\\.\\d*)?"""
val mt = startWith regex decimal.r
"should have pretty toString" in {
mt.toString should be ("startWith regex " + decimal)
}
val mr = mt("2.7b")
"should have correct MatcherResult" in {
mr.matches shouldBe true
mr.failureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
mr.negatedFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
mr.midSentenceFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
mr.midSentenceNegatedFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
mr.rawFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
mr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
mr.rawMidSentenceFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
mr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
mr.failureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.negatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.midSentenceFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
mr.midSentenceNegatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
}
val nmr = mr.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe false
nmr.failureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
nmr.negatedFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
nmr.midSentenceFailureMessage shouldBe "\\"2.7b\\" started with a substring that matched the regular expression " + decimal
nmr.midSentenceNegatedFailureMessage shouldBe "\\"2.7b\\" did not start with a substring that matched the regular expression " + decimal
nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
nmr.rawNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
nmr.failureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.negatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.midSentenceFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("2.7b", UnquotedString(decimal))
}
}
"regex(a(b*)c withGroup bb) method returns Matcher" - {
val bb = "bb"
val mt = startWith regex ("""a(b*)c""" withGroup bb)
"should have pretty toString" in {
mt.toString should be ("startWith regex \\"a(b*)c\\" withGroup (\\"" + bb + "\\")")
}
val mr1 = mt("abbc")
"when apply with \\"abbc\\"" - {
"should have correct MatcherResult" in {
mr1.matches shouldBe true
mr1.failureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bb\\" did not match group bb"
mr1.negatedFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
mr1.midSentenceFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bb\\" did not match group bb"
mr1.midSentenceNegatedFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
mr1.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
mr1.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr1.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
mr1.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr1.failureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), "bb", UnquotedString("bb"))
mr1.negatedFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
mr1.midSentenceFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), "bb", UnquotedString("bb"))
mr1.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
}
val nmr = mr1.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe false
nmr.failureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
nmr.negatedFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bb\\" did not match group bb"
nmr.midSentenceFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
nmr.midSentenceNegatedFailureMessage shouldBe "\\"abbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bb\\" did not match group bb"
nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
nmr.failureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
nmr.negatedFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), "bb", UnquotedString("bb"))
nmr.midSentenceFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbc", UnquotedString("a(b*)c"), "bb", UnquotedString("bb"))
}
}
val mr2 = mt("abbbc")
"when apply with \\"abbbc\\"" - {
"should have correct MatcherResult" in {
mr2.matches shouldBe false
mr2.failureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"
mr2.negatedFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
mr2.midSentenceFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"
mr2.midSentenceNegatedFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
mr2.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
mr2.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr2.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
mr2.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr2.failureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), "bbb", UnquotedString("bb"))
mr2.negatedFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
mr2.midSentenceFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), "bbb", UnquotedString("bb"))
mr2.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
}
val nmr = mr2.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe true
nmr.failureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
nmr.negatedFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"
nmr.midSentenceFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c and group bb"
nmr.midSentenceNegatedFailureMessage shouldBe "\\"abbbc\\" started with a substring that matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"
nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3}"
nmr.failureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
nmr.negatedFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), "bbb", UnquotedString("bb"))
nmr.midSentenceFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), UnquotedString("bb"))
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbbc", UnquotedString("a(b*)c"), "bbb", UnquotedString("bb"))
}
}
      val mr3 = mt("ABBC")
      // The regex "a(b*)c" is case-sensitive, so "ABBC" does not match at all.
      // In this case the (un-negated) failure is a plain "did not start with"
      // message with only {0}=subject and {1}=regex args — no group information,
      // unlike the partial-match case above where the group expectation fails.
      "when apply with \\"ABBC\\"" - {
        "should have correct MatcherResult" in {
          mr3.matches shouldBe false
          // Formatted messages: mid-sentence variants are identical to the
          // sentence-initial ones here (no capitalization difference).
          mr3.failureMessage shouldBe "\\"ABBC\\" did not start with a substring that matched the regular expression a(b*)c"
          mr3.negatedFailureMessage shouldBe "\\"ABBC\\" started with a substring that matched the regular expression a(b*)c"
          mr3.midSentenceFailureMessage shouldBe "\\"ABBC\\" did not start with a substring that matched the regular expression a(b*)c"
          mr3.midSentenceNegatedFailureMessage shouldBe "\\"ABBC\\" started with a substring that matched the regular expression a(b*)c"
          // Raw templates use MessageFormat-style {n} placeholders.
          mr3.rawFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
          mr3.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
          mr3.rawMidSentenceFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
          mr3.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
          // Args: the regex is wrapped in UnquotedString so it is not re-quoted
          // when substituted into the template; the subject string is quoted.
          mr3.failureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          mr3.negatedFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          mr3.midSentenceFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          mr3.midSentenceNegatedFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
        }
        val nmr = mr3.negated
        // Negating the result must swap every failure/negated-failure pair
        // (messages, raw templates, and args) and flip `matches`.
        "should have correct negated MatcherResult" in {
          nmr.matches shouldBe true
          nmr.failureMessage shouldBe "\\"ABBC\\" started with a substring that matched the regular expression a(b*)c"
          nmr.negatedFailureMessage shouldBe "\\"ABBC\\" did not start with a substring that matched the regular expression a(b*)c"
          nmr.midSentenceFailureMessage shouldBe "\\"ABBC\\" started with a substring that matched the regular expression a(b*)c"
          nmr.midSentenceNegatedFailureMessage shouldBe "\\"ABBC\\" did not start with a substring that matched the regular expression a(b*)c"
          nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
          nmr.rawNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
          nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}"
          nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} did not start with a substring that matched the regular expression {1}"
          nmr.failureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          nmr.negatedFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          nmr.midSentenceFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
          nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("ABBC", UnquotedString("a(b*)c"))
        }
      }
}
"regex(a(b*)(c*) withGroup bb) method returns Matcher" - {
val bb = "bb"
val cc = "cc"
val mt = startWith regex ("""a(b*)(c*)""" withGroups (bb, cc))
"should have pretty toString" in {
mt.toString should be ("startWith regex \\"a(b*)(c*)\\" withGroups (\\"" + bb + "\\", \\"" + cc + "\\")")
}
val mr = mt("abbccc")
"should have correct MatcherResult" in {
mr.matches shouldBe false
mr.failureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"
mr.negatedFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*) and group bb, cc"
mr.midSentenceFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"
mr.midSentenceNegatedFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*) and group bb, cc"
mr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3} at index {4}"
mr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3} at index {4}"
mr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
mr.failureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), "ccc", UnquotedString("cc"), 1)
mr.negatedFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), UnquotedString("bb, cc"))
mr.midSentenceFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), "ccc", UnquotedString("cc"), 1)
mr.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), UnquotedString("bb, cc"))
}
val nmr = mr.negated
"should have correct negated MatcherResult" in {
nmr.matches shouldBe true
nmr.failureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*) and group bb, cc"
nmr.negatedFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"
nmr.midSentenceFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*) and group bb, cc"
nmr.midSentenceNegatedFailureMessage shouldBe "\\"abbccc\\" started with a substring that matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"
nmr.rawFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3} at index {4}"
nmr.rawMidSentenceFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1} and group {2}"
nmr.rawMidSentenceNegatedFailureMessage shouldBe "{0} started with a substring that matched the regular expression {1}, but {2} did not match group {3} at index {4}"
nmr.failureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), UnquotedString("bb, cc"))
nmr.negatedFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), "ccc", UnquotedString("cc"), 1)
nmr.midSentenceFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), UnquotedString("bb, cc"))
nmr.midSentenceNegatedFailureMessageArgs shouldBe Vector("abbccc", UnquotedString("a(b*)(c*)"), "ccc", UnquotedString("cc"), 1)
}
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/matchers/dsl/StartWithWordSpec.scala | Scala | apache-2.0 | 24,887 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.