code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
/** Box AC120A: amortisation removed on disposal of goodwill, within the
  * intangible-assets note. Holds an optional user-entered integer amount
  * treated as a debit.
  */
case class AC120A(value: Option[Int])
  extends CtBoxIdentifier(name = "Intangible assets - Goodwill - Amortisation - Amortisation on disposals")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever]
  with Validators
  with Debit {

  // The only constraint: when present, the value must be a valid non-negative money amount.
  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] =
    collectErrors(validateMoney(value, min = 0))
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC120A.scala | Scala | apache-2.0 | 1,178 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl
import org.junit.runner.RunWith
import com.comcast.xfinity.sirius.NiceTest
import org.scalatestplus.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class SiriusStateTest extends NiceTest {

  describe("a SiriusState") {
    it ("should be able to tell when all subsystems are online") {
      // A freshly constructed state must report not-initialized; after the
      // state flag is flipped via copy, the readiness check must succeed.
      val initialState = new SiriusState
      assert(false === initialState.areSubsystemsInitialized)

      val readyState = initialState.copy(stateInitialized = true)
      assert(true === readyState.areSubsystemsInitialized)
    }
  }
}
| Comcast/sirius | src/test/scala/com/comcast/xfinity/sirius/api/impl/SiriusStateTest.scala | Scala | apache-2.0 | 1,192 |
package com.alexitc.coinalerts.tasks
import java.math.MathContext
import javax.inject.Inject
import com.alexitc.coinalerts.config.TaskExecutionContext
import com.alexitc.coinalerts.data.async.{FixedPriceAlertFutureDataHandler, UserFutureDataHandler}
import com.alexitc.coinalerts.models._
import com.alexitc.coinalerts.services.EmailMessagesProvider.EmailText
import com.alexitc.coinalerts.services.{EmailMessagesProvider, EmailServiceTrait}
import com.alexitc.coinalerts.tasks.collectors._
import com.alexitc.coinalerts.tasks.models.FixedPriceAlertEvent
import com.alexitc.playsonify.core.FutureOr.Implicits.FutureOps
import org.scalactic.{Bad, Good}
import org.slf4j.LoggerFactory
import play.api.i18n.{Lang, MessagesApi}
import scala.concurrent.Future
import scala.util.control.NonFatal
/** Background task that polls every supported exchange's ticker collector,
  * matches the retrieved prices against users' fixed-price alerts, and emails
  * each affected user a localized summary of the alerts that fired.
  */
class FixedPriceAlertsTask @Inject()(
    alertCollector: FixedPriceAlertCollector,
    bitsoTickerCollector: BitsoTickerCollector,
    bittrexAlertCollector: BittrexTickerCollector,
    kucoinTickerCollector: KucoinTickerCollector,
    binanceTickerCollector: BinanceTickerCollector,
    hitbtcTickerCollector: HitbtcTickerCollector,
    coinmarketcapTickerCollector: CoinmarketcapTickerCollector,
    userDataHandler: UserFutureDataHandler,
    alertDataHandler: FixedPriceAlertFutureDataHandler,
    emailMessagesProvider: EmailMessagesProvider,
    messagesApi: MessagesApi,
    emailServiceTrait: EmailServiceTrait)(implicit ec: TaskExecutionContext) {

  private val logger = LoggerFactory.getLogger(this.getClass)

  // Every ticker collector that is checked on each run.
  private val tickerCollectorList = List(
    bitsoTickerCollector,
    bittrexAlertCollector,
    kucoinTickerCollector,
    binanceTickerCollector,
    hitbtcTickerCollector,
    coinmarketcapTickerCollector)

  /** Collects triggered alert events from every exchange, groups them by user,
    * and sends one email per user.
    *
    * NOTE(review): the futures returned by triggerAlerts are discarded, so the
    * returned Future completes once the per-user work has been *started*, not
    * when emails have actually been sent — confirm this is intended.
    */
  def execute(): Future[Unit] = {
    val futures = tickerCollectorList.map { tickerCollector =>
      alertCollector.collect(tickerCollector)
    }

    Future
      .sequence(futures)
      .map(_.flatten)
      .map(groupByUser)
      .flatMap { userAlerts =>
        userAlerts.foreach {
          case (userId, eventList) => triggerAlerts(userId, eventList)
        }
        Future.unit
      }
  }

  // Groups events by owning user so each user receives a single email.
  private def groupByUser(eventList: List[FixedPriceAlertEvent]): Map[UserId, List[FixedPriceAlertEvent]] = {
    eventList.groupBy(_.alert.userId)
  }

  /** Emails the given (verified) user their triggered alerts, localized by the
    * user's preferences, then marks each alert as triggered.
    *
    * NOTE(review): the markAsTriggered futures are fire-and-forget — failures
    * to persist the triggered flag are neither logged nor retried here.
    */
  private def triggerAlerts(userId: UserId, eventList: List[FixedPriceAlertEvent]): Future[Unit] = {
    val result = for {
      user <- userDataHandler.getVerifiedUserById(userId).toFutureOr
      preferences <- userDataHandler.getUserPreferences(userId).toFutureOr
      _ <- {
        // Subject and body are rendered in the user's preferred language.
        val emailSubject = emailMessagesProvider.yourAlertsSubject(preferences.lang)
        val emailText = createEmailText(eventList)(preferences.lang)
        emailServiceTrait.sendEmail(user.email, emailSubject, emailText).toFutureOr
      }
    } yield
      eventList.foreach { event =>
        alertDataHandler.markAsTriggered(event.alert.id)
      }

    result.toFuture
      .map {
        case Good(_) => ()
        case Bad(errors) =>
          logger.error(s"Error while trying to send alerts by email to user = [${userId.string}], errors = [$errors]")
      }
      .recover {
        case NonFatal(ex) =>
          logger.error(s"Error while trying to send alerts by email to user = [${userId.string}]", ex)
      }
  }

  // Groups one user's events by exchange so the email shows one section per market.
  private def groupByMarket(eventList: List[FixedPriceAlertEvent]): Map[Exchange, List[FixedPriceAlertEvent]] = {
    eventList.groupBy(_.alert.exchange)
  }

  // Renders the email body: one section per exchange, each listing its alert lines.
  private def createEmailText(eventList: List[FixedPriceAlertEvent])(implicit lang: Lang): EmailText = {
    val text = groupByMarket(eventList)
      .map {
        case (market, marketEvents) =>
          val marketLines = marketEvents.map(createText).mkString("\\n")
          s"${market.string}:\\n$marketLines"
      }
      .mkString("\\n\\n\\n")

    emailMessagesProvider.yourFixedPriceAlertsText(text)
  }

  // Renders a single localized alert line, appending the percentage move
  // relative to the alert's base price when a base price exists.
  private def createText(event: FixedPriceAlertEvent)(implicit lang: Lang): String = {
    val alert = event.alert

    // Absolute percentage difference between current and base price, if any.
    val percentageDifferenceMaybe = alert.basePrice.map { basePrice =>
      val percentage = 100 * (1 - basePrice / event.currentPrice)
      percentage.abs
    }

    val messageKey = if (alert.isGreaterThan) {
      "message.alert.priceIncreased"
    } else {
      "message.alert.priceDecreased"
    }

    // e.g. "BTC (Bitcoin)" when a currency name is known, otherwise just "BTC".
    val readableCurrency = alert.currencyName
      .map { name =>
        s"${alert.currency.string} (${name.string})"
      }
      .getOrElse(alert.currency.string)

    val message = messagesApi(messageKey, readableCurrency, event.currentPrice.toString, event.alert.market.string)

    percentageDifferenceMaybe
      .map { percent =>
        // Render the percentage with 4 significant digits.
        val readablePercent = percent.round(new MathContext(4))
        s"$message ($readablePercent %)"
      }
      .getOrElse {
        message
      }
  }
}
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/tasks/FixedPriceAlertsTask.scala | Scala | gpl-3.0 | 4,813 |
package org.cddb.lsmt
// Placeholder for the LSM-tree block manager; no behavior is implemented yet.
class BlockManager {
}

// Companion object, currently empty.
object BlockManager {
}
| tierex/cddb | core/src/main/scala/org/cddb/lsmt/BlockManager.scala | Scala | apache-2.0 | 74 |
/*
* Copyright (c) 2016. Fengguo (Hugo) Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.cit.intellij.jawa.annotator
import com.intellij.lang.annotation.AnnotationHolder
import com.intellij.psi.PsiElement
import org.argus.cit.intellij.jawa.highlighter.AnnotatorHighlighter
import org.argus.cit.intellij.jawa.lang.psi._
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
/** Annotator that applies syntax-highlighting attributes to Jawa PSI elements.
  * Every visit method follows the same pattern: delegate the element to
  * AnnotatorHighlighter, then call the super visitor so that traversal
  * continues into child elements.
  */
class JawaHighlightingAnnotator extends JawaAnnotator {
  override def annotate(psiElement: PsiElement, annotationHolder: AnnotationHolder): Unit = psiElement.accept(new JawaVisitor{
    override def visitTypeDefSymbol(o: JawaTypeDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitTypeDefSymbol(o)
    }
    override def visitTypeSymbol(o:JawaTypeSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitTypeSymbol(o)
    }
    override def visitVarDefSymbol(o: JawaVarDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitVarDefSymbol(o)
    }
    override def visitVarSymbol(o:JawaVarSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitVarSymbol(o)
    }
    override def visitFieldDefSymbol(o: JawaFieldDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitFieldDefSymbol(o)
    }
    override def visitFieldNameSymbol(o:JawaFieldNameSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitFieldNameSymbol(o)
    }
    override def visitStaticFieldDefSymbol(o: JawaStaticFieldDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitStaticFieldDefSymbol(o)
    }
    override def visitStaticFieldNameSymbol(o:JawaStaticFieldNameSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitStaticFieldNameSymbol(o)
    }
    override def visitMethodDefSymbol(o: JawaMethodDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitMethodDefSymbol(o)
    }
    override def visitMethodNameSymbol(o:JawaMethodNameSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitMethodNameSymbol(o)
    }
    override def visitLocationDefSymbol(o: JawaLocationDefSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitLocationDefSymbol(o)
    }
    override def visitLocationSymbol(o:JawaLocationSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitLocationSymbol(o)
    }
    override def visitDefaultAnnotation(o: JawaDefaultAnnotation): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitDefaultAnnotation(o)
    }
    override def visitNumberLiteral(o: JawaNumberLiteral): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitNumberLiteral(o)
    }
    override def visitParam(o: JawaParam): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitParam(o)
    }
    override def visitSignatureSymbol(o: JawaSignatureSymbol): Unit = {
      AnnotatorHighlighter.highlightElement(o, annotationHolder)
      super.visitSignatureSymbol(o)
    }
  })
} | arguslab/argus-cit-intellij | src/main/scala/org/argus/cit/intellij/jawa/annotator/JawaHighlightingAnnotator.scala | Scala | epl-1.0 | 3,696 |
/*
* @author Philip Stutz
*
* Copyright 2014 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush.japi
import com.signalcollect.triplerush.{ TripleRush => ScalaTripleRush }
import com.signalcollect.triplerush.sparql.Sparql
/**
* Java wrapper for TripleRush.
*/
class TripleRush {

  // Underlying Scala TripleRush instance that every call delegates to.
  protected val wrappedScalaTripleRush = ScalaTripleRush()

  /**
   * Must be called after loading and before executing queries. Among other
   * things it gathers the triple statistics used by the query optimizer.
   */
  def prepareExecution: Unit = wrappedScalaTripleRush.prepareExecution

  /**
   * Adds the triple (s, p, o) to the store. TripleRush has no special
   * treatment for literals yet, so every term is handled as a plain string.
   */
  def addTriple(s: String, p: String, o: String): Unit =
    wrappedScalaTripleRush.addTriple(s, p, o)

  /**
   * Loads the triples contained in the ntriples file at 'filePath'.
   */
  def loadNtriples(filePath: String): Unit =
    wrappedScalaTripleRush.loadNtriples(filePath)

  /**
   * Executes the SPARQL query and returns an iterator over result bindings.
   * Each binding maps a variable name (e.g. "x") to its bound value.
   */
  def sparql(query: String): Iterator[String => String] = {
    val parsed = Sparql(query)(wrappedScalaTripleRush)
    if (parsed.isEmpty) Iterator.empty else parsed.get.resultIterator
  }

  /**
   * Releases the resources used by TripleRush.
   */
  def shutdown: Unit = wrappedScalaTripleRush.shutdown
}
| jacqueslk/triplerush-filter | src/main/scala/com/signalcollect/triplerush/japi/TripleRush.scala | Scala | apache-2.0 | 2,266 |
package au.net.hivemedia.crateconnector
import java.io.IOException
import io.crate.client.CrateClient
import org.scalatest.{Matchers, FlatSpec}
/**
* Test Case Class to test the storage and retrevial of items from database
*/
// Covers one field of every primitive and collection type the connector must
// be able to persist and read back.
case class TestObject(testInt: Int, testString: String, testBoolean: Boolean,
                      testShort: Short, testDouble: Double, testLong: Long,
                      testFloat: Float, testByte: Byte, testList: List[_],
                      testMap: Map[String, _], testSet: Set[_]) extends CrateObject
/**
* Scala Test Spec used to test
* the functionality of CrateConnector
*
* @author Liam Haworth
* @version 1.0
*/
/** Integration spec for CrateConnector.
  *
  * NOTE(review): requires a running Crate node at crateDatabaseServer, and the
  * tests are order-dependent (create -> insert -> select -> update -> drop).
  */
class CrateConnectorSpec extends FlatSpec with Matchers {

  // Address of the Crate node the tests run against.
  val crateDatabaseServer = "localhost:4300"

  // A fully populated instance exercising every supported field type.
  val testObject = TestObject(Int.MaxValue, "Testing123", true, Short.MaxValue, 9.87654321D, Long.MaxValue, 1.2345f, 0x32, List(1, "two", 0x03, 0.4f), Map("Test" -> "Map"), Set("1", 2, 0x3))

  "CrateObject" should "be the superclass of TestObject" in {
    testObject.getClass.getSuperclass should equal(classOf[CrateObject])
  }

  it should "expose helper methods on objects extending it" in {
    testObject.getClass.getSuperclass.getDeclaredMethods.map(_.getName) should contain ("insert")
    testObject.getClass.getSuperclass.getDeclaredMethods.map(_.getName) should contain ("update")
  }

  "CreateConnector" should "throw an IOException when no CrateClient is defined" in {
    a [IOException] should be thrownBy {
      CrateConnector.create("testdb", classOf[TestObject])(null)
    }
  }

  it should "create a table in test database based on case class" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    CrateConnector.create("testdb", classOf[TestObject]) should be (true)
    // Verify through the information schema that the table now exists.
    val sqlResult = crateClient.sql("select * from information_schema.tables where table_name='testobject' and schema_name='testdb'").get
    sqlResult.rowCount() should be (1)
  }

  it should "be able to test the existence of the table" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    CrateConnector.exists("testdb", classOf[TestObject]) should be (true)
  }

  it should "insert an object into a table" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    var sqlResult = crateClient.sql("select * from testdb.testobject").get
    sqlResult.rowCount() should be (0)
    testObject.insert("testdb")
    // Wait for the write to become visible — presumably the cluster's refresh
    // interval; TODO confirm and prefer an explicit refresh over sleeping.
    Thread.sleep(2500)
    sqlResult = crateClient.sql("select * from testdb.testobject").get
    sqlResult.rowCount() should be (1)
  }

  it should "make a list of objects from database" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    val objects = CrateConnector.select[TestObject]("testdb", classOf[TestObject])
    objects.size should be (1)
  }

  it should "update a record from an object and conditional" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    var objects = CrateConnector.select[TestObject]("testdb", classOf[TestObject])
    objects(0).testInt should equal(Int.MaxValue)
    // Same object but with testInt flipped to Int.MinValue; matched by testString.
    val updatedObject = TestObject(Int.MinValue, "Testing123", true, Short.MaxValue, 9.87654321D, Long.MaxValue, 1.2345f, 0x32, List(1, "two", 0x03, 0.4f), Map("Test" -> "Map"), Set("1", 2, 0x3))
    updatedObject.update("testdb", "where testString = 'Testing123'")
    Thread.sleep(2500)
    objects = CrateConnector.select[TestObject]("testdb", classOf[TestObject])
    objects(0).testInt should equal(Int.MinValue)
  }

  it should "drop the table" in {
    implicit val crateClient = new CrateClient(crateDatabaseServer)
    CrateConnector.exists("testdb", classOf[TestObject]) should be (true)
    CrateConnector.drop("testdb", classOf[TestObject]) should be (true)
  }
}
| HiveMedia/crate-connector | src/test/scala/au/net/hivemedia/crateconnector/CrateConnectorSpec.scala | Scala | apache-2.0 | 3,782 |
package com.idyria.osi.ooxoo.db
| richnou/ooxoo-db | src/main/scala/com/idyria/osi/ooxoo/db/DBOption.scala | Scala | lgpl-3.0 | 33 |
/*
* Copyright (c) 2012-2013 SnowPlow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.hadoop.hive
// Specs2
import org.specs2.mutable.Specification
// SnowPlow Utils
import com.snowplowanalytics.util.Tap._
// Deserializer
import test.{SnowPlowDeserializer, SnowPlowEvent, SnowPlowTest}
class StructEventTest extends Specification {

  // Toggle if tests are failing and you want to inspect the struct contents
  implicit val _DEBUG = false

  // A raw CloudFront access-log line carrying a structured event (&e=ev with
  // category/action/label/property/value fields in the querystring).
  val row = "2012-05-27 11:35:53 DFW3 3343 99.116.172.58 GET d3gs014xn8p70.cloudfront.net /ice.png 200 http://www.psychicbazaar.com/2-tarot-cards/genre/all/type/all?p=5 Mozilla/5.0%20(Windows%20NT%206.1;%20WOW64;%20rv:12.0)%20Gecko/20100101%20Firefox/12.0 &e=ev&ev_ca=Mixes&ev_ac=Play&ev_la=MRC%2Ffabric-0503-mix&ev_pr=mp3&ev_va=0.0&tid=191001&duid=ea7de42957742fbb&vid=1&aid=CFe23a&lang=en-GB&f_pdf=0&f_qt=1&f_realp=0&f_wma=1&f_dir=0&f_fla=1&f_java=1&f_gears=0&f_ag=0&res=1920x1080&cookie=1&url=file%3A%2F%2F%2Fhome%2Falex%2Fasync.html"

  // The field values the deserializer is expected to extract from `row`.
  val expected = new SnowPlowEvent().tap { e =>
    e.dt = "2012-05-27"
    e.collector_dt = "2012-05-27"
    e.collector_tm = "11:35:53"
    e.event = "struct" // Structured event
    e.event_vendor = "com.snowplowanalytics"
    e.txn_id = "191001"
    e.ev_category = "Mixes"
    e.ev_action = "Play"
    e.ev_label = "MRC/fabric-0503-mix"
    e.ev_property = "mp3"
    e.ev_value = "0.0"
  }

  "The SnowPlow event row \"%s\"".format(row) should {
    val actual = SnowPlowDeserializer.deserialize(row)

    // General fields
    "have dt (Legacy Hive Date) = %s".format(expected.dt) in {
      actual.dt must_== expected.dt
    }
    "have collector_dt (Collector Date) = %s".format(expected.collector_dt) in {
      actual.collector_dt must_== expected.collector_dt
    }
    "have collector_tm (Collector Time) = %s".format(expected.collector_tm) in {
      actual.collector_tm must_== expected.collector_tm
    }
    "have event (Event Type) = %s".format(expected.event) in {
      actual.event must_== expected.event
    }
    "have event_vendor (Event Vendor) = %s".format(expected.event_vendor) in {
      actual.event_vendor must_== expected.event_vendor
    }
    // event_id is generated, so only its shape can be checked.
    "have a valid (stringly-typed UUID) event_id" in {
      SnowPlowTest.stringlyTypedUuid(actual.event_id) must_== actual.event_id
    }
    "have txn_id (Transaction ID) = %s".format(expected.txn_id) in {
      actual.txn_id must_== expected.txn_id
    }
    // The event fields
    "have ev_category (Event Category) = %s".format(expected.ev_category) in {
      actual.ev_category must_== expected.ev_category
    }
    "have ev_action (Event Action) = %s".format(expected.ev_action) in {
      actual.ev_action must_== expected.ev_action
    }
    "have ev_label (Event Label) = %s".format(expected.ev_label) in {
      actual.ev_label must_== expected.ev_label
    }
    "have ev_property (Event Property) = %s".format(expected.ev_property) in {
      actual.ev_property must_== expected.ev_property
    }
    "have ev_value (Event Value) = %s".format(expected.ev_value) in {
      actual.ev_value must_== expected.ev_value
    }
  }
} | richo/snowplow | 3-etl/hive-etl/snowplow-log-deserializers/src/test/scala/com/snowplowanalytics/snowplow/hadoop/hive/StructEventTest.scala | Scala | apache-2.0 | 3,786 |
import scala.language.higherKinds
// Type-level linked list of fuel (燃料) pieces — "firewood" (柴). Every
// operation is mirrored at both the type level and the value level.
// 燃料, 火苗 (flame) and 零燃料 (zero fuel) are defined elsewhere in the project.
trait 柴 {
  // Tail of the stack: the wood underneath this piece.
  type 尾 <: 柴
  def 尾: 尾

  // Fuel carried by the head piece.
  type 当前燃料 <: 燃料
  def 当前燃料: 当前燃料

  // 初始化 ("initialize"): prepend a new piece of fuel, yielding a longer stack.
  type 初始化[I <: 燃料] <: 柴
  def 初始化[I <: 燃料](i: I): 初始化[I]

  // 点火 ("ignite"): apply a flame to the stack, yielding the burnt stack.
  type 点火[I <: 火苗] <: 柴
  def 点火[I <: 火苗](i: I): 点火[I]
}

// Non-empty stack: a head fuel value on top of tail `尾` (a cons cell).
class 一块柴[S <: 燃料, T <: 柴](override val 当前燃料: S, override val 尾: T) extends 柴 {
  self =>
  override type 尾 = T
  override type 当前燃料 = S

  override type 初始化[I <: 燃料] = 一块柴[I, 一块柴[S, T]]
  override def 初始化[I <: 燃料](i: I): 一块柴[I, 一块柴[S, T]] = new 一块柴(i, self)

  // Igniting combines the flame with the head fuel (via 火苗's flatMap and
  // 叠减 — presumably "combine" and "subtract"; their semantics live on 火苗,
  // not visible here — TODO confirm), then propagates the resulting flame to
  // the tail. The value-level body mirrors the type members I1..II exactly.
  override type 点火[I <: 火苗] = ({
    type I1 = I#flatMap[当前燃料]
    type I2 = I#叠减[当前燃料]
    type I3 = T#点火[I1]
    type II = 一块柴[I2, I3]
  })#II
  override def 点火[I <: 火苗](i: I): 点火[I] = {
    val i1: I#flatMap[当前燃料] = i.flatMap(当前燃料)
    val i2 = i.叠减(当前燃料)
    val i3 = 尾.点火(i1)
    new 一块柴(i2, i3)
  }
}

// Empty stack (nil): carries the zero fuel, is its own tail, and igniting it
// is a no-op.
class 初始人生 extends 柴 {
  self =>
  override type 尾 = 初始人生
  override type 当前燃料 = 零燃料
  override def 尾 = self
  override def 当前燃料 = 零燃料.零燃料
  override type 初始化[I <: 燃料] = 一块柴[I, 初始人生]
  override def 初始化[I <: 燃料](i: I): 一块柴[I, 初始人生] = new 一块柴(i, self)
  override type 点火[I <: 火苗] = 初始人生
  override def 点火[I <: 火苗](i: I): 初始人生 = self
}

// Single shared nil instance.
object 初始人生 {
  val 初始人生: 初始人生 = new 初始人生
}
| djx314/ubw | raw07-晒太阳/src/main/scala/Chai.scala | Scala | bsd-3-clause | 1,655 |
package org.scalacheck.ops.time
import org.scalacheck.Gen
private[time] trait FromLong {
  self: AbstractTimeGenerators =>

  // The current instant, derived from the system clock's epoch milliseconds.
  protected[time] def now(implicit params: ParamsType): InstantType = asInstant(System.currentTimeMillis())

  // Conversions between the library's instant/duration types and Long millis.
  protected[time] def asInstant(millis: Long)(implicit params: ParamsType): InstantType

  protected[time] def asDuration(millis: Long): DurationType

  protected[time] def asLong(duration: DurationType): Long

  protected[time] def asLong(datetime: InstantType)(implicit params: ParamsType): Long

  /** Generates instants between `start` and `end` by picking a random
    * epoch-millisecond value in that range and converting it back. */
  override def between(start: InstantType, end: InstantType)
                      (implicit dateTimeParams: ParamsType = defaultParams): Gen[InstantType] =
    Gen.choose(asLong(start), asLong(end)).map(millis => asInstant(millis))
}
| gloriousfutureio/scalacheck-ops | core_1-12/src/main/scala/org/scalacheck/ops/time/FromLong.scala | Scala | apache-2.0 | 776 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.api
import java.util.concurrent.ConcurrentHashMap
import org.locationtech.geomesa.index.utils.SplitArrays
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.opengis.feature.simple.SimpleFeatureType
/** Assigns each feature to one of a fixed set of shard prefixes, used to
  * spread index keys across a key space.
  */
trait ShardStrategy {

  /**
   * Get a shard for the given feature. This should be consistent for a given feature ID
   *
   * @param feature feature
   * @return shard prefix bytes (empty when sharding is disabled)
   */
  def apply(feature: WritableFeature): Array[Byte]

  /**
   * All possible shards
   *
   * @return
   */
  def shards: Seq[Array[Byte]]

  /**
   * Length of each shard. 0 indicates no sharding
   *
   * @return
   */
  def length: Int
}
object ShardStrategy {

  // Cache of strategies keyed by shard count. The get/put below is a benign
  // check-then-act race: concurrent callers may each build an instance and the
  // last put wins, which only duplicates a little work.
  private val instances = new ConcurrentHashMap[Int, ShardStrategyImpl]()

  /** Returns a strategy for `count` shards; counts below 2 disable sharding. */
  def apply(count: Int): ShardStrategy = {
    if (count < 2) { NoShardStrategy } else {
      var strategy = instances.get(count)
      if (strategy == null) {
        strategy = new ShardStrategyImpl(SplitArrays(count))
        instances.put(count, strategy)
      }
      strategy
    }
  }

  // Degenerate strategy: no shard prefix at all.
  object NoShardStrategy extends ShardStrategy {
    override def apply(feature: WritableFeature): Array[Byte] = Array.empty
    override val shards: Seq[Array[Byte]] = Seq.empty
    override val length: Int = 0
  }

  // Strategy sized from the feature type's configured Z-index shard count.
  object ZShardStrategy {
    def apply(sft: SimpleFeatureType): ShardStrategy = ShardStrategy(sft.getZShards)
  }

  // Strategy sized from the feature type's configured attribute-index shard count.
  object AttributeShardStrategy {
    def apply(sft: SimpleFeatureType): ShardStrategy = ShardStrategy(sft.getAttributeShards)
  }

  // Picks a shard by feature id hash modulo the shard count.
  class ShardStrategyImpl(override val shards: IndexedSeq[Array[Byte]]) extends ShardStrategy {
    override def apply(feature: WritableFeature): Array[Byte] = {
      try { shards(feature.idHash % shards.length) } catch {
        // handle case where hash is Int.MinValue, which isn't handled by math.abs
        case e: IndexOutOfBoundsException => shards.head
      }
    }
    override val length: Int = shards.head.length
  }
}
| aheyne/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/api/ShardStrategy.scala | Scala | apache-2.0 | 2,472 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.cdi.spring.config
import java.math.BigDecimal
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpec
import org.springframework.core.convert.support.DefaultConversionService
class ConvertTest extends AnyFunSpec with Matchers {

  describe("Spring") {
    it("Convert number and boolean") {
      // Spring's default conversion service should parse numeric and boolean strings.
      val service = new DefaultConversionService()
      service.convert("4.5", classOf[Number]) shouldEqual new BigDecimal("4.5")
      service.convert("true", classOf[Boolean]) shouldBe true
    }
  }
}
| beangle/cdi | spring/src/test/scala/org/beangle/cdi/spring/config/ConvertTest.scala | Scala | lgpl-3.0 | 1,284 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.core.compiler.interactive
import org.argus.jawa.core.ast.{CompilationUnit, JawaAstNode, JawaSymbol}
import org.argus.jawa.core.compiler.parser.JawaParser
import org.argus.jawa.core.compiler.util._
import org.argus.jawa.core.io.{AbstractFile, Position, SourceFile}
import scala.util.{Failure, Success}
/** Interface of interactive compiler to a client such as an IDE
* The model the presentation compiler consists of the following parts:
*
* unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
*
* manipulated by: removeUnitOf, reloadSources.
*
* A call to reloadSources will add the given sources to the loaded units, and
* start a new background compiler pass to compile all loaded units (with the indicated sources first).
* The background compiler thread can be interrupted each time an AST node is
* completely resolved in the following ways:
* 1. by a new call to reloadSources. This starts a new background compiler pass.
* 2. by a call to askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
* 3. by raising an exception in the scheduler.
* 4. by passing a high-priority action wrapped in ask { ... }.
*
* Actions under 1-2 can themselves be interrupted.
* High-priority actions under 4 cannot; they always run to completion.
* So these high-priority actions should to be short.
*
* Normally, an interrupted action continues after the interrupting action is finished.
* However, if the interrupting action created a new run, the interrupted
* action is aborted. If there's an outstanding response, it will be set to
* a Right value with a FreshRunReq exception.
*/
trait CompilerControl { self: Global =>
// Alias so clients can refer to responses without the full package path.
type Response[T] = org.argus.jawa.core.compiler.interactive.Response[T]

/** The scheduler by which client and compiler communicate
 * Must be initialized before starting compilerRunner
 */
@volatile protected[interactive] var scheduler = new WorkScheduler

/** Return the compilation unit attached to a source file, or None
 * if source is not loaded.
 */
def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getCompilationUnit(s.file)

/** Run operation `op` on a compilation unit associated with given `source`.
 * If source has a loaded compilation unit, this one is passed to `op`.
 * Otherwise a new compilation unit is created, but not added to the set of loaded units.
 */
def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T = {
  val rcu: RichCompilationUnit = getCompilationUnit(source.file) match {
    case Some(r) => r
    case None =>
      // Parse on demand; NOTE(review): `.get` throws if parsing fails —
      // presumably callers only pass parseable sources; confirm.
      val cu = parseCompilationUnit(source).get
      RichCompilationUnit(cu)
  }
  op(rcu)
}

/** Removes the CompilationUnit corresponding to the given SourceFile
 * from consideration for recompilation.
 */
def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { removeCompilationUnit(s.file) }

// Runs the item immediately when it was created on the compiler thread,
// otherwise queues it for the background compiler via the scheduler.
private def postWorkItem(item: WorkItem): Unit =
  if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
/** Makes sure a set of compilation units is loaded and parsed.
 * Returns () to syncvar `response` on completion.
 * Afterwards a new background compiler run is started with
 * the given sources at the head of the list of to-be-compiled sources.
 */
def askReload(sources: List[SourceFile], response: Response[Unit]): Unit = {
  // Any queued reload of the exact same sources is superseded: complete it
  // immediately so its caller is not left waiting forever.
  val superseeded = scheduler.dequeueAll {
    case ri: ReloadItem if ri.sources == sources => Some(ri)
    case _ => None
  }
  superseeded.foreach(_.response.set(()))
  postWorkItem(ReloadItem(sources, response))
}

/** Removes source files and toplevel symbols, and issues a new typer run.
 * Returns () to syncvar `response` on completion.
 */
def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]): Unit = {
  postWorkItem(FilesDeletedItem(sources, response))
}

/** Sets sync var `response` to the position of the definition of the given link in
 * the given sourcefile.
 *
 * @param sym The symbol referenced by the link (might come from a classfile)
 * @param response A response that will be set to the following:
 * If `source` contains a definition that is referenced by the given link
 * the position of that definition, otherwise NoPosition.
 * Note: This operation does not automatically load `source`. If `source`
 * is unloaded, it stays that way.
 */
def askLinkPos(sym: JawaSymbol, response: Response[Position]): Unit =
  postWorkItem(AskLinkPosItem(sym, response))

/** Asks to do unit corresponding to given source file on present and subsequent type checking passes.
 * If the file is in the 'crashedFiles' ignore list it is removed and typechecked normally.
 */
def askToDoFirst(source: SourceFile): Unit =
  postWorkItem(new AskToDoFirstItem(source))

/** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
 * Note: Unlike for most other ask... operations, the source file belonging to `pos` needs not be loaded.
 */
def askTypeAt(pos: Position, response: Response[Option[JawaSymbol]]): Unit =
  postWorkItem(AskTypeAtItem(pos, response))

/** If source is not yet loaded, get an outline view with askParseEntered.
 * If source is loaded, return it.
 * In both cases, set response to parsed tree.
 * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
 */
def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[CompilationUnit]): Unit = {
  getCompilationUnit(source.file) match {
    case Some(rcu) => respond(response) {rcu.cu}
    case None => askParsedEntered(source, keepSrcLoaded, response)
  }
}
/** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
 * @param source The source file to be analyzed
 * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
 * If keepLoaded is `false` the operation is run at low priority, only after
 * everything is brought up to date in a regular type checker run.
 * @param response The response.
 */
def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[CompilationUnit]): Unit =
  postWorkItem(AskParsedEnteredItem(source, keepLoaded, response))

/** Cancels current compiler run and start a fresh one where everything will be re-typechecked
 * (but not re-loaded).
 */
def askReset(): Unit = scheduler raise new FreshRunReq

/** Tells the compile server to shutdown, and not to restart again */
def askShutdown(): Unit = scheduler raise ShutdownReq

/** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
 *
 * This method is thread-safe and as such can safely run outside of the presentation
 * compiler thread.
 */
def parseCompilationUnit(source: SourceFile): Option[CompilationUnit] = {
  // Parse failures are reported through `reporter`; the result is just None.
  JawaParser.parse[CompilationUnit](Right(source), resolveBody = true, reporter, classOf[CompilationUnit]) match {
    case Success(cu) => Some(cu)
    case Failure(_) => None
  }
}

/** Asks for a computation to be done quickly on the presentation compiler thread */
def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op

/** Asks for a computation to be done on presentation compiler thread, returning
 * a response with the result or an exception
 */
def askForResponse[A](op: () => A): Response[A] = {
  val r = new Response[A]
  if (self.onCompilerThread) {
    // Already on the compiler thread: run synchronously. Catching Throwable is
    // deliberate here — any failure is channeled into the response.
    try { r set op() }
    catch { case exc: Throwable => r raise exc }
    r
  } else {
    // Delegate to the compiler thread and forward its outcome to `r`.
    val ir = scheduler askDoQuickly op
    ir onComplete {
      case Left(result) => r set result
      case Right(exc) => r raise exc
    }
    r
  }
}

// True when the current thread is the background compiler thread.
def onCompilerThread: Boolean = Thread.currentThread == compileRunner
  // items that get sent to scheduler
  /** A unit of work queued for execution on the presentation compiler thread. */
  abstract class WorkItem extends (() => Unit) {
    // Captured at construction time: whether the item was created on the
    // compiler thread itself (consumed e.g. by AskParsedEnteredItem).
    val onCompilerThread: Boolean = self.onCompilerThread
    /** Raises a MissingResponse, if the work item carries a response. */
    def raiseMissing(): Unit
  }
case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
def apply(): Unit = reload(sources, response)
override def toString: String = "reload "+sources
def raiseMissing(): Unit =
response raise new MissingResponse
}
class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
def apply(): Unit = {
moveToFront(List(source))
enableIgnoredFile(source.file)
}
override def toString: String = "dofirst "+source
def raiseMissing(): Unit = ()
}
case class AskTypeAtItem(pos: Position, response: Response[Option[JawaSymbol]]) extends WorkItem {
def apply(): Unit = self.getTypeAt(pos, response)
override def toString: String = "typeat "+pos.source+" "+pos.show
def raiseMissing(): Unit =
response raise new MissingResponse
}
case class AskLinkPosItem(sym: JawaSymbol, response: Response[Position]) extends WorkItem {
def apply(): Unit = self.getLinkPos(sym, response)
override def toString: String = "linkpos "+sym
def raiseMissing(): Unit =
response raise new MissingResponse
}
case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[CompilationUnit]) extends WorkItem {
def apply(): Unit = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
override def toString: String = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
def raiseMissing(): Unit =
response raise new MissingResponse
}
/** Locate smallest tree that encloses position
* @param pos Position must be loaded
*/
def locateAst(pos: Position): JawaAstNode = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.cu }
case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
def apply(): Unit = filesDeleted(sources, response)
override def toString: String = "files deleted "+sources
def raiseMissing(): Unit =
response raise new MissingResponse
}
/** A do-nothing work scheduler that responds immediately with MissingResponse.
*
* Used during compiler shutdown.
*/
class NoWorkScheduler extends WorkScheduler {
override def postWorkItem(action: Action): Unit = synchronized {
action match {
case w: WorkItem => w.raiseMissing()
case _: EmptyAction => // do nothing
case _ => println("don't know what to do with this " + action.getClass)
}
}
override def doQuickly[A](op: () => A): A = {
throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
}
override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
val ir: InterruptReq {
type R = A
} = new InterruptReq {
type R = A
val todo: () => Nothing = () => throw new MissingResponse
}
ir.execute()
ir
}
}
}
// ---------------- Interpreted exceptions -------------------
// Control-flow throwables interpreted by the presentation compiler loop
// rather than treated as errors.

/** Signals a request for a fresh background compiler run.
 * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
 */
class FreshRunReq extends ControlThrowable
/** Signals a request for a permanent shutdown of the presentation compiler.
 * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
 */
object ShutdownReq extends ControlThrowable
/** Thrown when a compilation unit is requested for a file that has none loaded. */
class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
/** Raised into a Response whose work item was dropped without being executed. */
class MissingResponse extends Exception("response missing")
| arguslab/Argus-SAF | jawa/src/main/scala/org/argus/jawa/core/compiler/interactive/CompilerControl.scala | Scala | apache-2.0 | 12,300 |
package com.seanshubin.contract.domain
import java.io._
import java.nio.channels.Channel
import java.util.{Map, Properties}
/** Injectable abstraction over the static `java.lang.System` API.
 *
 *  Mirrors `System`'s members one-for-one so production code can depend on
 *  this trait while tests substitute a fake instead of touching global
 *  process state (standard streams, properties, environment, exit, ...).
 */
trait SystemContract {
  // Standard streams and their reassignment.
  def in: InputStream
  def out: PrintStream
  def err: PrintStream
  def setIn(in: InputStream)
  def setOut(out: PrintStream)
  def setErr(err: PrintStream)
  def console: Console
  def inheritedChannel: Channel
  // Security manager management.
  def setSecurityManager(s: SecurityManager)
  def getSecurityManager: SecurityManager
  // Time sources.
  def currentTimeMillis: Long
  def nanoTime: Long
  def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int)
  def identityHashCode(x: AnyRef): Int
  // System properties.
  def getProperties: Properties
  def lineSeparator: String
  def setProperties(props: Properties)
  def getProperty(key: String): String
  def getProperty(key: String, default: String): String
  def setProperty(key: String, value: String): String
  def clearProperty(key: String): String
  // Environment variables.
  def getenv(name: String): String
  def getenv: Map[String, String]
  def exit(status: Int)
  def gc()
  def runFinalization()
  // BUGFIX: the message pieces previously had no separating spaces, so the
  // concatenated warning read "...result infinalizers..." etc.
  @deprecated(
    message =
      "This method is inherently unsafe. It may result in " +
        "finalizers being called on live objects while other threads are " +
        "concurrently manipulating those objects, resulting in erratic " +
        "behavior or deadlock.",
    since = "JDK1.1")
  def runFinalizersOnExit(value: Boolean)
  // Native library loading.
  def load(filename: String)
  def loadLibrary(libname: String)
  def mapLibraryName(libname: String): String
}
| SeanShubin/contract | domain/src/main/scala/com/seanshubin/contract/domain/SystemContract.scala | Scala | unlicense | 1,519 |
package exercises.ch04
object Ex04 {

  /** Combines a list of options into an option of a list.
   *
   *  Returns `Some` of all unwrapped values (in order) when every element
   *  is defined, and `None` if any element is `None`. The previous version
   *  traversed the list twice (flatten + size compare); this single-pass
   *  fold short-circuits through `Option`'s `flatMap` instead.
   */
  def sequence[A](a: List[Option[A]]): Option[List[A]] =
    a.foldRight(Option(List.empty[A])) { (elem, acc) =>
      for {
        head <- elem
        tail <- acc
      } yield head :: tail
    }

  def main(args: Array[String]): Unit = {
    println(sequence(List(None, Some(2), Some(3))))    // None
    println(sequence(List(Some(1), None, Some(3))))    // None
    println(sequence(List(Some(1), Some(2), Some(3)))) // Some(List(1, 2, 3))
  }
}
| VladMinzatu/fpinscala-exercises | src/main/scala/exercises/ch04/Ex04.scala | Scala | mit | 474 |
package core.formatter.marketplace.product
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{ JsonSerializer, ObjectMapper, SerializerProvider }
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.lvxingpai.model.account.UserInfo
import com.lvxingpai.model.marketplace.order.Order
import com.lvxingpai.model.marketplace.product._
import com.lvxingpai.model.misc.ImageItem
import core.formatter.BaseFormatter
import core.formatter.misc.ImageItemSerializer
import core.formatter.user.UserSerializer
import scala.collection.JavaConversions._
import scala.language.postfixOps
/**
* Created by topy on 2016/1/25.
*/
/** Jackson-based JSON formatter for commodity comments.
 *
 *  Wires an ObjectMapper with custom serializers for comments, replies,
 *  users, orders, commodities, plans and images. Serializers render absent
 *  values as empty strings / empty objects rather than null.
 */
class CommodityCommentFormatter extends BaseFormatter {

  override protected val objectMapper = {
    val mapper = new ObjectMapper()
    val module = new SimpleModule()
    mapper.registerModule(DefaultScalaModule)
    module.addSerializer(classOf[CommodityComment], new CommodityCommentSerializer)
    module.addSerializer(classOf[BaseCommodityComment], new CommodityReplySerializer)
    module.addSerializer(classOf[UserInfo], new UserSerializer)
    module.addSerializer(classOf[Order], new CommentOrderSerializer)
    module.addSerializer(classOf[Commodity], new CommentCommoditySerializer)
    module.addSerializer(classOf[CommodityPlan], new CommentCommodityPlanSerializer)
    mapper.registerModule(module)
    mapper
  }

  /** Serializes a top-level comment: contents, rating, author (hidden for
   *  anonymous comments), images, reply and timestamps.
   */
  class CommodityCommentSerializer extends JsonSerializer[CommodityComment] {
    override def serialize(c: CommodityComment, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
      gen.writeStartObject()
      gen.writeStringField("id", Option(c.id.toString) getOrElse "")
      gen.writeStringField("contents", Option(c.contents) getOrElse "")
      gen.writeNumberField("rating", c.rating)
      gen.writeBooleanField("anonymous", c.anonymous)
      gen.writeFieldName("user")
      val userInfo = c.user
      // Anonymous comments expose an empty user object instead of the author.
      if (Option(userInfo).nonEmpty && !c.anonymous)
        serializers.findValueSerializer(classOf[UserInfo], null).serialize(userInfo, gen, serializers)
      else {
        gen.writeStartObject()
        gen.writeEndObject()
      }
      // images
      gen.writeFieldName("images")
      gen.writeStartArray()
      val images = c.images
      if (images != null) {
        val ret = serializers.findValueSerializer(classOf[ImageItem], null)
        for (image <- images)
          ret.serialize(image, gen, serializers)
      }
      gen.writeEndArray()
      // The reply serializer writes only fields, so the enclosing object is
      // opened and closed here.
      gen.writeFieldName("reply")
      val reply = c.reply
      gen.writeStartObject()
      if (Option(reply).nonEmpty)
        serializers.findValueSerializer(classOf[BaseCommodityComment], null).serialize(reply, gen, serializers)
      gen.writeEndObject()
      // TODO: temporarily disabled because of the commodity type issue in Order
      // gen.writeFieldName("order")
      // val order = c.order
      // if (Option(order).nonEmpty)
      //   serializers.findValueSerializer(classOf[Order], null).serialize(order, gen, serializers)
      // else {
      //   gen.writeStartObject()
      //   gen.writeEndObject()
      // }
      gen.writeNumberField("createTime", if (c.createTime != null) c.createTime.getTime else 0)
      gen.writeNumberField("updateTime", if (c.updateTime != null) c.updateTime.getTime else 0)
      gen.writeEndObject()
    }
  }

  /** Serializes a reply. Writes only fields: the caller is responsible for
   *  the surrounding writeStartObject/writeEndObject pair.
   */
  class CommodityReplySerializer extends JsonSerializer[BaseCommodityComment] {
    override def serialize(b: BaseCommodityComment, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
      gen.writeStringField("contents", Option(b.contents) getOrElse "")
      // images
      gen.writeFieldName("images")
      gen.writeStartArray()
      val images = b.images
      if (images != null) {
        val ret = serializers.findValueSerializer(classOf[ImageItem], null)
        for (image <- images)
          ret.serialize(image, gen, serializers)
      }
      gen.writeEndArray()
    }
  }

  /** Serializes the order a comment belongs to, embedding its commodity. */
  class CommentOrderSerializer extends JsonSerializer[Order] {
    override def serialize(order: Order, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
      gen.writeStartObject()
      //gen.writeStringField("id", Option(order.id.toString) getOrElse "")
      gen.writeNumberField("orderId", order.orderId)
      gen.writeStringField("status", Option(order.status) getOrElse "")
      // Commodity
      gen.writeFieldName("commodity")
      val commodity = order.commodity
      if (commodity != null) {
        val retSeller = serializers.findValueSerializer(classOf[Commodity], null)
        retSeller.serialize(commodity, gen, serializers)
      }
      gen.writeEndObject()
    }
  }

  /** Serializes a commodity: id, commodityId, title and its plans. */
  class CommentCommoditySerializer extends JsonSerializer[Commodity] {
    override def serialize(commodity: Commodity, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
      gen.writeStartObject()
      if (commodity.id != null)
        gen.writeStringField("id", commodity.id.toString)
      gen.writeNumberField("commodityId", commodity.commodityId)
      gen.writeStringField("title", Option(commodity.title) getOrElse "")
      gen.writeFieldName("plans")
      gen.writeStartArray()
      val plans = commodity.plans
      if (plans != null) {
        val ret = serializers.findValueSerializer(classOf[CommodityPlan], null)
        for (pl <- plans)
          ret.serialize(pl, gen, serializers)
      }
      gen.writeEndArray()
      gen.writeEndObject()
    }
  }

  /** Serializes a commodity plan (planId and title only). */
  class CommentCommodityPlanSerializer extends JsonSerializer[CommodityPlan] {
    override def serialize(commodityPlan: CommodityPlan, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
      gen.writeStartObject()
      if (commodityPlan.planId != null)
        gen.writeStringField("planId", commodityPlan.planId)
      gen.writeStringField("title", Option(commodityPlan.title) getOrElse "")
      // BUGFIX: removed a stray writeEndArray() that had no matching open
      // array — Jackson would throw a JsonGenerationException at runtime.
      gen.writeEndObject()
    }
  }
}
/** Companion holding the shared, lazily created formatter instance. */
object CommodityCommentFormatter {
  lazy val instance: CommodityCommentFormatter = new CommodityCommentFormatter
}
| Lvxingpai/Hanse | app/core/formatter/marketplace/product/CommodityCommentFormatter.scala | Scala | apache-2.0 | 6,183 |
package com.arcusys.learn.liferay.util
import javax.portlet.{ActionRequest, PortletRequest, RenderRequest}
import javax.servlet.http.HttpServletRequest
import com.arcusys.learn.liferay.LiferayClasses.LUser
import com.liferay.portal.kernel.security.auth.CompanyThreadLocal
import com.liferay.portal.kernel.service.{CompanyLocalServiceUtil, ServiceContextThreadLocal}
import com.liferay.portal.kernel.upload.UploadPortletRequest
import com.liferay.portal.kernel.util.PortalUtil
/** Thin facade over Liferay's static `PortalUtil` API, plus convenience
 *  helpers for building portal/host URLs for a given company. Stateless:
 *  every method delegates to Liferay portal services.
 */
object PortalUtilHelper {
  // --- one-to-one delegations to PortalUtil ---
  def getOriginalServletRequest(req: HttpServletRequest) = PortalUtil.getOriginalServletRequest(req)
  def getPortalURL(req: RenderRequest): String = PortalUtil.getPortalURL(req)
  def getPortletId(request: RenderRequest) = PortalUtil.getPortletId(request)
  def getClassNameId(className: String): Long = PortalUtil.getClassNameId(className)
  def getBasicAuthUserId(request: HttpServletRequest): Long = PortalUtil.getBasicAuthUserId(request)
  def getUser(request: HttpServletRequest): LUser = PortalUtil.getUser(request)
  def getUserId(request: HttpServletRequest): Long = PortalUtil.getUserId(request)
  def getUploadPortletRequest(request: ActionRequest): UploadPortletRequest = PortalUtil.getUploadPortletRequest(request)
  def getPortalURL(req: HttpServletRequest): String = PortalUtil.getPortalURL(req)
  def getPortalURL(virtualHost: String, port: Int, isSecure: Boolean): String = PortalUtil.getPortalURL(virtualHost, port, isSecure)
  def getPortalPort(isSecure: Boolean): Int = PortalUtil.getPortalPort(isSecure)
  def getCompanyId(portletRequest: PortletRequest): Long =
    PortalUtil.getCompanyId(portletRequest)
  def getCompanyId(portletRequest: HttpServletRequest): Long =
    PortalUtil.getCompanyId(portletRequest)
  def getPathMain: String = PortalUtil.getPathMain
  def getHttpServletRequest(portletRequest: PortletRequest): HttpServletRequest =
    PortalUtil.getHttpServletRequest(portletRequest)
  def getDefaultCompanyId: Long = PortalUtil.getDefaultCompanyId
  // --- URL helpers built on company / virtual-host lookups ---
  // Local-host URL for the company bound to the current thread.
  def getLocalHostUrl: String = {
    val companyId = CompanyThreadLocal.getCompanyId
    getLocalHostUrlForCompany(companyId)
  }
  // Prefers the current request (reusing its local port and scheme) when a
  // service context is available; otherwise falls back to the portal port.
  def getLocalHostUrlForCompany(companyId: Long): String = {
    val request = Option(ServiceContextThreadLocal.getServiceContext)
      .flatMap(s => Option(s.getRequest))
    request match {
      case Some(r) => getLocalHostUrl(companyId, r)
      case None => getLocalHostUrl(companyId)
    }
  }
  def getLocalHostUrl(companyId: Long, request: HttpServletRequest): String = {
    lazy val company = CompanyLocalServiceUtil.getCompany(companyId)
    val hostName = company.getVirtualHostname
    val port = request.getLocalPort
    PortalUtil.getPortalURL(hostName, port, request.isSecure) + PortalUtil.getPathContext
  }
  def getLocalHostUrl(companyId: Long, isSecure : Boolean = false): String = {
    getHostWithPort(companyId, isSecure) + PortalUtil.getPathContext
  }
  def getHostWithPort(companyId: Long, isSecure: Boolean = false): String = {
    val company = CompanyLocalServiceUtil.getCompany(companyId)
    val hostName = company.getVirtualHostname
    val port = getPortalPort(isSecure)
    getPortalURL(hostName, port, isSecure)
  }
  // NOTE(review): scheme is hard-coded to "http" regardless of portal
  // security settings — confirm this is intended.
  def getHostName(companyId: Long): String =
    "http://" +CompanyLocalServiceUtil.getCompany(companyId).getVirtualHostname
  def getPathContext(request: PortletRequest): String = {
    PortalUtil.getPathContext(request)
  }
  def getPathContext: String = {
    PortalUtil.getPathContext
  }
  // NOTE(review): `request` is unused and the delegate returns the module
  // path, not a servlet path — confirm name/behavior is intended.
  def getServletPathContext(request: PortletRequest): String = {
    PortalUtil.getPathModule
  }
}
| arcusys/Valamis | learn-liferay700-services/src/main/scala/com/arcusys/learn/liferay/util/PortalUtilHelper.scala | Scala | gpl-3.0 | 3,594 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.yarn
import java.io.{DataOutputStream, File, FileOutputStream}
import scala.annotation.tailrec
import org.apache.commons.io.FileUtils
import org.apache.hadoop.yarn.api.records.ApplicationId
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.server.api.{ApplicationInitializationContext, ApplicationTerminationContext}
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.apache.spark.SparkFunSuite
import org.apache.spark.network.shuffle.ShuffleTestAccessor
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo
/** Verifies that the YARN shuffle service persists executor registration
 *  state across NodeManager restarts, drops state for stopped applications,
 *  and survives a corrupted registered-executor file.
 */
class YarnShuffleServiceSuite extends SparkFunSuite with Matchers with BeforeAndAfterEach {
  private[yarn] var yarnConfig: YarnConfiguration = new YarnConfiguration
  override def beforeEach(): Unit = {
    // Register the shuffle service as a YARN auxiliary service on an
    // ephemeral port and start each test from empty NM local dirs.
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICES, "spark_shuffle")
    yarnConfig.set(YarnConfiguration.NM_AUX_SERVICE_FMT.format("spark_shuffle"),
      classOf[YarnShuffleService].getCanonicalName)
    yarnConfig.setInt("spark.shuffle.service.port", 0)
    yarnConfig.get("yarn.nodemanager.local-dirs").split(",").foreach { dir =>
      val d = new File(dir)
      if (d.exists()) {
        FileUtils.deleteDirectory(d)
      }
      FileUtils.forceMkdir(d)
      logInfo(s"creating yarn.nodemanager.local-dirs: $d")
    }
  }
  // Up to three service instances per test, simulating successive NM restarts;
  // afterEach stops whichever instances a test left running.
  var s1: YarnShuffleService = null
  var s2: YarnShuffleService = null
  var s3: YarnShuffleService = null
  override def afterEach(): Unit = {
    if (s1 != null) {
      s1.stop()
      s1 = null
    }
    if (s2 != null) {
      s2.stop()
      s2 = null
    }
    if (s3 != null) {
      s3.stop()
      s3 = null
    }
  }
  test("executor state kept across NM restart") {
    s1 = new YarnShuffleService
    s1.init(yarnConfig)
    val app1Id = ApplicationId.newInstance(0, 1)
    val app1Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app1Id, null)
    s1.initializeApplication(app1Data)
    val app2Id = ApplicationId.newInstance(0, 2)
    val app2Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app2Id, null)
    s1.initializeApplication(app2Data)
    val execStateFile = s1.registeredExecutorFile
    execStateFile should not be (null)
    val shuffleInfo1 = new ExecutorShuffleInfo(Array("/foo", "/bar"), 3, "sort")
    val shuffleInfo2 = new ExecutorShuffleInfo(Array("/bippy"), 5, "hash")
    val blockHandler = s1.blockHandler
    val blockResolver = ShuffleTestAccessor.getBlockResolver(blockHandler)
    ShuffleTestAccessor.registeredExecutorFile(blockResolver) should be (execStateFile)
    blockResolver.registerExecutor(app1Id.toString, "exec-1", shuffleInfo1)
    blockResolver.registerExecutor(app2Id.toString, "exec-2", shuffleInfo2)
    ShuffleTestAccessor.getExecutorInfo(app1Id, "exec-1", blockResolver) should
      be (Some(shuffleInfo1))
    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", blockResolver) should
      be (Some(shuffleInfo2))
    // Diagnostic aid: if the state file is missing, report the closest
    // existing ancestor directory in the failure message.
    if (!execStateFile.exists()) {
      @tailrec def findExistingParent(file: File): File = {
        if (file == null) file
        else if (file.exists()) file
        else findExistingParent(file.getParentFile())
      }
      val existingParent = findExistingParent(execStateFile)
      assert(false, s"$execStateFile does not exist -- closest existing parent is $existingParent")
    }
    assert(execStateFile.exists(), s"$execStateFile did not exist")
    // now we pretend the shuffle service goes down, and comes back up
    s1.stop()
    s2 = new YarnShuffleService
    s2.init(yarnConfig)
    s2.registeredExecutorFile should be (execStateFile)
    val handler2 = s2.blockHandler
    val resolver2 = ShuffleTestAccessor.getBlockResolver(handler2)
    // now we reinitialize only one of the apps, and expect yarn to tell us that app2 was stopped
    // during the restart
    s2.initializeApplication(app1Data)
    s2.stopApplication(new ApplicationTerminationContext(app2Id))
    ShuffleTestAccessor.getExecutorInfo(app1Id, "exec-1", resolver2) should be (Some(shuffleInfo1))
    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", resolver2) should be (None)
    // Act like the NM restarts one more time
    s2.stop()
    s3 = new YarnShuffleService
    s3.init(yarnConfig)
    s3.registeredExecutorFile should be (execStateFile)
    val handler3 = s3.blockHandler
    val resolver3 = ShuffleTestAccessor.getBlockResolver(handler3)
    // app1 is still running
    s3.initializeApplication(app1Data)
    ShuffleTestAccessor.getExecutorInfo(app1Id, "exec-1", resolver3) should be (Some(shuffleInfo1))
    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", resolver3) should be (None)
    s3.stop()
  }
  test("removed applications should not be in registered executor file") {
    s1 = new YarnShuffleService
    s1.init(yarnConfig)
    val app1Id = ApplicationId.newInstance(0, 1)
    val app1Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app1Id, null)
    s1.initializeApplication(app1Data)
    val app2Id = ApplicationId.newInstance(0, 2)
    val app2Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app2Id, null)
    s1.initializeApplication(app2Data)
    val execStateFile = s1.registeredExecutorFile
    execStateFile should not be (null)
    val shuffleInfo1 = new ExecutorShuffleInfo(Array("/foo", "/bar"), 3, "sort")
    val shuffleInfo2 = new ExecutorShuffleInfo(Array("/bippy"), 5, "hash")
    val blockHandler = s1.blockHandler
    val blockResolver = ShuffleTestAccessor.getBlockResolver(blockHandler)
    ShuffleTestAccessor.registeredExecutorFile(blockResolver) should be (execStateFile)
    blockResolver.registerExecutor(app1Id.toString, "exec-1", shuffleInfo1)
    blockResolver.registerExecutor(app2Id.toString, "exec-2", shuffleInfo2)
    // Each stopApplication should prune that app's executors from the DB;
    // after both apps stop, the DB must be empty.
    val db = ShuffleTestAccessor.shuffleServiceLevelDB(blockResolver)
    ShuffleTestAccessor.reloadRegisteredExecutors(db) should not be empty
    s1.stopApplication(new ApplicationTerminationContext(app1Id))
    ShuffleTestAccessor.reloadRegisteredExecutors(db) should not be empty
    s1.stopApplication(new ApplicationTerminationContext(app2Id))
    ShuffleTestAccessor.reloadRegisteredExecutors(db) shouldBe empty
  }
  test("shuffle service should be robust to corrupt registered executor file") {
    s1 = new YarnShuffleService
    s1.init(yarnConfig)
    val app1Id = ApplicationId.newInstance(0, 1)
    val app1Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app1Id, null)
    s1.initializeApplication(app1Data)
    val execStateFile = s1.registeredExecutorFile
    val shuffleInfo1 = new ExecutorShuffleInfo(Array("/foo", "/bar"), 3, "sort")
    val blockHandler = s1.blockHandler
    val blockResolver = ShuffleTestAccessor.getBlockResolver(blockHandler)
    ShuffleTestAccessor.registeredExecutorFile(blockResolver) should be (execStateFile)
    blockResolver.registerExecutor(app1Id.toString, "exec-1", shuffleInfo1)
    // now we pretend the shuffle service goes down, and comes back up. But we'll also
    // make a corrupt registeredExecutor File
    s1.stop()
    execStateFile.listFiles().foreach{_.delete()}
    val out = new DataOutputStream(new FileOutputStream(execStateFile + "/CURRENT"))
    out.writeInt(42)
    out.close()
    s2 = new YarnShuffleService
    s2.init(yarnConfig)
    s2.registeredExecutorFile should be (execStateFile)
    val handler2 = s2.blockHandler
    val resolver2 = ShuffleTestAccessor.getBlockResolver(handler2)
    // we re-initialize app1, but since the file was corrupt there is nothing we can do about it ...
    s2.initializeApplication(app1Data)
    // however, when we initialize a totally new app2, everything is still happy
    val app2Id = ApplicationId.newInstance(0, 2)
    val app2Data: ApplicationInitializationContext =
      new ApplicationInitializationContext("user", app2Id, null)
    s2.initializeApplication(app2Data)
    val shuffleInfo2 = new ExecutorShuffleInfo(Array("/bippy"), 5, "hash")
    resolver2.registerExecutor(app2Id.toString, "exec-2", shuffleInfo2)
    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", resolver2) should be (Some(shuffleInfo2))
    s2.stop()
    // another stop & restart should be fine though (eg., we recover from previous corruption)
    s3 = new YarnShuffleService
    s3.init(yarnConfig)
    s3.registeredExecutorFile should be (execStateFile)
    val handler3 = s3.blockHandler
    val resolver3 = ShuffleTestAccessor.getBlockResolver(handler3)
    s3.initializeApplication(app2Data)
    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", resolver3) should be (Some(shuffleInfo2))
    s3.stop()
  }
}
| pronix/spark | yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala | Scala | apache-2.0 | 9,573 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.maestro.core
package data
/**
 * Represents a field of `A` with type `B`. It has the name of the field and a getter given an
 * instance of `A`.
 */
case class Field[A : Manifest, B : Manifest](name: String, get: A => B) {
  // Runtime manifests of the owning (Thrift struct) type and the column
  // type; both take part in the unusual equality contract below.
  val structType = manifest[A]
  val columnType = manifest[B]
  /**
   * Fields are considered equal if the name and the type of the Thrift struct are equal.
   * Notice that this function will be working correctly only with Fields generated by FieldsMacro.
   * Do not try to use it with custom created fields.
   *
   * @throws RuntimeException when it encounters 2 fields with the same name from the same Thrift
   *                          struct but different column types; the intention is to indicate a
   *                          serious error in the logic of your program.
   */
  override def equals(other: Any): Boolean = other match {
    case f: Field[_, _] => equalityTest(f)
    case _ => false
  }
  private def equalityTest(f: Field[_, _]): Boolean = {
    val equalFields = structType == f.structType && name == f.name
    // Same struct and same name but a different column type is treated as a
    // programming error rather than mere inequality, hence the throw.
    if (equalFields && columnType != f.columnType) {
      throw new RuntimeException("Can't have two columns with the same name from the same Thrift structure with different column type")
    }
    equalFields
  }
  // Consistent with equals: derived from name and struct type only (equal
  // fields can never legally differ in column type).
  override def hashCode: Int = name.hashCode * 41 + structType.hashCode
  /**
   * Creates a field accessor for a higher structure, if this field is contained inside a bigger structure
   * @param f function from the bigger structure to `A`
   * @tparam C the type of the bigger structure
   * @return Field[C, B]
   */
  def zoom[C : Manifest](f: C => A):Field[C, B] = Field(name, f andThen get)
}
| CommBank/maestro | maestro-core/src/main/scala/au/com/cba/omnia/maestro/core/data/Field.scala | Scala | apache-2.0 | 2,317 |
/*
* Copyright (C) 2015 47 Degrees, LLC http://47deg.com hello@47deg.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iwai.cellmon.ui.fragment.locations
import android.widget.{LinearLayout, TextView}
import com.fortysevendeg.macroid.extras.LinearLayoutTweaks._
import com.fortysevendeg.macroid.extras.ResourcesExtras._
import com.fortysevendeg.macroid.extras.TextTweaks._
import com.fortysevendeg.macroid.extras.ViewTweaks._
import iwai.cellmon.R
import iwai.cellmon.model.core.entity.cell._
import iwai.cellmon.model.core.entity.location.Location
import macroid.{ContextWrapper, Tweak}
import scala.language.postfixOps
/** Styles for the locations list adapter (none defined yet). */
trait AdapterStyles {
}
/** Macroid tweaks describing the look of a single location list item. */
trait ItemStyles {
  // Root layout: full size, default padding, vertical stacking, standard
  // list-item background.
  def contentStyle(implicit context: ContextWrapper): Tweak[LinearLayout] =
    vMatchParent +
      vPaddings(resGetDimensionPixelSize(R.dimen.padding_default)) +
      llVertical +
      vBackground(R.drawable.background_list_default)
  // "Changed at" caption: small, accent-colored text.
  def changeAtStyle(implicit context: ContextWrapper): Tweak[TextView] =
    vWrapContent +
      tvColorResource(R.color.accent) +
      tvSize(resGetInteger(R.integer.text_small))
  // Location title: large primary-colored text with small vertical padding.
  def locationStyle(implicit context: ContextWrapper): Tweak[TextView] =
    vWrapContent +
      tvColorResource(R.color.primary) +
      tvSize(resGetInteger(R.integer.text_big)) +
      vPadding(0, resGetDimensionPixelSize(R.dimen.padding_default_extra_small))
  // Cell detail line: small dark-primary text, indented from the left.
  def cellStyle(implicit context: ContextWrapper): Tweak[TextView] =
    vWrapContent +
      tvColorResource(R.color.primary_dark) +
      tvSize(resGetInteger(R.integer.text_small)) +
      vPadding(resGetDimensionPixelSize(R.dimen.padding_default), 0, 0, 0)
package macros.test
/**
* Created by cuz on 1/18/17.
*/
/** Placeholder entry point for client-side macro tests; intentionally empty. */
object MacrosClientTest extends App{
}
| cuzfrog/WebDriverServ | client/src/test/scala/macros/test/MacrosClientTest.scala | Scala | apache-2.0 | 103 |
package scala.collection.immutable
import reftree.core._
import reftree.util.Reflection.PrivateFields
/**
* [[ToRefTree]] instances for Scala immutable collections, which require access to private fields
*
* The package name is intentionally changed so that we can get access to some private fields and classes.
*/
trait HackedCollectionInstances extends CollectionInstances {
  /** Renders an immutable Queue via its two internal lists: the dequeue side
   *  (private field `out`, labelled "front") and the enqueue side (private
   *  field `in`, labelled "back"). Reflection is required because both
   *  fields are private.
   */
  implicit def `Queue RefTree`[A: ToRefTree](implicit list: ToRefTree[List[A]]): ToRefTree[Queue[A]] =
    ToRefTree[Queue[A]] { value ⇒
      val front = value.privateField[List[A]]("out").refTree.toField.withName("front")
      val back = value.privateField[List[A]]("in").refTree.toField.withName("back")
      RefTree.Ref(value, Seq(front, back))
    }
private def vectorArrayRefTree[A: ToRefTree](value: Array[AnyRef], depth: Int): RefTree = {
RefTree.Ref(value, value map { x ⇒
if (x == null) RefTree.Null()
else if (depth > 0) vectorArrayRefTree[A](x.asInstanceOf[Array[AnyRef]], depth - 1)
else x.asInstanceOf[A].refTree
} map (_.toField)).rename("Array")
}
implicit def `Vector RefTree`[A: ToRefTree]: ToRefTree[Vector[A]] = ToRefTree[Vector[A]] { value ⇒
val start = value.startIndex.refTree.toField.withName("start")
val end = value.endIndex.refTree.toField.withName("end")
val focus = RefTree.Val.formatted(value.privateField[Int]("focus"))(_.toBinaryString)
.toField.withName("focus")
val depth = value.depth.refTree.toField.withName("depth")
val layers = Seq(
value.display0, value.display1,
value.display2, value.display3,
value.display4, value.display5
).zipWithIndex.map {
case (layer, d) if d < value.depth ⇒ vectorArrayRefTree[A](layer, d)
case (layer, _) ⇒ RefTree.Null()
}.map(_.toField)
RefTree.Ref(
value,
Seq(start, end, focus, depth) ++ layers
)
}
implicit def `HashSet RefTree`[A: ToRefTree]: ToRefTree[HashSet[A]] =
ToRefTree[HashSet[A]] {
case leaf: HashSet.HashSet1[A] ⇒
val hash = RefTree.Val.formatted(leaf.privateField[Int]("hash"))(_.toHexString)
.toField.withName("hash")
val key = leaf.privateField[A]("key").refTree.toField
RefTree.Ref(leaf, Seq(hash, key)).rename("HashSet.HashSet1")
case collision: HashSet.HashSetCollision1[A] ⇒
val hash = RefTree.Val.formatted(collision.privateField[Int]("hash"))(_.toHexString)
.toField.withName("hash")
val ks = collision.privateField[ListSet[A]]("ks").refTree.toField
RefTree.Ref(collision, Seq(hash, ks)).rename("HashSet.HashSetCollision1")
case trie: HashSet.HashTrieSet[A] ⇒
val size = trie.privateField[Int]("size0").refTree.toField.withName("size")
val elems = trie.privateField[Array[HashSet[A]]]("elems").refTree.toField
val bitmap = RefTree.Val.formatted(trie.privateField[Int]("bitmap"))(_.toBinaryString)
.toField.withName("bitmap")
RefTree.Ref(trie, Seq(size, bitmap, elems)).rename("HashSet.HashTrieSet")
case empty ⇒
RefTree.Ref(empty, Seq.empty).rename("HashSet.EmptyHashSet")
}
implicit def `HashMap RefTree`[A: ToRefTree, B: ToRefTree]: ToRefTree[HashMap[A, B]] =
ToRefTree[HashMap[A, B]] {
case leaf: HashMap.HashMap1[A, B] ⇒
val hash = RefTree.Val.formatted(leaf.privateField[Int]("hash"))(_.toHexString)
.toField.withName("hash")
val key = leaf.privateField[A]("key").refTree.toField
val value = leaf.privateField[A]("value").refTree.toField
RefTree.Ref(leaf, Seq(hash, key, value)).rename("HashMap.HashMap1")
case collision: HashMap.HashMapCollision1[A, B] ⇒
val hash = RefTree.Val.formatted(collision.privateField[Int]("hash"))(_.toHexString)
.toField.withName("hash")
val kvs = collision.privateField[ListMap[A, B]]("kvs").refTree.toField
RefTree.Ref(collision, Seq(hash, kvs)).rename("HashMap.HashMapCollision1")
case trie: HashMap.HashTrieMap[A, B] ⇒
val size = trie.privateField[Int]("size0").refTree.toField.withName("size")
val elems = trie.privateField[Array[HashMap[A, B]]]("elems").refTree.toField
val bitmap = RefTree.Val.formatted(trie.privateField[Int]("bitmap"))(_.toBinaryString)
.toField.withName("bitmap")
RefTree.Ref(trie, Seq(size, bitmap, elems)).rename("HashMap.HashTrieMap")
case empty ⇒
RefTree.Ref(empty, Seq.empty).rename("HashMap.EmptyHashMap")
}
private def redBlackTreeRefTree[A: ToRefTree, B: ToRefTree](
tree: RedBlackTree.Tree[A, B],
includeValue: Boolean
): RefTree = {
if (tree == null) RefTree.Null() else {
val key = tree.key.refTree.toField
val value = if (includeValue) Seq(tree.value.refTree.toField) else Seq.empty
val left = redBlackTreeRefTree(tree.left, includeValue).toField
val right = redBlackTreeRefTree(tree.right, includeValue).toField
RefTree.Ref(tree, Seq(key) ++ value ++ Seq(left, right))
.copy(highlight = tree.isInstanceOf[RedBlackTree.RedTree[A, B]])
}
}
implicit def `TreeSet RefTree`[A: ToRefTree]: ToRefTree[TreeSet[A]] = {
implicit val unit = ToRefTree[Unit](_ ⇒ RefTree.Null())
ToRefTree[TreeSet[A]] { value ⇒
if (value.isEmpty) {
RefTree.Ref(value, Seq.empty)
} else {
val underlying = value.privateField[RedBlackTree.Tree[A, Unit]]("tree")
val children = redBlackTreeRefTree(underlying, includeValue = false).asInstanceOf[RefTree.Ref].children
RefTree.Ref(value, children)
}
}
}
implicit def `TreeMap RefTree`[A: ToRefTree, B: ToRefTree]: ToRefTree[TreeMap[A, B]] =
ToRefTree[TreeMap[A, B]] { value ⇒
if (value.isEmpty) {
RefTree.Ref(value, Seq.empty)
} else {
val underlying = value.privateField[RedBlackTree.Tree[A, B]]("tree")
val children = redBlackTreeRefTree(underlying, includeValue = true).asInstanceOf[RefTree.Ref].children
RefTree.Ref(value, children)
}
}
}
| stanch/reftree | core/jvm/src/main/scala/reftree/core/HackedCollectionInstances.scala | Scala | gpl-3.0 | 6,065 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript
import quasar.RenderTree
import scala.{Product, Serializable}
import scalaz.{Equal, Show}
// Marks whether a shift operation applies to array elements or map entries.
sealed trait ShiftType extends Product with Serializable
object ShiftType {
  case object Array extends ShiftType
  case object Map extends ShiftType
  // Reference equality suffices: both variants are singleton case objects.
  implicit def equal: Equal[ShiftType] = Equal.equalRef
  implicit def renderTree: RenderTree[ShiftType] = RenderTree.fromShow("ShiftType")
  implicit def show: Show[ShiftType] = Show.showFromToString
}
| jedesah/Quasar | connector/src/main/scala/quasar/qscript/ShiftType.scala | Scala | apache-2.0 | 1,078 |
package controllers.auth
import javax.inject.Inject
import com.mohiva.play.silhouette.api.Authenticator.Implicits._
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.exceptions.ProviderException
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.util.{ Clock, Credentials }
import com.mohiva.play.silhouette.impl.exceptions.IdentityNotFoundException
import com.mohiva.play.silhouette.impl.providers._
import controllers.{ WebJarAssets, auth, pages }
import forms.auth.SignInForm
import models.services.UserService
import net.ceedubs.ficus.Ficus._
import play.api.Configuration
import play.api.i18n.{ I18nSupport, Messages, MessagesApi }
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc.{ Action, AnyContent, Controller }
import utils.auth.DefaultEnv
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.language.postfixOps
/**
* The `Sign In` controller.
*
* @param messagesApi The Play messages API.
* @param silhouette The Silhouette stack.
* @param userService The user service implementation.
* @param authInfoRepository The auth info repository implementation.
* @param credentialsProvider The credentials provider.
* @param socialProviderRegistry The social provider registry.
* @param configuration The Play configuration.
* @param clock The clock instance.
* @param webJarAssets The webjar assets implementation.
*/
class SignInController @Inject() (
  val messagesApi: MessagesApi,
  silhouette: Silhouette[DefaultEnv],
  userService: UserService,
  authInfoRepository: AuthInfoRepository,
  credentialsProvider: CredentialsProvider,
  socialProviderRegistry: SocialProviderRegistry,
  configuration: Configuration,
  clock: Clock,
  implicit val webJarAssets: WebJarAssets)
  extends Controller with I18nSupport {
  /**
   * Views the `Sign In` page.
   *
   * @return The result to display.
   */
  def view: Action[AnyContent] = silhouette.UnsecuredAction.async { implicit request =>
    Future.successful(Ok(views.html.auth.signIn(SignInForm.form, socialProviderRegistry)))
  }
  /**
   * Handles the submitted form.
   *
   * Flow: validate the form, authenticate the credentials, then branch on the
   * retrieved user: not activated -> show activation page; activated -> create
   * an authenticator (extended lifetimes when "remember me" was ticked), fire a
   * LoginEvent and embed the session cookie in the redirect; unknown login info
   * -> failed future. Provider failures redirect back with a flash error.
   *
   * @return The result to display.
   */
  def submit: Action[AnyContent] = silhouette.UnsecuredAction.async { implicit request =>
    SignInForm.form.bindFromRequest.fold(
      form => Future.successful(BadRequest(views.html.auth.signIn(form, socialProviderRegistry))),
      data => {
        val credentials = Credentials(data.email, data.password)
        credentialsProvider.authenticate(credentials).flatMap { loginInfo =>
          val result = Redirect(pages.routes.ApplicationController.index())
          userService.retrieve(loginInfo).flatMap {
            case Some(user) if !user.activated =>
              Future.successful(Ok(views.html.auth.activateAccount(data.email)))
            case Some(user) =>
              val c = configuration.underlying
              silhouette.env.authenticatorService.create(loginInfo).map {
                // "Remember me": stretch expiry/idle/cookie lifetimes from config.
                case authenticator if data.rememberMe =>
                  authenticator.copy(
                    expirationDateTime = clock.now + c.as[FiniteDuration]("silhouette.authenticator.rememberMe.authenticatorExpiry"),
                    idleTimeout = c.getAs[FiniteDuration]("silhouette.authenticator.rememberMe.authenticatorIdleTimeout"),
                    cookieMaxAge = c.getAs[FiniteDuration]("silhouette.authenticator.rememberMe.cookieMaxAge")
                  )
                case authenticator => authenticator
              }.flatMap { authenticator =>
                silhouette.env.eventBus.publish(LoginEvent(user, request))
                // Serialize the authenticator and attach it to the redirect response.
                silhouette.env.authenticatorService.init(authenticator).flatMap { v =>
                  silhouette.env.authenticatorService.embed(v, result)
                }
              }
            case None => Future.failed(new IdentityNotFoundException("Couldn't find user"))
          }
        }.recover {
          // Bad credentials (or other provider failure): back to sign-in with a flash message.
          case e: ProviderException =>
            Redirect(auth.routes.SignInController.view()).flashing("error" -> Messages("invalid.credentials"))
        }
      }
    )
  }
}
| dpitkevics/play-silhouette-4.0-slick-postgres-seed | app/controllers/auth/SignInController.scala | Scala | apache-2.0 | 4,257 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.integration.torch
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dllib.nn.{GradientChecker, View}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator._
import scala.math._
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class ViewSpec extends TorchSpec {
  "A View Container" should "generate correct output and grad" in {
    torchCheck()
    // Reshape a 4x4 input into 2x8 and compare forward/backward results
    // against the reference Torch (Lua) implementation, bit-for-bit.
    val module = new View[Double](2, 8)
    val input = Tensor[Double](4, 4).randn()
    val gradOutput = Tensor[Double](2, 8).randn()
    val start = System.nanoTime()
    val output = module.forward(input)
    val gradInput = module.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start
    // Run the equivalent computation in Torch via the Lua bridge.
    val code = "output = module:forward(input)\n" +
      "gradInput = module:backward(input,gradOutput)"
    val (luaTime, torchResult) = TH.run(code, Map("module" -> module, "input" -> input,
      "gradOutput" -> gradOutput), Array("output", "gradInput"))
    val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
    // View only reshapes, so outputs must match Torch exactly (zero tolerance).
    luaOutput1.map(output, (v1, v2) => {
      assert(abs(v1 - v2) == 0);
      v1
    })
    luaOutput2.map(gradInput, (v1, v2) => {
      assert(abs(v1 - v2) == 0);
      v1
    })
    println("Test case : View, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }
  "View module" should "be good in gradient check for input" in {
    torchCheck()
    // Fixed seed keeps the finite-difference gradient check deterministic.
    val seed = 100
    RNG.setSeed(seed)
    val layer = new View[Double](2, 8)
    val input = Tensor[Double](4, 4).apply1(e => Random.nextDouble())
    val checker = new GradientChecker(1e-4)
    checker.checkLayer[Double](layer, input, 1e-3) should be(true)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/integration/torch/ViewSpec.scala | Scala | apache-2.0 | 2,459 |
package us.newsrdr.models
/** Application-wide constants plus helpers that build absolute auth URLs. */
object Constants {
  val ITEMS_PER_PAGE = 10
  val LOGIN_URI = "/auth/login"
  val AUTHENTICATED_URI = "/auth/authenticated"
  val GOOGLE_CLIENT_ID = "51702830260-toudua1ufu12a2f7rbge9c0jpbhoqej2.apps.googleusercontent.com"
  val GOOGLE_CLIENT_SECRET = "INSERT SECRET HERE"
  /** Absolute HTTPS URL for `uri` on the host that served `request`. */
  def getURL(request: javax.servlet.http.HttpServletRequest, uri: String) : String =
    s"https://${request.getServerName()}$uri"
  /** Absolute URL of the OAuth callback endpoint for the given provider. */
  def getAuthenticatedURL(request: javax.servlet.http.HttpServletRequest, service: String) : String =
    getURL(request, s"$AUTHENTICATED_URI/$service")
  /** Google OAuth2 authorization URL pointing back at this deployment. */
  def getGoogleLoginURL(request: javax.servlet.http.HttpServletRequest) : String = {
    // TODO: CSRF verification using state variable.
    val redirectUri = getAuthenticatedURL(request, "google")
    s"https://accounts.google.com/o/oauth2/auth?scope=email+profile&state=xyz&redirect_uri=$redirectUri&response_type=code&client_id=$GOOGLE_CLIENT_ID&access_type=online"
  }
}
// Base shape of every JSON API response: a success flag and optional error text.
class ApiResult(success: Boolean, error_string: Option[String])
// Response whose payload is a single string.
case class StringDataApiResult(success: Boolean, error_string: Option[String], data: String)
  extends ApiResult(success, error_string)
// Response listing feed-candidate entries returned by an "add feed" lookup.
case class AddFeedListApiResult(success: Boolean, error_string: Option[String], data: List[AddFeedEntry])
  extends ApiResult(success, error_string)
// Response carrying no payload beyond the status fields.
case class NoDataApiResult(success: Boolean, error_string: Option[String])
  extends ApiResult(success, error_string)
// Response carrying a single feed's info.
case class FeedInfoApiResult(success: Boolean, error_string: Option[String], data: NewsFeedInfo)
  extends ApiResult(success, error_string)
// Response carrying a list of feeds.
case class FeedListApiResult(success: Boolean, error_string: Option[String], data: List[NewsFeedInfo])
  extends ApiResult(success, error_string)
// Article page plus the max article id seen, used for pagination cursors.
case class ArticleListWithMaxId(id: Long, list: List[NewsFeedArticleInfo])
// Same as ArticleListWithMaxId but each article also carries its feed.
case class SavedArticleListWithMaxId(id: Long, list: List[NewsFeedArticleInfoWithFeed])
// Response carrying an article page with its pagination cursor.
case class ArticleListApiResult(success: Boolean, error_string: Option[String], data: ArticleListWithMaxId)
  extends ApiResult(success, error_string)
| tmiw/newsrdr | src/main/scala/us/newsrdr/models/ApiResult.scala | Scala | bsd-3-clause | 2,071 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.columnar
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.sql.catalyst.InternalRow
/**
* A stackable trait used for building byte buffer for a column containing null values. Memory
* layout of the final byte buffer is:
* {{{
* .----------------------- Column type ID (4 bytes)
* | .------------------- Null count N (4 bytes)
* | | .--------------- Null positions (4 x N bytes, empty if null count is zero)
* | | | .--------- Non-null elements
* V V V V
* +---+---+-----+---------+
* | | | ... | ... ... |
* +---+---+-----+---------+
* }}}
*/
private[sql] trait NullableColumnBuilder extends ColumnBuilder {
  // Buffer of 4-byte row positions at which nulls occurred.
  protected var nulls: ByteBuffer = _
  // Number of null values appended so far.
  protected var nullCount: Int = _
  // Running row position, advanced once per appendFrom call (null or not).
  private var pos: Int = _
  abstract override def initialize(
      initialSize: Int,
      columnName: String,
      useCompression: Boolean): Unit = {
    nulls = ByteBuffer.allocate(1024)
    nulls.order(ByteOrder.nativeOrder())
    pos = 0
    nullCount = 0
    super.initialize(initialSize, columnName, useCompression)
  }
  abstract override def appendFrom(row: InternalRow, ordinal: Int): Unit = {
    columnStats.gatherStats(row, ordinal)
    if (row.isNullAt(ordinal)) {
      // Record only the position; the non-null buffer stores nothing for nulls.
      nulls = ColumnBuilder.ensureFreeSpace(nulls, 4)
      nulls.putInt(pos)
      nullCount += 1
    } else {
      super.appendFrom(row, ordinal)
    }
    pos += 1
  }
  abstract override def build(): ByteBuffer = {
    // Splice the null metadata between the type ID and the non-null payload,
    // producing the layout documented on this trait:
    // [type id][null count][null positions...][non-null elements].
    val nonNulls = super.build()
    val typeId = nonNulls.getInt()
    val nullDataLen = nulls.position()
    nulls.limit(nullDataLen)
    nulls.rewind()
    val buffer = ByteBuffer
      .allocate(4 + 4 + nullDataLen + nonNulls.remaining())
      .order(ByteOrder.nativeOrder())
      .putInt(typeId)
      .putInt(nullCount)
      .put(nulls)
      .put(nonNulls)
    buffer.rewind()
    buffer
  }
  // Builds only the non-null portion (used by subclasses that handle nulls
  // themselves, e.g. compression builders).
  protected def buildNonNulls(): ByteBuffer = {
    nulls.limit(nulls.position()).rewind()
    super.build()
  }
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala | Scala | apache-2.0 | 2,816 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.utils
import java.io.InputStream
import java.util
import java.util.concurrent.locks.ReentrantLock
import scala.io.Source
import org.apache.livy.Logging
/**
 * A FIFO queue bounded at `capacity` elements: appending to a full queue
 * evicts from the head (oldest first).
 *
 * @param capacity maximum number of elements retained; mutable, so the bound
 *                 may be tightened after construction.
 */
class CircularQueue[T](var capacity: Int) extends util.LinkedList[T] {
  override def add(t: T): Boolean = {
    // Loop (rather than removing once) so the invariant also holds when
    // `capacity` was lowered while the queue already held more elements.
    while (size >= capacity) removeFirst()
    super.add(t)
  }
}
/**
 * Consumes `inputStream` line by line on a daemon thread, logging each line
 * and keeping the most recent `logSize` lines available via `lines` (snapshot)
 * or `iterator` (blocking).
 *
 * All access to `_lines`/`_finished` is guarded by `_lock`; `_condition` is
 * signalled whenever a line arrives or the stream ends.
 */
class LineBufferedStream(inputStream: InputStream, logSize: Int) extends Logging {
  private[this] val _lines: CircularQueue[String] = new CircularQueue[String](logSize)
  private[this] val _lock = new ReentrantLock()
  private[this] val _condition = _lock.newCondition()
  private[this] var _finished = false
  private val thread = new Thread {
    override def run() = {
      // NOTE(review): Source.fromInputStream uses the default codec here —
      // confirm the process output is in the platform charset.
      val lines = Source.fromInputStream(inputStream).getLines()
      for (line <- lines) {
        info(line)
        _lock.lock()
        try {
          _lines.add(line)
          _condition.signalAll()
        } finally {
          _lock.unlock()
        }
      }
      _lock.lock()
      try {
        _finished = true
        _condition.signalAll()
      } finally {
        _lock.unlock()
      }
    }
  }
  thread.setDaemon(true)
  thread.start()
  /** Snapshot of the currently buffered lines. */
  def lines: IndexedSeq[String] = {
    _lock.lock()
    try {
      IndexedSeq.empty[String] ++ _lines.toArray(Array.empty[String])
    } finally {
      // Release in finally so an exception during the copy cannot leak the lock.
      _lock.unlock()
    }
  }
  /** Iterator whose hasNext blocks until a line arrives or the stream ends. */
  def iterator: Iterator[String] = {
    new LinesIterator
  }
  /** Blocks until the reader thread has consumed the whole stream. */
  def waitUntilClose(): Unit = thread.join()
  private class LinesIterator extends Iterator[String] {
    override def hasNext: Boolean = {
      if (_lines.size > 0) {
        true
      } else {
        // Otherwise we might still have more data.
        _lock.lock()
        try {
          // Await in a loop: guards against spurious wakeups, and re-checks
          // the buffer so lines enqueued just before _finished was set are
          // not dropped.
          while (_lines.isEmpty && !_finished) {
            _condition.await()
          }
          _lines.size > 0
        } finally {
          _lock.unlock()
        }
      }
    }
    override def next(): String = {
      _lock.lock()
      try {
        _lines.poll()
      } finally {
        // Release in finally so an exception cannot leak the lock.
        _lock.unlock()
      }
    }
  }
}
| ajbozarth/incubator-livy | server/src/main/scala/org/apache/livy/utils/LineBufferedStream.scala | Scala | apache-2.0 | 2,841 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.{Equality, Every, One, Many, Entry}
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class EveryShouldContainAllElementsOfSpec extends Spec {
  // Recursively upper-cases the string content of the value shapes this spec
  // compares: plain strings and chars, Everys of them, (String, String)
  // tuples, and java.util.Map entries with String key/value. Any other value
  // is returned unchanged.
  private def upperCase(value: Any): Any =
    value match {
      case l: Every[_] => l.map(upperCase(_))
      case s: String => s.toUpperCase
      case c: Char => c.toString.toUpperCase.charAt(0)
      case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
      case e: java.util.Map.Entry[_, _] =>
        (e.getKey, e.getValue) match {
          case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
          case _ => value
        }
      case _ => value
    }
  // Case-insensitive string Equality (via upperCase), used by the tests below
  // to distinguish default equality from an implicit/explicit custom one.
  val upperCaseStringEquality =
    new Equality[String] {
      def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
    }
//ADDITIONAL//
  // Exercises `contain allElementsOf` and its negated forms against a single
  // Every, under default equality, an implicit custom Equality, and an
  // explicitly supplied one. NOTE: the `thisLineNumber - 3` assertions are
  // position-sensitive relative to the intercept blocks above them; do not
  // insert lines inside the test bodies.
  object `an Every` {
    val fumList: Every[String] = Every("fex", "fum", "foe", "fie", "fee")
    val toList: Every[String] = Every("too", "you", "to", "birthday", "happy")
    // Positive form: `should contain allElementsOf Seq(..)`.
    object `when used with contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        fumList should contain allElementsOf Seq("fee", "fie", "foe", "fum")
        val e1 = intercept[TestFailedException] {
          fumList should contain allElementsOf Seq("happy", "birthday", "to", "you")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.didNotContainAllElementsOf(fumList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        fumList should contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM")
        intercept[TestFailedException] {
          fumList should contain allElementsOf Seq("fee", "fie", "foe", "fam")
        }
      }
      def `should use an explicitly provided Equality` {
        (fumList should contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM")) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (fumList should contain allElementsOf Seq("fee", "fie", "foe", "fam")) (decided by upperCaseStringEquality)
        }
        intercept[TestFailedException] {
          fumList should contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM ")
        }
        (fumList should contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM ")) (after being lowerCased and trimmed)
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          fumList should contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Positive form, parenthesized: `should (contain allElementsOf Seq(..))`.
    object `when used with (contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("happy", "birthday", "to", "you"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.didNotContainAllElementsOf(fumList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM"))
        intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fam"))
        }
      }
      def `should use an explicitly provided Equality` {
        (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fam"))) (decided by upperCaseStringEquality)
        }
        intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))
        }
        (fumList should (contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated form: `should not contain allElementsOf (Seq(..))`.
    object `when used with not contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        toList should not contain allElementsOf (Seq("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          toList should not contain allElementsOf (Seq("happy", "birthday", "to", "you"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.containedAllElementsOf(toList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        toList should not contain allElementsOf (Seq("happy", "birthday", "to", "you", "dear"))
        intercept[TestFailedException] {
          toList should not contain allElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))
        }
      }
      def `should use an explicitly provided Equality` {
        (toList should not contain allElementsOf (Seq("happy", "birthday", "to", "you", "dear"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (toList should not contain allElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
        }
        toList should not contain allElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
        intercept[TestFailedException] {
          (toList should not contain allElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          toList should not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated form, parenthesized: `should (not contain allElementsOf (Seq(..)))`.
    object `when used with (not contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        toList should (not contain allElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU", "DEAR")))
        val e1 = intercept[TestFailedException] {
          toList should (not contain allElementsOf (Seq("happy", "birthday", "to", "you")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.containedAllElementsOf(toList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        toList should (not contain allElementsOf (Seq("NICE", "TO", "MEET", "YOU", "TOO")))
        intercept[TestFailedException] {
          toList should (not contain allElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU")))
        }
      }
      def `should use an explicitly provided Equality` {
        (toList should (not contain allElementsOf (Seq("NICE", "TO", "MEET", "YOU", "TOO")))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (toList should (not contain allElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
        }
        toList should (not contain allElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
        intercept[TestFailedException] {
          (toList should (not contain allElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          toList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // `shouldNot contain allElementsOf Seq(..)` form.
    object `when used with shouldNot contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        toList shouldNot contain allElementsOf Seq("fee", "fie", "foe", "fum")
        val e1 = intercept[TestFailedException] {
          toList shouldNot contain allElementsOf Seq("happy", "birthday", "to", "you")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.containedAllElementsOf(toList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        toList shouldNot contain allElementsOf Seq("happy", "birthday", "to", "you", "dear")
        intercept[TestFailedException] {
          toList shouldNot contain allElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU")
        }
      }
      def `should use an explicitly provided Equality` {
        (toList shouldNot contain allElementsOf Seq("happy", "birthday", "to", "you", "dear")) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (toList shouldNot contain allElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU")) (decided by upperCaseStringEquality)
        }
        toList shouldNot contain allElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")
        intercept[TestFailedException] {
          (toList shouldNot contain allElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          toList shouldNot contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // `shouldNot (contain allElementsOf Seq(..))` form.
    object `when used with shouldNot (contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        toList shouldNot (contain allElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU", "DEAR"))
        val e1 = intercept[TestFailedException] {
          toList shouldNot (contain allElementsOf Seq("happy", "birthday", "to", "you"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message.get should be (FailureMessages.containedAllElementsOf(toList, Seq("happy", "birthday", "to", "you")))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        toList shouldNot (contain allElementsOf Seq("NICE", "TO", "MEET", "YOU", "TOO"))
        intercept[TestFailedException] {
          toList shouldNot (contain allElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))
        }
      }
      def `should use an explicitly provided Equality` {
        (toList shouldNot (contain allElementsOf Seq("NICE", "TO", "MEET", "YOU", "TOO"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (toList shouldNot (contain allElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
        }
        toList shouldNot (contain allElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
        intercept[TestFailedException] {
          (toList shouldNot (contain allElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          toList shouldNot (contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
  }
  // Exercises `contain allElementsOf` when the subject is an Every of Everys,
  // driven through the inspector shorthands (all/atLeast/atMost/no).
  // NOTE: the defs below assert exact failure positions via `thisLineNumber - n`
  // (n = 3 for the intercept line, n = 5 inside the inspection message), so the
  // relative line spacing inside each def must not change; comments are added
  // only at object/def boundaries, which shifts all offsets uniformly.
  object `an every of Everys` {
    // Shared fixtures: list1s/lists hold Ints (lists' last entry differs so
    // `all`-inspections can fail at index 2); hiLists/toLists hold Strings.
    val list1s: Every[Every[Int]] = Every(Every(3, 2, 1, 0), Every(3, 2, 1, 0), Every(3, 2, 1, 0))
    val lists: Every[Every[Int]] = Every(Every(3, 2, 1, 0), Every(3, 2, 1, 0), Every(8, 4, 3, 2))
    val hiLists: Every[Every[String]] = Every(Every("howdy", "hi", "he"), Every("howdy", "hi", "he"), Every("howdy", "hi", "he"))
    val toLists: Every[Every[String]] = Every(Every("happy", "to", "you"), Every("happy", "to", "you"), Every("happy", "to", "you"))
    // Positive containment, unparenthesised matcher form.
    object `when used with contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (list1s) should contain allElementsOf Seq(1, 2, 3)
        atLeast (2, lists) should contain allElementsOf Seq(1, 2, 3)
        atMost (2, lists) should contain allElementsOf Seq(1, 2, 3)
        no (lists) should contain allElementsOf Seq(3, 4, 5)
        val e1 = intercept[TestFailedException] {
          all (lists) should contain allElementsOf Seq(1, 2, 3)
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 2, " + decorateToStringValue(Many(8, 4, 3, 2)) + " did not contain all elements of " + decorateToStringValue(List(1, 2, 3)) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(lists)))
      }
      def `should use the implicit Equality in scope` {
        all (hiLists) should contain allElementsOf Seq("he", "hi")
        intercept[TestFailedException] {
          all (hiLists) should contain allElementsOf Seq("ho", "hi")
        }
        implicit val ise = upperCaseStringEquality
        all (hiLists) should contain allElementsOf Seq("HE", "HI")
        intercept[TestFailedException] {
          all (hiLists) should contain allElementsOf Seq("HO", "HI")
        }
      }
      def `should use an explicitly provided Equality` {
        (all (hiLists) should contain allElementsOf Seq("HE", "HI")) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (hiLists) should contain allElementsOf Seq("HO", "HI")) (decided by upperCaseStringEquality)
        }
        implicit val ise = upperCaseStringEquality
        (all (hiLists) should contain allElementsOf Seq("he", "hi")) (decided by defaultEquality[String])
        intercept[TestFailedException] {
          (all (hiLists) should contain allElementsOf Seq("ho", "hi")) (decided by defaultEquality[String])
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (list1s) should contain allElementsOf Seq(1, 2, 2, 3)
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Positive containment, parenthesised matcher form.
    object `when used with (contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (list1s) should (contain allElementsOf Seq(1, 2, 3))
        atLeast (2, lists) should (contain allElementsOf Seq(1, 2, 3))
        atMost (2, lists) should (contain allElementsOf Seq(1, 2, 3))
        no (lists) should (contain allElementsOf Seq(3, 4, 5))
        val e1 = intercept[TestFailedException] {
          all (lists) should (contain allElementsOf Seq(1, 2, 3))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 2, " + decorateToStringValue(Many(8, 4, 3, 2)) + " did not contain all elements of " + decorateToStringValue(List(1, 2, 3)) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(lists)))
      }
      def `should use the implicit Equality in scope` {
        all (hiLists) should (contain allElementsOf Seq("he", "hi"))
        intercept[TestFailedException] {
          all (hiLists) should (contain allElementsOf Seq("ho", "hi"))
        }
        implicit val ise = upperCaseStringEquality
        all (hiLists) should (contain allElementsOf Seq("HE", "HI"))
        intercept[TestFailedException] {
          all (hiLists) should (contain allElementsOf Seq("HO", "HI"))
        }
      }
      def `should use an explicitly provided Equality` {
        (all (hiLists) should (contain allElementsOf Seq("HE", "HI"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (hiLists) should (contain allElementsOf Seq("HO", "HI"))) (decided by upperCaseStringEquality)
        }
        implicit val ise = upperCaseStringEquality
        (all (hiLists) should (contain allElementsOf Seq("he", "hi"))) (decided by defaultEquality[String])
        intercept[TestFailedException] {
          (all (hiLists) should (contain allElementsOf Seq("ho", "hi"))) (decided by defaultEquality[String])
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (list1s) should (contain allElementsOf Seq(1, 2, 2, 3))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated containment via `should not contain`, unparenthesised form.
    object `when used with not contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (toLists) should not contain allElementsOf (Seq("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          all (toLists) should not contain allElementsOf (Seq("you", "to"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 0, " + decorateToStringValue(Many("happy", "to", "you")) + " contained all elements of " + decorateToStringValue(List("you", "to")) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(toLists)))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        all (toLists) should not contain allElementsOf (Seq("NICE", "MEET", "YOU"))
        intercept[TestFailedException] {
          all (toLists) should not contain allElementsOf (Seq("YOU", "TO"))
        }
      }
      def `should use an explicitly provided Equality` {
        (all (toLists) should not contain allElementsOf (Seq("NICE", "MEET", "YOU"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) should not contain allElementsOf (Seq("YOU", "TO"))) (decided by upperCaseStringEquality)
        }
        all (toLists) should not contain allElementsOf (Seq(" YOU ", " TO "))
        intercept[TestFailedException] {
          (all (toLists) should not contain allElementsOf (Seq(" YOU ", " TO "))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (toLists) should not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated containment via `should not contain`, parenthesised form.
    object `when used with (not contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (toLists) should (not contain allElementsOf (Seq("fee", "fie", "foe", "fum")))
        val e1 = intercept[TestFailedException] {
          all (toLists) should (not contain allElementsOf (Seq("you", "to")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 0, " + decorateToStringValue(Many("happy", "to", "you")) + " contained all elements of " + decorateToStringValue(List("you", "to")) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(toLists)))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        all (toLists) should (not contain allElementsOf (Seq("NICE", "MEET", "YOU")))
        intercept[TestFailedException] {
          all (toLists) should (not contain allElementsOf (Seq("YOU", "TO")))
        }
      }
      def `should use an explicitly provided Equality` {
        (all (toLists) should (not contain allElementsOf (Seq("NICE", "MEET", "YOU")))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) should (not contain allElementsOf (Seq("YOU", "TO")))) (decided by upperCaseStringEquality)
        }
        all (toLists) should (not contain allElementsOf (Seq(" YOU ", " TO ")))
        intercept[TestFailedException] {
          (all (toLists) should (not contain allElementsOf (Seq(" YOU ", " TO ")))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (toLists) should (not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated containment via `shouldNot`, unparenthesised form.
    object `when used with shouldNot contain allElementsOf Seq(..)` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (toLists) shouldNot contain allElementsOf Seq("fee", "fie", "foe", "fum")
        val e1 = intercept[TestFailedException] {
          all (toLists) shouldNot contain allElementsOf Seq("you", "to")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 0, " + decorateToStringValue(Many("happy", "to", "you")) + " contained all elements of " + decorateToStringValue(List("you", "to")) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(toLists)))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        all (toLists) shouldNot contain allElementsOf Seq("NICE", "MEET", "YOU")
        intercept[TestFailedException] {
          all (toLists) shouldNot contain allElementsOf Seq("YOU", "TO")
        }
      }
      def `should use an explicitly provided Equality` {
        (all (toLists) shouldNot contain allElementsOf Seq("NICE", "MEET", "YOU")) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) shouldNot contain allElementsOf Seq("YOU", "TO")) (decided by upperCaseStringEquality)
        }
        all (toLists) shouldNot contain allElementsOf Seq(" YOU ", " TO ")
        intercept[TestFailedException] {
          (all (toLists) shouldNot contain allElementsOf Seq(" YOU ", " TO ")) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (toLists) shouldNot contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum")
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
    // Negated containment via `shouldNot`, parenthesised form.
    object `when used with shouldNot (contain allElementsOf Seq(..))` {
      def `should do nothing if valid, else throw a TFE with an appropriate error message` {
        all (toLists) shouldNot (contain allElementsOf Seq("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          all (toLists) shouldNot (contain allElementsOf Seq("you", "to"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          "  at index 0, " + decorateToStringValue(Many("happy", "to", "you")) + " contained all elements of " + decorateToStringValue(List("you", "to")) + " (EveryShouldContainAllElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
          "in " + decorateToStringValue(toLists)))
      }
      def `should use the implicit Equality in scope` {
        implicit val ise = upperCaseStringEquality
        all (toLists) shouldNot (contain allElementsOf Seq("NICE", "MEET", "YOU"))
        intercept[TestFailedException] {
          all (toLists) shouldNot (contain allElementsOf Seq("YOU", "TO"))
        }
      }
      def `should use an explicitly provided Equality` {
        (all (toLists) shouldNot (contain allElementsOf Seq("NICE", "MEET", "YOU"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) shouldNot (contain allElementsOf Seq("YOU", "TO"))) (decided by upperCaseStringEquality)
        }
        all (toLists) shouldNot (contain allElementsOf Seq(" YOU ", " TO "))
        intercept[TestFailedException] {
          (all (toLists) shouldNot (contain allElementsOf Seq(" YOU ", " TO "))) (after being lowerCased and trimmed)
        }
      }
      def `should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value` {
        val e1 = intercept[exceptions.NotAllowedException] {
          all (toLists) shouldNot (contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainAllElementsOfSpec.scala")
        e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some(FailureMessages.allElementsOfDuplicate))
      }
    }
  }
}
| rahulkavale/scalatest | scalatest-test/src/test/scala/org/scalatest/EveryShouldContainAllElementsOfSpec.scala | Scala | apache-2.0 | 30,327 |
// Regression fixture for pattern-match exhaustivity analysis (tests/patmat).
// The match below is deliberately non-exhaustive (no general `head :: tail`
// case) -- the compiler's warning output on this shape is what the test checks,
// so the match must not be "completed".
// NOTE(review): patmat check files may record warning line numbers; if this
// test has an expected-output file, adding lines here would need it regenerated.
object Test {
  // Prints the strings of a list of length <= 2; recurses on the head for the
  // two-element case, printing the head character by character afterwards.
  def printList(in: List[String]): Unit = in match {
    // `Unit` here is the companion object, discarded to yield (); kept verbatim
    // as part of the fixture.
    case Nil => Unit
    case (s: String) :: Nil =>
      println(s)
    case head :: (s: String) :: Nil =>
      printList(head :: Nil)
      for(i <- head){
        print(i)
      }
      println
      println(s)
  }
}
| som-snytt/dotty | tests/patmat/t4408.scala | Scala | apache-2.0 | 290 |
package com.asto.dop.streamprocessor.process
import com.asto.dop.core.CoreModel
import com.asto.dop.streamprocessor.DOPContext
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
/**
 * Turns a raw message batch into [[CoreModel]] records and dispatches them to
 * the configured sink(s). Serializable so it can ship inside Spark closures.
 */
object ProcessFactory extends Serializable with Logging {

  /**
   * Parses every line of the batch into a [[CoreModel]] and hands the result
   * to the Elasticsearch processor.
   *
   * @param rdd     raw message lines of this batch
   * @param context shared DOP runtime context passed through to the sink
   */
  def process(rdd: RDD[String], context: DOPContext): Unit = {
    // Parse one raw line, tracing it at debug level first.
    def parse(line: String): CoreModel = {
      log.debug(s"Received a message : $line")
      CoreModel(line)
    }
    val parsed: RDD[CoreModel] = rdd.map(parse)
    ESProcessor.save(parsed, context)
    // Additional sinks, currently disabled:
    // HDFSProcessor.save(rdd, context)
    // RedisProcessor.save(parsed, context)
    // HBaseProcessor.saveToBasic(parsed, context)
  }
}
| zj-lingxin/dop | source/stream-processor/src/main/scala/com/asto/dop/streamprocessor/process/ProcessFactory.scala | Scala | mit | 641 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.linalg
import java.util.{Arrays, Random}
import scala.collection.mutable.{ArrayBuffer, ArrayBuilder => MArrayBuilder, HashSet => MHashSet}
import breeze.linalg.{CSCMatrix => BSM, DenseMatrix => BDM, Matrix => BM}
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import org.apache.spark.annotation.Since
/**
* Trait for a local matrix.
*/
@Since("2.0.0")
sealed trait Matrix extends Serializable {

  /** Number of rows. */
  @Since("2.0.0")
  def numRows: Int

  /** Number of columns. */
  @Since("2.0.0")
  def numCols: Int

  /** Flag that keeps track whether the matrix is transposed or not. False by default. */
  @Since("2.0.0")
  val isTransposed: Boolean = false

  /** Indicates whether the values backing this matrix are arranged in column major order. */
  private[ml] def isColMajor: Boolean = !isTransposed

  /** Indicates whether the values backing this matrix are arranged in row major order. */
  private[ml] def isRowMajor: Boolean = isTransposed

  /** Converts to a dense array in column major. */
  @Since("2.0.0")
  def toArray: Array[Double] = {
    val newArray = new Array[Double](numRows * numCols)
    // Column-major layout: element (i, j) lands at j * numRows + i.
    foreachActive { (i, j, v) =>
      newArray(j * numRows + i) = v
    }
    newArray
  }

  /**
   * Returns an iterator of column vectors.
   * This operation could be expensive, depending on the underlying storage.
   */
  @Since("2.0.0")
  def colIter: Iterator[Vector]

  /**
   * Returns an iterator of row vectors.
   * This operation could be expensive, depending on the underlying storage.
   */
  @Since("2.0.0")
  def rowIter: Iterator[Vector] = this.transpose.colIter

  /** Converts to a breeze matrix. */
  private[ml] def asBreeze: BM[Double]

  /** Gets the (i, j)-th element. */
  @Since("2.0.0")
  def apply(i: Int, j: Int): Double

  /** Return the index for the (i, j)-th element in the backing array. */
  private[ml] def index(i: Int, j: Int): Int

  /** Update element at (i, j) */
  private[ml] def update(i: Int, j: Int, v: Double): Unit

  /** Get a deep copy of the matrix. */
  @Since("2.0.0")
  def copy: Matrix

  /**
   * Transpose the Matrix. Returns a new `Matrix` instance sharing the same underlying data.
   */
  @Since("2.0.0")
  def transpose: Matrix

  /**
   * Convenience method for `Matrix`-`DenseMatrix` multiplication.
   */
  @Since("2.0.0")
  def multiply(y: DenseMatrix): DenseMatrix = {
    val C: DenseMatrix = DenseMatrix.zeros(numRows, y.numCols)
    // C := 1.0 * this * y + 0.0 * C
    BLAS.gemm(1.0, this, y, 0.0, C)
    C
  }

  /**
   * Convenience method for `Matrix`-`DenseVector` multiplication. For binary compatibility.
   */
  @Since("2.0.0")
  def multiply(y: DenseVector): DenseVector = {
    multiply(y.asInstanceOf[Vector])
  }

  /**
   * Convenience method for `Matrix`-`Vector` multiplication.
   */
  @Since("2.0.0")
  def multiply(y: Vector): DenseVector = {
    val output = new DenseVector(new Array[Double](numRows))
    // output := 1.0 * this * y + 0.0 * output
    BLAS.gemv(1.0, this, y, 0.0, output)
    output
  }

  /** A human readable representation of the matrix */
  override def toString: String = asBreeze.toString()

  /** A human readable representation of the matrix with maximum lines and width */
  @Since("2.0.0")
  def toString(maxLines: Int, maxLineWidth: Int): String = asBreeze.toString(maxLines, maxLineWidth)

  /**
   * Map the values of this matrix using a function. Generates a new matrix. Performs the
   * function on only the backing array. For example, an operation such as addition or
   * subtraction will only be performed on the non-zero values in a `SparseMatrix`.
   */
  private[spark] def map(f: Double => Double): Matrix

  /**
   * Update all the values of this matrix using the function f. Performed in-place on the
   * backing array. For example, an operation such as addition or subtraction will only be
   * performed on the non-zero values in a `SparseMatrix`.
   */
  private[ml] def update(f: Double => Double): Matrix

  /**
   * Applies a function `f` to all the active elements of dense and sparse matrix. The ordering
   * of the elements are not defined.
   *
   * @param f the function takes three parameters where the first two parameters are the row
   *          and column indices respectively with the type `Int`, and the final parameter is the
   *          corresponding value in the matrix with type `Double`.
   */
  @Since("2.2.0")
  def foreachActive(f: (Int, Int, Double) => Unit): Unit

  /**
   * Find the number of non-zero active values.
   */
  @Since("2.0.0")
  def numNonzeros: Int

  /**
   * Find the number of values stored explicitly. These values can be zero as well.
   */
  @Since("2.0.0")
  def numActives: Int

  /**
   * Converts this matrix to a sparse matrix.
   *
   * @param colMajor Whether the values of the resulting sparse matrix should be in column major
   *                  or row major order. If `false`, resulting matrix will be row major.
   */
  private[ml] def toSparseMatrix(colMajor: Boolean): SparseMatrix

  /**
   * Converts this matrix to a sparse matrix in column major order.
   */
  @Since("2.2.0")
  def toSparseColMajor: SparseMatrix = toSparseMatrix(colMajor = true)

  /**
   * Converts this matrix to a sparse matrix in row major order.
   */
  @Since("2.2.0")
  def toSparseRowMajor: SparseMatrix = toSparseMatrix(colMajor = false)

  /**
   * Converts this matrix to a sparse matrix while maintaining the layout of the current matrix.
   */
  @Since("2.2.0")
  def toSparse: SparseMatrix = toSparseMatrix(colMajor = isColMajor)

  /**
   * Converts this matrix to a dense matrix.
   *
   * @param colMajor Whether the values of the resulting dense matrix should be in column major
   *                 or row major order. If `false`, resulting matrix will be row major.
   */
  private[ml] def toDenseMatrix(colMajor: Boolean): DenseMatrix

  /**
   * Converts this matrix to a dense matrix while maintaining the layout of the current matrix.
   */
  @Since("2.2.0")
  def toDense: DenseMatrix = toDenseMatrix(colMajor = isColMajor)

  /**
   * Converts this matrix to a dense matrix in row major order.
   */
  @Since("2.2.0")
  def toDenseRowMajor: DenseMatrix = toDenseMatrix(colMajor = false)

  /**
   * Converts this matrix to a dense matrix in column major order.
   */
  @Since("2.2.0")
  def toDenseColMajor: DenseMatrix = toDenseMatrix(colMajor = true)

  /**
   * Returns a matrix in dense or sparse column major format, whichever uses less storage.
   */
  @Since("2.2.0")
  def compressedColMajor: Matrix = {
    // Ties favour the dense representation.
    if (getDenseSizeInBytes <= getSparseSizeInBytes(colMajor = true)) {
      this.toDenseColMajor
    } else {
      this.toSparseColMajor
    }
  }

  /**
   * Returns a matrix in dense or sparse row major format, whichever uses less storage.
   */
  @Since("2.2.0")
  def compressedRowMajor: Matrix = {
    // Ties favour the dense representation.
    if (getDenseSizeInBytes <= getSparseSizeInBytes(colMajor = false)) {
      this.toDenseRowMajor
    } else {
      this.toSparseRowMajor
    }
  }

  /**
   * Returns a matrix in dense column major, dense row major, sparse row major, or sparse column
   * major format, whichever uses less storage. When dense representation is optimal, it maintains
   * the current layout order.
   */
  @Since("2.2.0")
  def compressed: Matrix = {
    val cscSize = getSparseSizeInBytes(colMajor = true)
    val csrSize = getSparseSizeInBytes(colMajor = false)
    if (getDenseSizeInBytes <= math.min(cscSize, csrSize)) {
      // dense matrix size is the same for column major and row major, so maintain current layout
      this.toDense
    } else if (cscSize <= csrSize) {
      // Sparse wins; ties between CSC and CSR favour CSC (column major).
      this.toSparseColMajor
    } else {
      this.toSparseRowMajor
    }
  }

  /** Gets the size of the dense representation of this `Matrix`. */
  private[ml] def getDenseSizeInBytes: Long = {
    Matrices.getDenseSize(numCols, numRows)
  }

  /** Gets the size of the minimal sparse representation of this `Matrix`. */
  private[ml] def getSparseSizeInBytes(colMajor: Boolean): Long = {
    val nnz = numNonzeros
    // One pointer per column (CSC) or per row (CSR), plus a trailing sentinel.
    val numPtrs = if (colMajor) numCols + 1L else numRows + 1L
    Matrices.getSparseSize(nnz, numPtrs)
  }

  /** Gets the current size in bytes of this `Matrix`. Useful for testing */
  private[ml] def getSizeInBytes: Long
}
/**
* Column-major dense matrix.
* The entry values are stored in a single array of doubles with columns listed in sequence.
* For example, the following matrix
* {{{
* 1.0 2.0
* 3.0 4.0
* 5.0 6.0
* }}}
* is stored as `[1.0, 3.0, 5.0, 2.0, 4.0, 6.0]`.
*
* @param numRows number of rows
* @param numCols number of columns
* @param values matrix entries in column major if not transposed or in row major otherwise
* @param isTransposed whether the matrix is transposed. If true, `values` stores the matrix in
* row major.
*/
@Since("2.0.0")
class DenseMatrix @Since("2.0.0") (
    @Since("2.0.0") val numRows: Int,
    @Since("2.0.0") val numCols: Int,
    @Since("2.0.0") val values: Array[Double],
    override val isTransposed: Boolean) extends Matrix {

  require(values.length == numRows * numCols, "The number of values supplied doesn't match the " +
    s"size of the matrix! values.length: ${values.length}, numRows * numCols: ${numRows * numCols}")

  /**
   * Column-major dense matrix.
   * The entry values are stored in a single array of doubles with columns listed in sequence.
   * For example, the following matrix
   * {{{
   *   1.0 2.0
   *   3.0 4.0
   *   5.0 6.0
   * }}}
   * is stored as `[1.0, 3.0, 5.0, 2.0, 4.0, 6.0]`.
   *
   * @param numRows number of rows
   * @param numCols number of columns
   * @param values matrix entries in column major
   */
  @Since("2.0.0")
  def this(numRows: Int, numCols: Int, values: Array[Double]) =
    this(numRows, numCols, values, false)

  override def equals(o: Any): Boolean = o match {
    case m: Matrix => asBreeze == m.asBreeze
    case _ => false
  }

  override def hashCode: Int = {
    // BUG FIX: hash the *contents* of the backing array rather than the array
    // reference. Scala arrays use identity-based `##`, so the previous form
    // `Seq(numRows, numCols, toArray).##` gave two structurally equal matrices
    // different hash codes, violating the equals/hashCode contract (`equals`
    // above compares contents via asBreeze). `toArray` is always column major,
    // so a matrix and its lazily-transposed twin hash identically too.
    // NOTE(review): a DenseMatrix can still `equals` a SparseMatrix (via
    // asBreeze) while their hash codes differ, since SparseMatrix hashes via
    // asBreeze -- confirm whether cross-representation hashing matters here.
    Seq(numRows, numCols, Arrays.hashCode(toArray)).##
  }

  private[ml] def asBreeze: BM[Double] = {
    if (!isTransposed) {
      new BDM[Double](numRows, numCols, values)
    } else {
      // values are row major; build the transposed breeze matrix and flip it.
      val breezeMatrix = new BDM[Double](numCols, numRows, values)
      breezeMatrix.t
    }
  }

  /** Raw access into the backing array (layout-dependent index). */
  private[ml] def apply(i: Int): Double = values(i)

  override def apply(i: Int, j: Int): Double = values(index(i, j))

  private[ml] def index(i: Int, j: Int): Int = {
    require(i >= 0 && i < numRows, s"Expected 0 <= i < $numRows, got i = $i.")
    require(j >= 0 && j < numCols, s"Expected 0 <= j < $numCols, got j = $j.")
    if (!isTransposed) i + numRows * j else j + numCols * i
  }

  private[ml] def update(i: Int, j: Int, v: Double): Unit = {
    values(index(i, j)) = v
  }

  override def copy: DenseMatrix = new DenseMatrix(numRows, numCols, values.clone())

  private[spark] def map(f: Double => Double) = new DenseMatrix(numRows, numCols, values.map(f),
    isTransposed)

  private[ml] def update(f: Double => Double): DenseMatrix = {
    val len = values.length
    var i = 0
    while (i < len) {
      values(i) = f(values(i))
      i += 1
    }
    this
  }

  override def transpose: DenseMatrix = new DenseMatrix(numCols, numRows, values, !isTransposed)

  override def foreachActive(f: (Int, Int, Double) => Unit): Unit = {
    if (!isTransposed) {
      // outer loop over columns
      var j = 0
      while (j < numCols) {
        var i = 0
        val indStart = j * numRows
        while (i < numRows) {
          f(i, j, values(indStart + i))
          i += 1
        }
        j += 1
      }
    } else {
      // outer loop over rows
      var i = 0
      while (i < numRows) {
        var j = 0
        val indStart = i * numCols
        while (j < numCols) {
          f(i, j, values(indStart + j))
          j += 1
        }
        i += 1
      }
    }
  }

  override def numNonzeros: Int = values.count(_ != 0)

  override def numActives: Int = values.length

  /**
   * Generate a `SparseMatrix` from the given `DenseMatrix`.
   *
   * @param colMajor Whether the resulting `SparseMatrix` values will be in column major order.
   */
  private[ml] override def toSparseMatrix(colMajor: Boolean): SparseMatrix = {
    // Row-major output is obtained by building CSC on the transpose, then flipping.
    if (!colMajor) this.transpose.toSparseColMajor.transpose
    else {
      val spVals: MArrayBuilder[Double] = new MArrayBuilder.ofDouble
      val colPtrs: Array[Int] = new Array[Int](numCols + 1)
      val rowIndices: MArrayBuilder[Int] = new MArrayBuilder.ofInt
      var nnz = 0
      var j = 0
      while (j < numCols) {
        var i = 0
        while (i < numRows) {
          val v = values(index(i, j))
          if (v != 0.0) {
            rowIndices += i
            spVals += v
            nnz += 1
          }
          i += 1
        }
        j += 1
        colPtrs(j) = nnz
      }
      new SparseMatrix(numRows, numCols, colPtrs, rowIndices.result(), spVals.result())
    }
  }

  /**
   * Generate a `DenseMatrix` from this `DenseMatrix`.
   *
   * @param colMajor Whether the resulting `DenseMatrix` values will be in column major order.
   */
  private[ml] override def toDenseMatrix(colMajor: Boolean): DenseMatrix = {
    if (isRowMajor && colMajor) {
      new DenseMatrix(numRows, numCols, this.toArray, isTransposed = false)
    } else if (isColMajor && !colMajor) {
      new DenseMatrix(numRows, numCols, this.transpose.toArray, isTransposed = true)
    } else {
      this
    }
  }

  override def colIter: Iterator[Vector] = {
    if (isTransposed) {
      Iterator.tabulate(numCols) { j =>
        val col = new Array[Double](numRows)
        // Gather column j from the row-major backing array with stride numCols.
        blas.dcopy(numRows, values, j, numCols, col, 0, 1)
        new DenseVector(col)
      }
    } else {
      Iterator.tabulate(numCols) { j =>
        new DenseVector(values.slice(j * numRows, (j + 1) * numRows))
      }
    }
  }

  private[ml] def getSizeInBytes: Long = Matrices.getDenseSize(numCols, numRows)
}
/**
* Factory methods for [[org.apache.spark.ml.linalg.DenseMatrix]].
*/
@Since("2.0.0")
object DenseMatrix {

  /** Rejects dimensions whose dense allocation would overflow a single array. */
  private def checkAllocationSize(numRows: Int, numCols: Int): Unit = {
    require(numRows.toLong * numCols <= Int.MaxValue,
      s"$numRows x $numCols dense matrix is too large to allocate")
  }

  /**
   * Generate a `DenseMatrix` consisting of zeros.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @return `DenseMatrix` with size `numRows` x `numCols` and values of zeros
   */
  @Since("2.0.0")
  def zeros(numRows: Int, numCols: Int): DenseMatrix = {
    checkAllocationSize(numRows, numCols)
    new DenseMatrix(numRows, numCols, Array.ofDim[Double](numRows * numCols))
  }

  /**
   * Generate a `DenseMatrix` consisting of ones.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @return `DenseMatrix` with size `numRows` x `numCols` and values of ones
   */
  @Since("2.0.0")
  def ones(numRows: Int, numCols: Int): DenseMatrix = {
    checkAllocationSize(numRows, numCols)
    new DenseMatrix(numRows, numCols, Array.fill(numRows * numCols)(1.0))
  }

  /**
   * Generate an Identity Matrix in `DenseMatrix` format.
   * @param n number of rows and columns of the matrix
   * @return `DenseMatrix` with size `n` x `n` and values of ones on the diagonal
   */
  @Since("2.0.0")
  def eye(n: Int): DenseMatrix = {
    val identity = zeros(n, n)
    (0 until n).foreach(i => identity.update(i, i, 1.0))
    identity
  }

  /**
   * Generate a `DenseMatrix` consisting of `i.i.d.` uniform random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param rng a random number generator
   * @return `DenseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
   */
  @Since("2.0.0")
  def rand(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
    checkAllocationSize(numRows, numCols)
    new DenseMatrix(numRows, numCols, Array.tabulate(numRows * numCols)(_ => rng.nextDouble()))
  }

  /**
   * Generate a `DenseMatrix` consisting of `i.i.d.` gaussian random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param rng a random number generator
   * @return `DenseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
   */
  @Since("2.0.0")
  def randn(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
    checkAllocationSize(numRows, numCols)
    new DenseMatrix(numRows, numCols, Array.tabulate(numRows * numCols)(_ => rng.nextGaussian()))
  }

  /**
   * Generate a diagonal matrix in `DenseMatrix` format from the supplied values.
   * @param vector a `Vector` that will form the values on the diagonal of the matrix
   * @return Square `DenseMatrix` with size `values.length` x `values.length` and `values`
   *         on the diagonal
   */
  @Since("2.0.0")
  def diag(vector: Vector): DenseMatrix = {
    val n = vector.size
    val matrix = zeros(n, n)
    val diagonal = vector.toArray
    (0 until n).foreach(i => matrix.update(i, i, diagonal(i)))
    matrix
  }
}
/**
 * Column-major sparse matrix.
 * The entry values are stored in Compressed Sparse Column (CSC) format.
 * For example, the following matrix
 * {{{
 *   1.0 0.0 4.0
 *   0.0 3.0 5.0
 *   2.0 0.0 6.0
 * }}}
 * is stored as `values: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]`,
 * `rowIndices=[0, 2, 1, 0, 1, 2]`, `colPointers=[0, 2, 3, 6]`.
 *
 * @param numRows number of rows
 * @param numCols number of columns
 * @param colPtrs the index corresponding to the start of a new column (if not transposed)
 * @param rowIndices the row index of the entry (if not transposed). They must be in strictly
 *                   increasing order for each column
 * @param values nonzero matrix entries in column major (if not transposed)
 * @param isTransposed whether the matrix is transposed. If true, the matrix can be considered
 *                     Compressed Sparse Row (CSR) format, where `colPtrs` behaves as rowPtrs,
 *                     and `rowIndices` behave as colIndices, and `values` are stored in row major.
 */
@Since("2.0.0")
class SparseMatrix @Since("2.0.0") (
    @Since("2.0.0") val numRows: Int,
    @Since("2.0.0") val numCols: Int,
    @Since("2.0.0") val colPtrs: Array[Int],
    @Since("2.0.0") val rowIndices: Array[Int],
    @Since("2.0.0") val values: Array[Double],
    override val isTransposed: Boolean) extends Matrix {

  // Structural invariants of the CSC/CSR layout: one index per stored value,
  // one pointer per column (or per row when transposed) plus a trailing sentinel,
  // and the sentinel must equal the number of stored entries.
  require(values.length == rowIndices.length, "The number of row indices and values don't match! " +
    s"values.length: ${values.length}, rowIndices.length: ${rowIndices.length}")
  if (isTransposed) {
    require(colPtrs.length == numRows + 1,
      s"Expecting ${numRows + 1} colPtrs when numRows = $numRows but got ${colPtrs.length}")
  } else {
    require(colPtrs.length == numCols + 1,
      s"Expecting ${numCols + 1} colPtrs when numCols = $numCols but got ${colPtrs.length}")
  }
  require(values.length == colPtrs.last, "The last value of colPtrs must equal the number of " +
    s"elements. values.length: ${values.length}, colPtrs.last: ${colPtrs.last}")

  /**
   * Column-major sparse matrix.
   * The entry values are stored in Compressed Sparse Column (CSC) format.
   * For example, the following matrix
   * {{{
   *   1.0 0.0 4.0
   *   0.0 3.0 5.0
   *   2.0 0.0 6.0
   * }}}
   * is stored as `values: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]`,
   * `rowIndices=[0, 2, 1, 0, 1, 2]`, `colPointers=[0, 2, 3, 6]`.
   *
   * @param numRows number of rows
   * @param numCols number of columns
   * @param colPtrs the index corresponding to the start of a new column
   * @param rowIndices the row index of the entry. They must be in strictly increasing
   *                   order for each column
   * @param values non-zero matrix entries in column major
   */
  @Since("2.0.0")
  def this(
      numRows: Int,
      numCols: Int,
      colPtrs: Array[Int],
      rowIndices: Array[Int],
      values: Array[Double]) = this(numRows, numCols, colPtrs, rowIndices, values, false)

  // Equality/hashing are delegated to the Breeze representation, so a transposed
  // matrix compares equal to an untransposed matrix with the same logical entries.
  override def hashCode(): Int = asBreeze.hashCode()

  override def equals(o: Any): Boolean = o match {
    case m: Matrix => asBreeze == m.asBreeze
    case _ => false
  }

  private[ml] def asBreeze: BM[Double] = {
    if (!isTransposed) {
      new BSM[Double](values, numRows, numCols, colPtrs, rowIndices)
    } else {
      // Breeze's CSC matrix has no transpose flag, so build it with swapped
      // dimensions and return a transposed view.
      val breezeMatrix = new BSM[Double](values, numCols, numRows, colPtrs, rowIndices)
      breezeMatrix.t
    }
  }

  override def apply(i: Int, j: Int): Double = {
    // A negative index means the entry is an implicit (unstored) zero.
    val ind = index(i, j)
    if (ind < 0) 0.0 else values(ind)
  }

  // Locates the storage position of entry (i, j) via binary search within the
  // relevant column slice (or row slice when transposed). Returns a negative
  // value when (i, j) is not explicitly stored.
  private[ml] def index(i: Int, j: Int): Int = {
    require(i >= 0 && i < numRows, s"Expected 0 <= i < $numRows, got i = $i.")
    require(j >= 0 && j < numCols, s"Expected 0 <= j < $numCols, got j = $j.")
    if (!isTransposed) {
      Arrays.binarySearch(rowIndices, colPtrs(j), colPtrs(j + 1), i)
    } else {
      Arrays.binarySearch(rowIndices, colPtrs(i), colPtrs(i + 1), j)
    }
  }

  // In-place update of an already-stored entry; the sparsity pattern is fixed,
  // so writing to an implicit zero is an error.
  private[ml] def update(i: Int, j: Int, v: Double): Unit = {
    val ind = index(i, j)
    if (ind < 0) {
      throw new NoSuchElementException("The given row and column indices correspond to a zero " +
        "value. Only non-zero elements in Sparse Matrices can be updated.")
    } else {
      values(ind) = v
    }
  }

  // Note: only `values` is cloned; the index arrays are shared with the original.
  override def copy: SparseMatrix = {
    new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values.clone())
  }

  // Returns a new matrix with f applied to every stored value (implicit zeros untouched).
  private[spark] def map(f: Double => Double) =
    new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values.map(f), isTransposed)

  // Applies f in place to every stored value and returns this matrix.
  private[ml] def update(f: Double => Double): SparseMatrix = {
    val len = values.length
    var i = 0
    while (i < len) {
      values(i) = f(values(i))
      i += 1
    }
    this
  }

  // O(1) transpose: reuse the same arrays and flip the interpretation flag.
  override def transpose: SparseMatrix =
    new SparseMatrix(numCols, numRows, colPtrs, rowIndices, values, !isTransposed)

  override def foreachActive(f: (Int, Int, Double) => Unit): Unit = {
    if (!isTransposed) {
      // CSC: walk each column's slice of rowIndices/values.
      var j = 0
      while (j < numCols) {
        var idx = colPtrs(j)
        val idxEnd = colPtrs(j + 1)
        while (idx < idxEnd) {
          f(rowIndices(idx), j, values(idx))
          idx += 1
        }
        j += 1
      }
    } else {
      // CSR: colPtrs delimits rows and rowIndices actually holds column indices.
      var i = 0
      while (i < numRows) {
        var idx = colPtrs(i)
        val idxEnd = colPtrs(i + 1)
        while (idx < idxEnd) {
          val j = rowIndices(idx)
          f(i, j, values(idx))
          idx += 1
        }
        i += 1
      }
    }
  }

  // Stored entries may include explicit zeros, so numNonzeros <= numActives.
  override def numNonzeros: Int = values.count(_ != 0)

  override def numActives: Int = values.length

  /**
   * Generate a `SparseMatrix` from this `SparseMatrix`, removing explicit zero values if they
   * exist.
   *
   * @param colMajor Whether or not the resulting `SparseMatrix` values are in column major
   *                    order.
   */
  private[ml] override def toSparseMatrix(colMajor: Boolean): SparseMatrix = {
    if (isColMajor && !colMajor) {
      // it is col major and we want row major, use breeze to remove explicit zeros
      val breezeTransposed = asBreeze.asInstanceOf[BSM[Double]].t
      Matrices.fromBreeze(breezeTransposed).transpose.asInstanceOf[SparseMatrix]
    } else if (isRowMajor && colMajor) {
      // it is row major and we want col major, use breeze to remove explicit zeros
      val breezeTransposed = asBreeze.asInstanceOf[BSM[Double]]
      Matrices.fromBreeze(breezeTransposed).asInstanceOf[SparseMatrix]
    } else {
      // Major order already matches the request; compact in place if any
      // explicit zeros are stored, otherwise return this matrix unchanged.
      val nnz = numNonzeros
      if (nnz != numActives) {
        // remove explicit zeros
        val rr = new Array[Int](nnz)
        val vv = new Array[Double](nnz)
        val numPtrs = if (isRowMajor) numRows else numCols
        val cc = new Array[Int](numPtrs + 1)
        var nzIdx = 0
        var j = 0
        while (j < numPtrs) {
          var idx = colPtrs(j)
          val idxEnd = colPtrs(j + 1)
          cc(j) = nzIdx
          while (idx < idxEnd) {
            if (values(idx) != 0.0) {
              vv(nzIdx) = values(idx)
              rr(nzIdx) = rowIndices(idx)
              nzIdx += 1
            }
            idx += 1
          }
          j += 1
        }
        // Trailing sentinel pointer equals the compacted entry count.
        cc(j) = nnz
        new SparseMatrix(numRows, numCols, cc, rr, vv, isTransposed = isTransposed)
      } else {
        this
      }
    }
  }

  /**
   * Generate a `DenseMatrix` from the given `SparseMatrix`.
   *
   * @param colMajor Whether the resulting `DenseMatrix` values are in column major order.
   */
  private[ml] override def toDenseMatrix(colMajor: Boolean): DenseMatrix = {
    if (colMajor) new DenseMatrix(numRows, numCols, this.toArray)
    else new DenseMatrix(numRows, numCols, this.transpose.toArray, isTransposed = true)
  }

  override def colIter: Iterator[Vector] = {
    if (isTransposed) {
      // CSR layout: a single pass over the rows buckets every entry into a
      // per-column builder, then each bucket becomes one sparse column vector.
      val indicesArray = Array.fill(numCols)(MArrayBuilder.make[Int])
      val valuesArray = Array.fill(numCols)(MArrayBuilder.make[Double])
      var i = 0
      while (i < numRows) {
        var k = colPtrs(i)
        val rowEnd = colPtrs(i + 1)
        while (k < rowEnd) {
          val j = rowIndices(k)
          indicesArray(j) += i
          valuesArray(j) += values(k)
          k += 1
        }
        i += 1
      }
      Iterator.tabulate(numCols) { j =>
        val ii = indicesArray(j).result()
        val vv = valuesArray(j).result()
        new SparseVector(numRows, ii, vv)
      }
    } else {
      // CSC layout: each column is a contiguous slice of the storage arrays.
      Iterator.tabulate(numCols) { j =>
        val colStart = colPtrs(j)
        val colEnd = colPtrs(j + 1)
        val ii = rowIndices.slice(colStart, colEnd)
        val vv = values.slice(colStart, colEnd)
        new SparseVector(numRows, ii, vv)
      }
    }
  }

  // Approximate JVM memory footprint; see Matrices.getSparseSize for the layout math.
  private[ml] def getSizeInBytes: Long = Matrices.getSparseSize(numActives, colPtrs.length)
}
/**
 * Factory methods for [[org.apache.spark.ml.linalg.SparseMatrix]].
 */
@Since("2.0.0")
object SparseMatrix {

  /**
   * Generate a `SparseMatrix` from Coordinate List (COO) format. Input must be an array of
   * (i, j, value) tuples. Entries that have duplicate values of i and j are
   * added together. Tuples where value is equal to zero will be omitted.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param entries Array of (i, j, value) tuples
   * @return The corresponding `SparseMatrix`
   */
  @Since("2.0.0")
  def fromCOO(numRows: Int, numCols: Int, entries: Iterable[(Int, Int, Double)]): SparseMatrix = {
    // Sort by (column, row) so entries arrive in CSC order and duplicates are adjacent.
    val sortedEntries = entries.toSeq.sortBy(v => (v._2, v._1))
    val numEntries = sortedEntries.size
    if (sortedEntries.nonEmpty) {
      // Since the entries are sorted by column index, we only need to check the first and the last.
      for (col <- Seq(sortedEntries.head._2, sortedEntries.last._2)) {
        require(col >= 0 && col < numCols, s"Column index out of range [0, $numCols): $col.")
      }
    }
    val colPtrs = new Array[Int](numCols + 1)
    val rowIndices = MArrayBuilder.make[Int]
    rowIndices.sizeHint(numEntries)
    val values = MArrayBuilder.make[Double]
    values.sizeHint(numEntries)
    var nnz = 0
    var prevCol = 0
    var prevRow = -1
    var prevVal = 0.0
    // Append a dummy entry to include the last one at the end of the loop.
    (sortedEntries.view :+ ((numRows, numCols, 1.0))).foreach { case (i, j, v) =>
      if (v != 0) {
        if (i == prevRow && j == prevCol) {
          // Duplicate coordinate: accumulate instead of emitting a second entry.
          prevVal += v
        } else {
          // Flush the previous accumulated entry (skipping entries that summed to zero).
          if (prevVal != 0) {
            require(prevRow >= 0 && prevRow < numRows,
              s"Row index out of range [0, $numRows): $prevRow.")
            nnz += 1
            rowIndices += prevRow
            values += prevVal
          }
          prevRow = i
          prevVal = v
          // Advance the column pointers across any empty columns up to column j.
          while (prevCol < j) {
            colPtrs(prevCol + 1) = nnz
            prevCol += 1
          }
        }
      }
    }
    new SparseMatrix(numRows, numCols, colPtrs, rowIndices.result(), values.result())
  }

  /**
   * Generate an Identity Matrix in `SparseMatrix` format.
   * @param n number of rows and columns of the matrix
   * @return `SparseMatrix` with size `n` x `n` and values of ones on the diagonal
   */
  @Since("2.0.0")
  def speye(n: Int): SparseMatrix = {
    new SparseMatrix(n, n, (0 to n).toArray, (0 until n).toArray, Array.fill(n)(1.0))
  }

  /**
   * Generates the skeleton of a random `SparseMatrix` with a given random number generator.
   * The values of the matrix returned are undefined.
   */
  private def genRandMatrix(
      numRows: Int,
      numCols: Int,
      density: Double,
      rng: Random): SparseMatrix = {
    require(numRows > 0, s"numRows must be greater than 0 but got $numRows")
    require(numCols > 0, s"numCols must be greater than 0 but got $numCols")
    require(density >= 0.0 && density <= 1.0,
      s"density must be a double in the range 0.0 <= d <= 1.0. Currently, density: $density")
    val size = numRows.toLong * numCols
    val expected = size * density
    assert(expected < Int.MaxValue,
      "The expected number of nonzeros cannot be greater than Int.MaxValue.")
    val nnz = math.ceil(expected).toInt
    if (density == 0.0) {
      // No entries at all: all column pointers stay 0.
      new SparseMatrix(numRows, numCols, new Array[Int](numCols + 1), Array.empty, Array.empty)
    } else if (density == 1.0) {
      // Fully dense pattern: every (i, j) position is stored.
      val colPtrs = Array.tabulate(numCols + 1)(j => j * numRows)
      val rowIndices = Array.tabulate(size.toInt)(idx => idx % numRows)
      new SparseMatrix(numRows, numCols, colPtrs, rowIndices, new Array[Double](numRows * numCols))
    } else if (density < 0.34) {
      // draw-by-draw, expected number of iterations is less than 1.5 * nnz
      val entries = MHashSet[(Int, Int)]()
      while (entries.size < nnz) {
        entries += ((rng.nextInt(numRows), rng.nextInt(numCols)))
      }
      SparseMatrix.fromCOO(numRows, numCols, entries.map(v => (v._1, v._2, 1.0)))
    } else {
      // selection-rejection method
      var idx = 0L
      var numSelected = 0
      var j = 0
      val colPtrs = new Array[Int](numCols + 1)
      val rowIndices = new Array[Int](nnz)
      while (j < numCols && numSelected < nnz) {
        var i = 0
        while (i < numRows && numSelected < nnz) {
          // Probability shrinks as positions are consumed, keeping the total at nnz.
          if (rng.nextDouble() < 1.0 * (nnz - numSelected) / (size - idx)) {
            rowIndices(numSelected) = i
            numSelected += 1
          }
          i += 1
          idx += 1
        }
        colPtrs(j + 1) = numSelected
        j += 1
      }
      new SparseMatrix(numRows, numCols, colPtrs, rowIndices, new Array[Double](nnz))
    }
  }

  /**
   * Generate a `SparseMatrix` consisting of `i.i.d`. uniform random numbers. The number of non-zero
   * elements equal the ceiling of `numRows` x `numCols` x `density`
   *
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param density the desired density for the matrix
   * @param rng a random number generator
   * @return `SparseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
   */
  @Since("2.0.0")
  def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
    // First fix the sparsity pattern, then fill the stored slots with samples.
    val mat = genRandMatrix(numRows, numCols, density, rng)
    mat.update(i => rng.nextDouble())
  }

  /**
   * Generate a `SparseMatrix` consisting of `i.i.d`. gaussian random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param density the desired density for the matrix
   * @param rng a random number generator
   * @return `SparseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
   */
  @Since("2.0.0")
  def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
    val mat = genRandMatrix(numRows, numCols, density, rng)
    mat.update(i => rng.nextGaussian())
  }

  /**
   * Generate a diagonal matrix in `SparseMatrix` format from the supplied values.
   * @param vector a `Vector` that will form the values on the diagonal of the matrix
   * @return Square `SparseMatrix` with size `values.length` x `values.length` and non-zero
   *         `values` on the diagonal
   */
  @Since("2.0.0")
  def spdiag(vector: Vector): SparseMatrix = {
    val n = vector.size
    vector match {
      case sVec: SparseVector =>
        SparseMatrix.fromCOO(n, n, sVec.indices.zip(sVec.values).map(v => (v._1, v._1, v._2)))
      case dVec: DenseVector =>
        // Drop explicit zeros so only true non-zeros land on the diagonal.
        val entries = dVec.values.zipWithIndex
        val nnzVals = entries.filter(v => v._1 != 0.0)
        SparseMatrix.fromCOO(n, n, nnzVals.map(v => (v._2, v._2, v._1)))
    }
  }
}
/**
 * Factory methods for [[org.apache.spark.ml.linalg.Matrix]].
 */
@Since("2.0.0")
object Matrices {

  /**
   * Creates a column-major dense matrix.
   *
   * @param numRows number of rows
   * @param numCols number of columns
   * @param values matrix entries in column major
   */
  @Since("2.0.0")
  def dense(numRows: Int, numCols: Int, values: Array[Double]): Matrix = {
    new DenseMatrix(numRows, numCols, values)
  }

  /**
   * Creates a column-major sparse matrix in Compressed Sparse Column (CSC) format.
   *
   * @param numRows number of rows
   * @param numCols number of columns
   * @param colPtrs the index corresponding to the start of a new column
   * @param rowIndices the row index of the entry
   * @param values non-zero matrix entries in column major
   */
  @Since("2.0.0")
  def sparse(
      numRows: Int,
      numCols: Int,
      colPtrs: Array[Int],
      rowIndices: Array[Int],
      values: Array[Double]): Matrix = {
    new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values)
  }

  /**
   * Creates a Matrix instance from a breeze matrix.
   * @param breeze a breeze matrix
   * @return a Matrix instance
   */
  private[ml] def fromBreeze(breeze: BM[Double]): Matrix = {
    breeze match {
      case dm: BDM[Double] =>
        new DenseMatrix(dm.rows, dm.cols, dm.data, dm.isTranspose)
      case sm: BSM[Double] =>
        // There is no isTranspose flag for sparse matrices in Breeze
        new SparseMatrix(sm.rows, sm.cols, sm.colPtrs, sm.rowIndices, sm.data)
      case _ =>
        throw new UnsupportedOperationException(
          s"Do not support conversion from type ${breeze.getClass.getName}.")
    }
  }

  /**
   * Generate a `Matrix` consisting of zeros.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @return `Matrix` with size `numRows` x `numCols` and values of zeros
   */
  @Since("2.0.0")
  def zeros(numRows: Int, numCols: Int): Matrix = DenseMatrix.zeros(numRows, numCols)

  /**
   * Generate a `DenseMatrix` consisting of ones.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @return `Matrix` with size `numRows` x `numCols` and values of ones
   */
  @Since("2.0.0")
  def ones(numRows: Int, numCols: Int): Matrix = DenseMatrix.ones(numRows, numCols)

  /**
   * Generate a dense Identity Matrix in `Matrix` format.
   * @param n number of rows and columns of the matrix
   * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
   */
  @Since("2.0.0")
  def eye(n: Int): Matrix = DenseMatrix.eye(n)

  /**
   * Generate a sparse Identity Matrix in `Matrix` format.
   * @param n number of rows and columns of the matrix
   * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
   */
  @Since("2.0.0")
  def speye(n: Int): Matrix = SparseMatrix.speye(n)

  /**
   * Generate a `DenseMatrix` consisting of `i.i.d.` uniform random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param rng a random number generator
   * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
   */
  @Since("2.0.0")
  def rand(numRows: Int, numCols: Int, rng: Random): Matrix =
    DenseMatrix.rand(numRows, numCols, rng)

  /**
   * Generate a `SparseMatrix` consisting of `i.i.d.` uniform random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param density the desired density for the matrix
   * @param rng a random number generator
   * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
   */
  @Since("2.0.0")
  def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
    SparseMatrix.sprand(numRows, numCols, density, rng)

  /**
   * Generate a `DenseMatrix` consisting of `i.i.d.` gaussian random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param rng a random number generator
   * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
   */
  @Since("2.0.0")
  def randn(numRows: Int, numCols: Int, rng: Random): Matrix =
    DenseMatrix.randn(numRows, numCols, rng)

  /**
   * Generate a `SparseMatrix` consisting of `i.i.d.` gaussian random numbers.
   * @param numRows number of rows of the matrix
   * @param numCols number of columns of the matrix
   * @param density the desired density for the matrix
   * @param rng a random number generator
   * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
   */
  @Since("2.0.0")
  def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
    SparseMatrix.sprandn(numRows, numCols, density, rng)

  /**
   * Generate a diagonal matrix in `Matrix` format from the supplied values.
   * @param vector a `Vector` that will form the values on the diagonal of the matrix
   * @return Square `Matrix` with size `values.length` x `values.length` and `values`
   *         on the diagonal
   */
  @Since("2.0.0")
  def diag(vector: Vector): Matrix = DenseMatrix.diag(vector)

  /**
   * Horizontally concatenate a sequence of matrices. The returned matrix will be in the format
   * the matrices are supplied in. Supplying a mix of dense and sparse matrices will result in
   * a sparse matrix. If the Array is empty, an empty `DenseMatrix` will be returned.
   * @param matrices array of matrices
   * @return a single `Matrix` composed of the matrices that were horizontally concatenated
   */
  @Since("2.0.0")
  def horzcat(matrices: Array[Matrix]): Matrix = {
    if (matrices.isEmpty) {
      return new DenseMatrix(0, 0, Array.empty)
    } else if (matrices.length == 1) {
      return matrices(0)
    }
    val numRows = matrices(0).numRows
    var hasSparse = false
    var numCols = 0
    matrices.foreach { mat =>
      require(numRows == mat.numRows, "The number of rows of the matrices in this sequence, " +
        "don't match!")
      mat match {
        case _: SparseMatrix => hasSparse = true
        case _: DenseMatrix => // empty on purpose
        case _ => throw new IllegalArgumentException("Unsupported matrix format. Expected " +
          s"SparseMatrix or DenseMatrix. Instead got: ${mat.getClass}")
      }
      numCols += mat.numCols
    }
    if (!hasSparse) {
      // All dense: column-major blocks can simply be concatenated.
      new DenseMatrix(numRows, numCols, matrices.flatMap(_.toArray))
    } else {
      // At least one sparse input: gather all non-zeros as COO triplets with
      // shifted column indices, then rebuild a single CSC matrix.
      var startCol = 0
      val entries: Array[(Int, Int, Double)] = matrices.flatMap { mat =>
        val nCols = mat.numCols
        mat match {
          case spMat: SparseMatrix =>
            val data = new Array[(Int, Int, Double)](spMat.values.length)
            var cnt = 0
            spMat.foreachActive { (i, j, v) =>
              data(cnt) = (i, j + startCol, v)
              cnt += 1
            }
            startCol += nCols
            data
          case dnMat: DenseMatrix =>
            val data = new ArrayBuffer[(Int, Int, Double)]()
            dnMat.foreachActive { (i, j, v) =>
              if (v != 0.0) {
                data += Tuple3(i, j + startCol, v)
              }
            }
            startCol += nCols
            data
        }
      }
      SparseMatrix.fromCOO(numRows, numCols, entries)
    }
  }

  /**
   * Vertically concatenate a sequence of matrices. The returned matrix will be in the format
   * the matrices are supplied in. Supplying a mix of dense and sparse matrices will result in
   * a sparse matrix. If the Array is empty, an empty `DenseMatrix` will be returned.
   * @param matrices array of matrices
   * @return a single `Matrix` composed of the matrices that were vertically concatenated
   */
  @Since("2.0.0")
  def vertcat(matrices: Array[Matrix]): Matrix = {
    if (matrices.isEmpty) {
      return new DenseMatrix(0, 0, Array.empty)
    } else if (matrices.length == 1) {
      return matrices(0)
    }
    val numCols = matrices(0).numCols
    var hasSparse = false
    var numRows = 0
    matrices.foreach { mat =>
      // Vertical stacking requires every block to have the same number of columns.
      require(numCols == mat.numCols,
        "The number of columns of the matrices in this sequence don't match!")
      mat match {
        case _: SparseMatrix => hasSparse = true
        case _: DenseMatrix => // empty on purpose
        case _ => throw new IllegalArgumentException("Unsupported matrix format. Expected " +
          s"SparseMatrix or DenseMatrix. Instead got: ${mat.getClass}")
      }
      numRows += mat.numRows
    }
    if (!hasSparse) {
      val allValues = new Array[Double](numRows * numCols)
      var startRow = 0
      matrices.foreach { mat =>
        val nRows = mat.numRows
        mat.foreachActive { (i, j, v) =>
          // Columns keep their index; rows are offset by the rows stacked so far.
          val indStart = j * numRows + startRow
          allValues(indStart + i) = v
        }
        startRow += nRows
      }
      new DenseMatrix(numRows, numCols, allValues)
    } else {
      // Gather non-zeros as COO triplets with shifted row indices.
      var startRow = 0
      val entries: Array[(Int, Int, Double)] = matrices.flatMap { mat =>
        val nRows = mat.numRows
        mat match {
          case spMat: SparseMatrix =>
            val data = new Array[(Int, Int, Double)](spMat.values.length)
            var cnt = 0
            spMat.foreachActive { (i, j, v) =>
              data(cnt) = (i + startRow, j, v)
              cnt += 1
            }
            startRow += nRows
            data
          case dnMat: DenseMatrix =>
            val data = new ArrayBuffer[(Int, Int, Double)]()
            dnMat.foreachActive { (i, j, v) =>
              if (v != 0.0) {
                data += Tuple3(i + startRow, j, v)
              }
            }
            startRow += nRows
            data
        }
      }
      SparseMatrix.fromCOO(numRows, numCols, entries)
    }
  }

  // Approximate JVM footprint of a SparseMatrix with the given number of stored
  // entries and pointer-array length.
  private[ml] def getSparseSize(numActives: Long, numPtrs: Long): Long = {
    /*
      Sparse matrices store two int arrays, one double array, two ints, and one boolean:
      8 * values.length + 4 * rowIndices.length + 4 * colPtrs.length + arrayHeader * 3 + 2 * 4 + 1
     */
    val doubleBytes = java.lang.Double.BYTES
    val intBytes = java.lang.Integer.BYTES
    val arrayHeader = 12L
    doubleBytes * numActives + intBytes * numActives + intBytes * numPtrs + arrayHeader * 3L + 9L
  }

  // Approximate JVM footprint of a DenseMatrix with the given dimensions.
  private[ml] def getDenseSize(numCols: Long, numRows: Long): Long = {
    /*
      Dense matrices store one double array, two ints, and one boolean:
      8 * values.length + arrayHeader + 2 * 4 + 1
     */
    val doubleBytes = java.lang.Double.BYTES
    val arrayHeader = 12L
    doubleBytes * numCols * numRows + arrayHeader + 9L
  }
}
| minixalpha/spark | mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala | Scala | apache-2.0 | 44,093 |
package no.digipost.labs.oauth
import com.ning.http.client.ProxyServer
import com.ning.http.client.ProxyServer.Protocol
import scala.concurrent.Future
import org.json4s._
import org.json4s.jackson.JsonMethods._
import dispatch._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Success, Failure}
import no.digipost.labs.Settings.Proxy
/** Basic profile of a Digipost user as extracted from the account endpoint response. */
case class DigipostUser(id: String, name: String, emailAddress: String, digipostAddress: String)
/** Abstraction over the Digipost user-details lookup, so HTTP access can be stubbed in tests. */
trait DigipostService {
  /**
   * Fetch basic profile details for the user that owns `accessToken`.
   *
   * @param uri endpoint to query
   * @param proxy optional HTTPS proxy to route the request through
   * @param accessToken OAuth token used as a Bearer credential
   * @return future completing with the user's basic details
   */
  def getBasicUserDetails(uri: String, proxy: Option[Proxy], accessToken: AccessToken): Future[DigipostUser]
}
/** Default implementation that calls the Digipost REST API over HTTP (dispatch client). */
class HttpDigipostService extends DigipostService {

  // json4s formats required by extract/extractOpt below.
  implicit val jsonFormats = DefaultFormats

  def getBasicUserDetails(uri: String, proxy: Option[Proxy], accessToken: AccessToken): Future[DigipostUser] = {
    val request = url(uri)
    // Route through an HTTPS proxy only when one is configured.
    val requestWithProxy = proxy.map(proxy => request.setProxyServer(new ProxyServer(Protocol.HTTPS, proxy.host, proxy.port))).getOrElse(request)
    Http(requestWithProxy <:< Seq("Accept" -> "application/vnd.digipost-v2+json", "Authorization" -> s"Bearer ${accessToken.access_token}") > {
      response =>
        val rootJson = parse(response.getResponseBody("utf-8"))
        // `\\` is json4s' recursive field search; these paths target the
        // primaryAccount object of the v2 API response.
        val name = (rootJson \\ "primaryAccount" \\ "fullName").extract[String]
        // email may be absent or a list; missing email becomes "" below.
        val emailAddress = (rootJson \\ "primaryAccount" \\ "email").extractOpt[List[String]]
        val digipostAddress = (rootJson \\ "primaryAccount" \\ "digipostaddress").extract[String]
        // The user id is derived from the token; a failure here fails the future.
        AccessToken.getUserId(accessToken) match {
          case Success(id) => DigipostUser(id, name, emailAddress.flatMap(_.headOption).getOrElse(""), digipostAddress)
          case Failure(error) => throw error
        }
    })
  }
}
/*
* Copyright 2014 Michael Krolikowski
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mkroli.dns4s.section
import org.scalatest.FunSpec
import com.github.mkroli.dns4s.MessageBuffer
import com.github.mkroli.dns4s.bytes
import com.github.mkroli.dns4s.maxInt
import com.github.mkroli.dns4s.maxLong
import com.github.mkroli.dns4s.section.resource.UnknownResource
/**
 * Specification for [[ResourceRecord]]: constructor bounds checking and
 * round-trip encoding/decoding of the DNS wire format.
 */
class ResourceRecordSpec extends FunSpec {
  describe("ResourceRecord") {
    describe("validation") {
      describe("type") {
        it("should fail if it is out of bounds") {
          intercept[IllegalArgumentException](ResourceRecord("", -1, 0, 0, UnknownResource(Nil, 0)))
          intercept[IllegalArgumentException](ResourceRecord("", maxInt(16) + 1, 0, 0, UnknownResource(Nil, 0)))
        }
        it("should not fail if it is within bounds") {
          ResourceRecord("", 0, 0, 0, UnknownResource(Nil, 0))
          ResourceRecord("", maxInt(16), 0, 0, UnknownResource(Nil, 0))
        }
      }
      describe("class") {
        it("should fail if it is out of bounds") {
          intercept[IllegalArgumentException](ResourceRecord("", 0, -1, 0, UnknownResource(Nil, 0)))
          intercept[IllegalArgumentException](ResourceRecord("", 0, maxInt(16) + 1, 0, UnknownResource(Nil, 0)))
        }
        it("should not fail if it is within bounds") {
          ResourceRecord("", 0, 0, 0, UnknownResource(Nil, 0))
          ResourceRecord("", 0, maxInt(16), 0, UnknownResource(Nil, 0))
        }
      }
      describe("ttl") {
        it("should fail if it is out of bounds") {
          intercept[IllegalArgumentException](ResourceRecord("", 0, 0, -1, UnknownResource(Nil, 0)))
          intercept[IllegalArgumentException](ResourceRecord("", 0, 0, maxLong(32) + 1, UnknownResource(Nil, 0)))
        }
        it("should not fail if it is within bounds") {
          ResourceRecord("", 0, 0, 0, UnknownResource(Nil, 0))
          ResourceRecord("", 0, 0, maxLong(32), UnknownResource(Nil, 0))
        }
      }
    }
    describe("encoding/decoding") {
      it("decode(encode(resourceRecord)) should be the same as resourceRecord") {
        // Encode into a fresh buffer, flip it for reading, decode, and compare.
        // (Uses explicit `: Unit =` instead of deprecated procedure syntax.)
        def testEncodeDecode(rr: ResourceRecord): Unit = {
          assert(rr === ResourceRecord(rr(MessageBuffer()).flipped))
        }
        testEncodeDecode(ResourceRecord("", 0, 0, 0, UnknownResource(Nil, 0)))
        testEncodeDecode(ResourceRecord("test.test.test", maxInt(16), maxInt(16), maxLong(32), UnknownResource(Nil, maxInt(16))))
      }
      it("should prevent infinite loop with compression") {
        // 0xC000 is a compression pointer back to offset 0, i.e. a self-reference.
        val b = MessageBuffer().put(bytes("C000 0000 0000 00000000 0000").toArray).flipped
        intercept[AssertionError](ResourceRecord(b))
      }
      it("should encode/decode a specific byte array") {
        val rr = ResourceRecord("test.test.test", 1, 2, 3, UnknownResource(Nil, 1))(MessageBuffer()).flipped
        assert(bytes("04 74 65 73 74 04 74 65 73 74 04 74 65 73 74 00 0001 0002 00000003 0000") === rr.getBytes(rr.remaining))
      }
      it("should encode/decode a byte array filled with 0s") {
        val rr = ResourceRecord("", 0, 0, 0, UnknownResource(Nil, 0))(MessageBuffer()).flipped
        assert(bytes("00 0000 0000 00000000 0000") === rr.getBytes(rr.remaining))
      }
      it("should encode/decode a byte array filled with mostly 1s") {
        val rr = ResourceRecord("", maxInt(16), maxInt(16), maxLong(32), UnknownResource(Nil, maxInt(16)))(MessageBuffer()).flipped
        assert(bytes("00 FFFF FFFF FFFFFFFF 0000") === rr.getBytes(rr.remaining))
      }
    }
  }
}
| mesosphere/dns4s | core/src/test/scala/com/github/mkroli/dns4s/section/ResourceRecordSpec.scala | Scala | apache-2.0 | 4,076 |
package com.tothferenc.templateFX.examples.todo
import com.tothferenc.templateFX.examples.todo.model.TodoItem
import com.tothferenc.templateFX.examples.todo.model.TodoModel
import org.slf4j.LoggerFactory
/**
 * Controller for the TODO example: applies [[Intent]]s to the mutable
 * [[TodoModel]] and re-renders the view after each change.
 */
class Component(appModel: TodoModel, protoRenderer: Reactor[Intent] => Renderer[TodoModel]) extends Reactor[Intent] {

  private lazy val logger = LoggerFactory.getLogger("Component")

  // Millisecond timestamps are used as item keys.
  // NOTE(review): two items created within the same millisecond would share an
  // id — consider a monotonic counter if that matters in practice.
  private def nextId = System.currentTimeMillis()

  private val renderer = protoRenderer(this)

  /** Render the current model state without mutating it. */
  def render(): Unit = renderer.render(appModel)

  /**
   * Applies a single intent to the model. This function is total: intents that
   * match no case (e.g. Append/Prepend/Insert with empty text) are ignored
   * instead of throwing a MatchError, which would previously crash handle().
   */
  val updateModel: Intent => Unit = {
    case Append(item) if item.nonEmpty =>
      appModel.items.append(TodoItem(nextId, false, item))
    case Prepend(item) if item.nonEmpty =>
      appModel.items.prepend(TodoItem(nextId, false, item))
    case Insert(item, position) if item.nonEmpty =>
      // Clamp the insertion point to the end of the list.
      val actualPosition = if (position > appModel.items.length) appModel.items.length else position
      appModel.items.insert(actualPosition, TodoItem(nextId, false, item))
    case Delete(key) =>
      indexOfKey(key).foreach(appModel.items.remove)
    case Move(key, targetPosition) =>
      // NOTE(review): uses lastIndexWhere while Delete uses indexWhere — only
      // differs if duplicate ids exist; confirm whether that is intended.
      val index = appModel.items.lastIndexWhere(_.id == key)
      if (index > -1) {
        val item = appModel.items(index)
        appModel.items.remove(index)
        appModel.items.insert(targetPosition, item)
      }
    case ToggleCompleted(key, completed) =>
      appModel.items.find(_.id == key).foreach(_.completed = completed)
    case ToggleShowCompleted(show) =>
      appModel.showCompleted = show
    case Editing(key) =>
      appModel.editing = Some(key)
    case EditFinished(key, text) =>
      appModel.editing = None
      appModel.items.find(_.id == key).foreach(_.name = text)
    case _ =>
      // No-op fallback: e.g. Append("")/Prepend("")/Insert("") are ignored.
      ()
  }

  /** Applies the intent, re-renders, and logs how long the render took. */
  override def handle(message: Intent): Unit = {
    updateModel(message)
    val renderBegin = System.currentTimeMillis()
    renderer.render(appModel)
    logger.debug(s"Reaction to $message took ${System.currentTimeMillis() - renderBegin} ms.")
  }

  // Position of the first item with the given id, if any.
  private def indexOfKey(key: Long): Option[Int] = {
    val index = appModel.items.indexWhere(_.id == key)
    if (index > -1) Some(index) else None
  }
}
| tferi/templateFX | examples/src/main/scala/com/tothferenc/templateFX/examples/todo/Component.scala | Scala | gpl-3.0 | 2,136 |
/*
active-learning-scala: Active Learning library for Scala
Copyright (c) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package clean.tex
import al.strategies._
import clean.lib._
import ml.classifiers._
import util.Stat
// Research script: for every dataset, evaluates each strategy twice — once with
// the classifier predicted by the meta-learner (read from the "meta" DB) and
// once with a fixed RF baseline (suffix "_m") — then prints a LaTeX table
// counting, per strategy, first places, defeats against Rnd, and last places.
// NOTE(review): failures (missing measure / DB row) call sys.exit, aborting the
// whole run — presumably intentional for this batch-experiment script.
object METAtabwinnersPares extends AppWithUsage with LearnerTrait with StratsTrait with RangeGenerator {
  lazy val arguments = superArguments ++ List("learners:nb,5nn,c45,vfdt,ci,...|eci|i|ei|in|svm")
  val context = "tabwinnersPares"
  // n: how many top/bottom strategies to keep per dataset.
  val n = 1
  val qs = "100"
  val measure = ALCKappa
  run()

  override def run() = {
    super.run()
    val ls = learners(learnersStr)
    // val strats = Seq((l:Learner)=>MarginFixo(l,Seq()))
    val strats = stratsTexReduxMeta("maha")
    // val strats = stratsTex("all")
    val datasetLearnerAndBoth = for {
      dataset <- datasets.toList
    } yield {
      val ds = Ds(dataset, readOnly = true)
      ds.open()
      lazy val (ti, th, tf, tpass) = ranges(ds)
      val sres0 = for {s0 <- strats} yield {
        // Ask the meta DB which classifier the meta-learner (ELM) predicted
        // for this (strategy, dataset) pair; abort if there is no prediction.
        val metads = new Db("meta", readOnly = true)
        metads.open()
        val sql = s"select pre from e where mc='ELM' and st='${s0(NoLearner()).limp}' and ds='$dataset'"
        val classif = metads.readString(sql) match {
          case List(Vector(predito)) =>
            val cla = predito.split("-").last
            ls.find(_.limp == cla).getOrElse(???)
          case x => println(s"${x} <- x")
            println(s"${sql} <- ")
            sys.exit(1)
        }
        val s = s0(classif)
        metads.close()
        // Average the measure over all runs x folds for the predicted classifier.
        val (cs, vs) = (for {
          r <- 0 until runs
          f <- 0 until folds
        } yield {
          try {
            classif.limpa -> measure(ds, s, classif, r, f)(ti, tf).read(ds).getOrElse {
              println((ds, s, s.learner, classif, r, f) + ": medida não encontrada")
              sys.exit(0) //NA
            }
          } catch {
            case e: Throwable => println((ds, s, s.learner, r, f) + e.getMessage)
              sys.exit(0) //NA
          }
        }).unzip
        val sem = s.limp -> Stat.media_desvioPadrao(vs.toVector)._1
        // Same evaluation with the fixed RF baseline, labeled with "_m" suffix.
        val classifc = RF()
        val sc = s0(classifc)
        val (csc, vsc) = (for {
          r <- 0 until runs
          f <- 0 until folds
        } yield {
          try {
            classifc.limpa -> measure(ds, sc, classifc, r, f)(ti, tf).read(ds).getOrElse {
              println((ds, sc, sc.learner, classifc, r, f) + ": medida não encontrada")
              sys.exit(0) //NA
            }
          } catch {
            case e: Throwable => println((ds, sc, sc.learner, r, f) + e.getMessage)
              sys.exit(0) //NA
          }
        }).unzip
        val com = sc.limp + "_m" -> Stat.media_desvioPadrao(vsc.toVector)._1
        Seq(sem, com)
      }
      val sres = sres0.flatten
      // Baseline score of random sampling (either variant) for this dataset.
      val rnd = sres.find(x => x._1 == RandomSampling(Seq()).limp || x._1 == RandomSampling(Seq()).limp + "_m").getOrElse("" -> 0d)._2
      // (winners, losers, strategies that scored at or below random).
      val res = (ds.dataset -> pegaMelhores(sres, n)(_._2).map(_._1),
        ds.dataset -> pegaMelhores(sres, n)(-_._2).map(_._1),
        ds.dataset -> sres.filter(_._2 <= rnd).map(_._1).toList)
      ds.close()
      res
    }
    val (datasetLearnerAndWinners, datasetLearnerAndLosers, pioresQueRnd) = datasetLearnerAndBoth.unzip3
    println(s"$n primeiros/últimos")
    println(s"${datasetLearnerAndBoth.size} tests.")
    println(s"--------$measure---------------")
    // Count, across datasets, how often each strategy appears in each bucket.
    val flat = datasetLearnerAndWinners.flatMap(_._2)
    val flat2 = datasetLearnerAndLosers.flatMap(_._2)
    val flat3 = pioresQueRnd.flatMap(_._2)
    val algs1 = flat.distinct map { st =>
      val topCount = flat.count(_ == st)
      val botCount = flat2.count(_ == st)
      val rndCount = flat3.count(_ == st)
      (st, topCount, rndCount, botCount)
    }
    println(s"${if (qs == "50") "50" else ""}")
    println( """\\begin{tabular}{lccc}
algoritmo & \\makecell{primeiros\\\\lugares} & \\makecell{derrotas\\\\para Rnd} & \\makecell{últimos\\\\lugares} \\\\
\\hline
                """)
    algs1.sortBy(_._2).reverse foreach { case (st, topCount, rndCount, botCount) =>
      println(s"${st.padTo(10, ' ')} & \\t$topCount & \\t$rndCount & \\t$botCount \\\\\\\\")
    }
    println(
      """\\end{tabular}
      """.stripMargin)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.io._
import java.net.URI
import java.nio.charset.StandardCharsets
import java.util.Locale
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataOutputStream, Path}
import org.apache.hadoop.fs.permission.FsPermission
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods._
import org.apache.spark.{SPARK_VERSION, SparkConf}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.io.CompressionCodec
import org.apache.spark.util.{JsonProtocol, Utils}
/**
* A SparkListener that logs events to persistent storage.
*
* Event logging is specified by the following configurable parameters:
* spark.eventLog.enabled - Whether event logging is enabled.
* spark.eventLog.compress - Whether to compress logged events
* spark.eventLog.overwrite - Whether to overwrite any existing files.
* spark.eventLog.dir - Path to the directory in which events are logged.
* spark.eventLog.buffer.kb - Buffer size to use when writing to output streams
*/
private[spark] class EventLoggingListener(
    appId: String,
    appAttemptId : Option[String],
    logBaseDir: URI,
    sparkConf: SparkConf,
    hadoopConf: Configuration)
  extends SparkListener with Logging {

  import EventLoggingListener._

  // Convenience constructor that derives the Hadoop configuration from the Spark one.
  def this(appId: String, appAttemptId : Option[String], logBaseDir: URI, sparkConf: SparkConf) =
    this(appId, appAttemptId, logBaseDir, sparkConf,
      SparkHadoopUtil.get.newConfiguration(sparkConf))

  // Behavior switches read once from the Spark configuration at construction time.
  private val shouldCompress = sparkConf.getBoolean("spark.eventLog.compress", false)
  private val shouldOverwrite = sparkConf.getBoolean("spark.eventLog.overwrite", false)
  private val testing = sparkConf.getBoolean("spark.eventLog.testing", false)
  // Configured in KB, converted to bytes for the BufferedOutputStream.
  private val outputBufferSize = sparkConf.getInt("spark.eventLog.buffer.kb", 100) * 1024
  private val fileSystem = Utils.getHadoopFileSystem(logBaseDir, hadoopConf)
  // Codec instance used to compress the log stream, if compression is enabled.
  private val compressionCodec =
    if (shouldCompress) {
      Some(CompressionCodec.createCodec(sparkConf))
    } else {
      None
    }
  // Short codec name (e.g. "lzf"); encoded into the log file name by getLogPath.
  private val compressionCodecName = compressionCodec.map { c =>
    CompressionCodec.getShortName(c.getClass.getName)
  }

  // Only defined if the file system scheme is not local
  private var hadoopDataStream: Option[FSDataOutputStream] = None

  private var writer: Option[PrintWriter] = None

  // For testing. Keep track of all JSON serialized events that have been logged.
  private[scheduler] val loggedEvents = new ArrayBuffer[JValue]

  // Visible for tests only.
  private[scheduler] val logPath = getLogPath(logBaseDir, appId, appAttemptId, compressionCodecName)

  /**
   * Creates the log file in the configured log directory.
   */
  def start() {
    if (!fileSystem.getFileStatus(new Path(logBaseDir)).isDirectory) {
      throw new IllegalArgumentException(s"Log directory $logBaseDir is not a directory.")
    }

    // The file is written under a ".inprogress" name and renamed in stop().
    val workingPath = logPath + IN_PROGRESS
    val uri = new URI(workingPath)
    val path = new Path(workingPath)
    val defaultFs = FileSystem.getDefaultUri(hadoopConf).getScheme
    val isDefaultLocal = defaultFs == null || defaultFs == "file"

    if (shouldOverwrite && fileSystem.delete(path, true)) {
      logWarning(s"Event log $path already exists. Overwriting...")
    }

    /* The Hadoop LocalFileSystem (r1.0.4) has known issues with syncing (HADOOP-7844).
     * Therefore, for local files, use FileOutputStream instead. */
    val dstream =
      if ((isDefaultLocal && uri.getScheme == null) || uri.getScheme == "file") {
        new FileOutputStream(uri.getPath)
      } else {
        hadoopDataStream = Some(fileSystem.create(path))
        hadoopDataStream.get
      }

    try {
      // Stream stack: raw -> (optional) compression -> buffering -> PrintWriter.
      val cstream = compressionCodec.map(_.compressedOutputStream(dstream)).getOrElse(dstream)
      val bstream = new BufferedOutputStream(cstream, outputBufferSize)

      EventLoggingListener.initEventLog(bstream, testing, loggedEvents)
      fileSystem.setPermission(path, LOG_FILE_PERMISSIONS)
      writer = Some(new PrintWriter(bstream))
      logInfo("Logging events to %s".format(logPath))
    } catch {
      case e: Exception =>
        dstream.close()
        throw e
    }
  }

  /** Log the event as JSON. */
  private def logEvent(event: SparkListenerEvent, flushLogger: Boolean = false) {
    val eventJson = JsonProtocol.sparkEventToJson(event)
    // scalastyle:off println
    writer.foreach(_.println(compact(render(eventJson))))
    // scalastyle:on println
    if (flushLogger) {
      writer.foreach(_.flush())
      hadoopDataStream.foreach(_.hflush())
    }
    if (testing) {
      loggedEvents += eventJson
    }
  }

  // Events that do not trigger a flush
  override def onStageSubmitted(event: SparkListenerStageSubmitted): Unit = logEvent(event)

  override def onTaskStart(event: SparkListenerTaskStart): Unit = logEvent(event)

  override def onTaskGettingResult(event: SparkListenerTaskGettingResult): Unit = logEvent(event)

  override def onTaskEnd(event: SparkListenerTaskEnd): Unit = logEvent(event)

  override def onEnvironmentUpdate(event: SparkListenerEnvironmentUpdate): Unit = {
    // Environment updates may carry credentials/secrets, so redact before writing.
    logEvent(redactEvent(event))
  }

  // Events that trigger a flush
  override def onStageCompleted(event: SparkListenerStageCompleted): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onJobStart(event: SparkListenerJobStart): Unit = logEvent(event, flushLogger = true)

  override def onJobEnd(event: SparkListenerJobEnd): Unit = logEvent(event, flushLogger = true)

  override def onBlockManagerAdded(event: SparkListenerBlockManagerAdded): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onBlockManagerRemoved(event: SparkListenerBlockManagerRemoved): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onUnpersistRDD(event: SparkListenerUnpersistRDD): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onApplicationStart(event: SparkListenerApplicationStart): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onApplicationEnd(event: SparkListenerApplicationEnd): Unit = {
    logEvent(event, flushLogger = true)
  }
  override def onExecutorAdded(event: SparkListenerExecutorAdded): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onExecutorRemoved(event: SparkListenerExecutorRemoved): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onExecutorBlacklisted(event: SparkListenerExecutorBlacklisted): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onExecutorUnblacklisted(event: SparkListenerExecutorUnblacklisted): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onNodeBlacklisted(event: SparkListenerNodeBlacklisted): Unit = {
    logEvent(event, flushLogger = true)
  }

  override def onNodeUnblacklisted(event: SparkListenerNodeUnblacklisted): Unit = {
    logEvent(event, flushLogger = true)
  }

  // No-op because logging every update would be overkill
  override def onBlockUpdated(event: SparkListenerBlockUpdated): Unit = {}

  // No-op because logging every update would be overkill
  override def onExecutorMetricsUpdate(event: SparkListenerExecutorMetricsUpdate): Unit = { }

  override def onOtherEvent(event: SparkListenerEvent): Unit = {
    if (event.logEvent) {
      logEvent(event, flushLogger = true)
    }
  }

  /**
   * Stop logging events. The event log file will be renamed so that it loses the
   * ".inprogress" suffix.
   */
  def stop(): Unit = {
    writer.foreach(_.close())

    val target = new Path(logPath)
    if (fileSystem.exists(target)) {
      if (shouldOverwrite) {
        logWarning(s"Event log $target already exists. Overwriting...")
        if (!fileSystem.delete(target, true)) {
          logWarning(s"Error deleting $target")
        }
      } else {
        throw new IOException("Target log file already exists (%s)".format(logPath))
      }
    }
    fileSystem.rename(new Path(logPath + IN_PROGRESS), target)
    // touch file to ensure modtime is current across those filesystems where rename()
    // does not set it, -and which support setTimes(); it's a no-op on most object stores
    try {
      fileSystem.setTimes(target, System.currentTimeMillis(), -1)
    } catch {
      case e: Exception => logDebug(s"failed to set time of $target", e)
    }
  }

  private[spark] def redactEvent(
      event: SparkListenerEnvironmentUpdate): SparkListenerEnvironmentUpdate = {
    // environmentDetails maps a string descriptor to a set of properties
    // Similar to:
    // "JVM Information" -> jvmInformation,
    // "Spark Properties" -> sparkProperties,
    // ...
    // where jvmInformation, sparkProperties, etc. are sequence of tuples.
    // We go through the various  of properties and redact sensitive information from them.
    val redactedProps = event.environmentDetails.map{ case (name, props) =>
      name -> Utils.redact(sparkConf, props)
    }
    SparkListenerEnvironmentUpdate(redactedProps)
  }

}
private[spark] object EventLoggingListener extends Logging {
  // Suffix applied to the names of files still being written by applications.
  val IN_PROGRESS = ".inprogress"
  val DEFAULT_LOG_DIR = "/tmp/spark-events"

  // rwxrwx--- : readable/writable by owner and group only.
  private val LOG_FILE_PERMISSIONS = new FsPermission(Integer.parseInt("770", 8).toShort)

  // A cache for compression codecs to avoid creating the same codec many times
  private val codecMap = new mutable.HashMap[String, CompressionCodec]

  /**
   * Write metadata about an event log to the given stream.
   * The metadata is encoded in the first line of the event log as JSON.
   *
   * @param logStream Raw output stream to the event log file.
   */
  def initEventLog(
      logStream: OutputStream,
      testing: Boolean,
      loggedEvents: ArrayBuffer[JValue]): Unit = {
    val metadata = SparkListenerLogStart(SPARK_VERSION)
    val eventJson = JsonProtocol.logStartToJson(metadata)
    // One JSON record per line; the metadata record is always the first line.
    val metadataJson = compact(eventJson) + "\n"
    logStream.write(metadataJson.getBytes(StandardCharsets.UTF_8))
    if (testing && loggedEvents != null) {
      loggedEvents += eventJson
    }
  }

  /**
   * Return a file-system-safe path to the log file for the given application.
   *
   * Note that because we currently only create a single log file for each application,
   * we must encode all the information needed to parse this event log in the file name
   * instead of within the file itself. Otherwise, if the file is compressed, for instance,
   * we won't know which codec to use to decompress the metadata needed to open the file in
   * the first place.
   *
   * The log file name will identify the compression codec used for the contents, if any.
   * For example, app_123 for an uncompressed log, app_123.lzf for an LZF-compressed log.
   *
   * @param logBaseDir Directory where the log file will be written.
   * @param appId A unique app ID.
   * @param appAttemptId A unique attempt id of appId. May be the empty string.
   * @param compressionCodecName Name to identify the codec used to compress the contents
   *                             of the log, or None if compression is not enabled.
   * @return A path which consists of file-system-safe characters.
   */
  def getLogPath(
      logBaseDir: URI,
      appId: String,
      appAttemptId: Option[String],
      compressionCodecName: Option[String] = None): String = {
    val base = logBaseDir.toString.stripSuffix("/") + "/" + sanitize(appId)
    val codec = compressionCodecName.map("." + _).getOrElse("")
    if (appAttemptId.isDefined) {
      base + "_" + sanitize(appAttemptId.get) + codec
    } else {
      base + codec
    }
  }

  // Replaces path-hostile characters; periods are replaced so that the codec
  // extension is the only "." in the file name (see openEventLog).
  private def sanitize(str: String): String = {
    str.replaceAll("[ :/]", "-").replaceAll("[.${}'\"]", "_").toLowerCase(Locale.ROOT)
  }

  /**
   * Opens an event log file and returns an input stream that contains the event data.
   *
   * @return input stream that holds one JSON record per line.
   */
  def openEventLog(log: Path, fs: FileSystem): InputStream = {
    val in = new BufferedInputStream(fs.open(log))

    // Compression codec is encoded as an extension, e.g. app_123.lzf
    // Since we sanitize the app ID to not include periods, it is safe to split on it
    val logName = log.getName.stripSuffix(IN_PROGRESS)
    val codecName: Option[String] = logName.split("\\.").tail.lastOption
    val codec = codecName.map { c =>
      codecMap.getOrElseUpdate(c, CompressionCodec.createCodec(new SparkConf, c))
    }

    try {
      codec.map(_.compressedInputStream(in)).getOrElse(in)
    } catch {
      case e: Exception =>
        in.close()
        throw e
    }
  }

}
| MLnick/spark | core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala | Scala | apache-2.0 | 13,563 |
package uk.co.turingatemyhamster.shortbol
package ops
import scalaz.Scalaz._
import scalaz._
import monocle.{Lens, Prism}
import RewriteRule.{FilteringEq, Logging, MaybeRewritten, Rewritten}
import uk.co.turingatemyhamster.shortbol.ops.Eval.EvalState
import uk.co.turingatemyhamster.shortbol.sharedAst.{Identifier, Literal}
import scala.annotation.implicitNotFound
/**
* Created by nmrp3 on 09/11/16.
*/
/**
 * A rewrite rule over values of type `T`. Applying a rule either leaves the value
 * unchanged (left of the disjunction) or produces a rewritten value plus extra
 * top-level instances accumulated in a Writer (right of the disjunction).
 */
trait RewriteRule[T] {
  self =>

  // Apply this rule; the result distinguishes "not rewritten" from "rewritten".
  def apply(t: T): MaybeRewritten[T]

//  def eval(t: T): Rewritten[T] = apply(t).fold(_.point[Rewritten], identity)

  // Focus this rule inside a larger structure `U` using an addressing value `AT`
  // (lens, prism, property name, ...) resolved via an implicit RewriteAtBuilder.
  def at[AT, U](at: AT)(implicit rwAt: RewriteAtBuilder[AT, U, T]): RewriteRule[U] = rwAt(at)(this)

  // Alternative: try this rule; only if it did NOT rewrite, try `rr` on the value.
  def or(rr: RewriteRule[T]): RewriteRule[T] = new RewriteRule[T] {
    override def apply(t: T) = for {
      selfRw <- self apply t
      rrRw <- selfRw match {
        case -\/(l) =>
          rr apply l
        case r@ \/-(_) =>
          r.point[EvalState]
      }
    } yield rrRw
  }

  // Sequence: apply this rule, then `rr` on the (possibly rewritten) result,
  // concatenating any extra instances produced by both steps.
  def andThen(rr: RewriteRule[T]): RewriteRule[T] = new RewriteRule[T] {
    override def apply(t: T) = for {
      selfRw <- self apply t
      rrRw <- selfRw fold (
        rr apply _,
        _.run match {
          case (extras, t2) => for {
            t2Rw <- rr apply t2
          } yield t2Rw fold (
            _.set(extras).right,
            _.run match {
              case (t2ex, t3) =>
                t3.set(extras ++ t2ex).right
            }
          )
        }
      )
    } yield rrRw
  }

  // Optional human-readable description, used by `log` below.
  def description: Option[String] = None

  // Wrap this rule so each application prints trace lines tagged with `name`.
  // NOTE(review): both the "<i>" and "<o>" lines are printed after the rule has
  // run (the for-comprehension evaluates `self apply t` first) — confirm intended.
  def log(name: String): RewriteRule[T] = new RewriteRule[T] {
    override def description = self.description

    private def descriptionString = description getOrElse ""

    override def apply(t: T) = for {
      res <- self apply t
      _ = println(s"$name <i> $descriptionString at $t")
      _ = println(s"$name <o> $descriptionString ${res.fold(_ => "unchanged", _ => "rewritten")} at $t")
    } yield res
  }
}
object RewriteRule {

  // Syntax: `lens :== value` builds an equality filter on the lens target.
  implicit class FilteringOps[S, T](_l: Lens[S, T]) {
    def :==(t: T) = FilteringEq[S, T](_l, t)
  }

  case class FilteringEq[S, T](l: Lens[S, T], t: T)

  // Syntax: `addr log "msg"` tags an addressing value for trace logging.
  implicit class LoggingOps[T](_t: T) {
    def log(msg: String): Logging[T] = Logging(msg, _t)
  }

  case class Logging[T](msg: String, t: T)

  def ofType[I](i: I) = OfType(i)
  case class OfType[I](i: I)

  // Either the unchanged value (left) or a rewritten value with extra instances (right).
  type MaybeRewritten[T] = Eval.EvalState[T \/ Rewritten[T]]
  // Writer accumulating new top-level InstanceExps produced by a rewrite.
  type Rewritten[T] = Writer[List[longhandAst.InstanceExp], T]

//  def Rewritten[T](w: InstanceExpWriter[T]): Rewritten[T] =
//    IndexedStateT[InstanceExpWriter, EvalContext, EvalContext, T](s => w.map(s -> _))

  /**
   * Repeatedly applies `r` to the file, appending any freshly generated
   * instances and re-running, until a fixed point (no rewrite) is reached.
   * Bails out after a fixed depth to catch misfiring (non-terminating) rules.
   */
  def rewrite(r: RewriteRule[longhandAst.SBFile], sf: longhandAst.SBFile): EvalState[longhandAst.SBFile] = {
    var depth = 0
    def rewriteStep(sf: longhandAst.SBFile): EvalState[longhandAst.SBFile] = {
      if(depth > 3) throw new IllegalStateException("Recursed too deeply during rewrite rules. Perhaps a rewrite rule is misfiring?")
      depth += 1
      for {
        rsf <- r(sf)
        res <- rsf.fold(_.point[EvalState],
          _.run match {
            case (extras, newSf) =>
              rewriteStep(longhandAst.SBFile(newSf.tops ::: extras))
          }
        )
      } yield res
    }

    rewriteStep(sf)
  }

  // Build a RewriteRule from any supported function shape (see Builder instances below).
  def apply[F, T](f: F)(implicit b: Builder[F, T]): RewriteRule[T] = b apply f

  // A rule that never rewrites anything.
  def noop[T]: RewriteRule[T] = RewriteRule { (t: T) => t.left[Rewritten[T]] }

  @implicitNotFound("Don't know how to build a RewriteRule[${T}] from ${F}")
  trait Builder[F, T] {
    def apply(f: F): RewriteRule[T]
  }

  // From a function already returning the full MaybeRewritten shape.
  implicit def fromMaybeRewritten[T]: Builder[T => MaybeRewritten[T], T] = new Builder[(T) => MaybeRewritten[T], T] {
    override def apply(f: (T) => MaybeRewritten[T]) = new RewriteRule[T] {
      override def apply(t: T) = f(t)
    }
  }

  // From a pure disjunction of unchanged \/ rewritten-with-extras.
  implicit def fromRewrittenDisjunction[T]: Builder[(T => T \/ Rewritten[T]), T] = new Builder[(T) => Disjunction[T, Rewritten[T]], T] {
    override def apply(f: (T) => Disjunction[T, Rewritten[T]]) = RewriteRule { (t: T) =>
      f(t).point[EvalState]
    }
  }

  // From a disjunction of unchanged \/ rewritten (no extra instances).
  implicit def fromDisjunction[T]: Builder[(T => T \/ T), T] = new Builder[(T) => Disjunction[T, T], T] {
    override def apply(f: (T) => Disjunction[T, T]) = RewriteRule { (t: T) =>
      f(t).map(_.point[Rewritten])
    }
  }

  // From a stateful computation; the result always counts as a rewrite.
  implicit def fromState[T]: Builder[T => EvalState[T], T] = new Builder[(T) => EvalState[T], T] {
    override def apply(f: (T) => EvalState[T]) = RewriteRule { (t: T) =>
      for {
        ft <- f(t)
      } yield ft.point[Rewritten].right[T]
    }
  }

  // From a plain function; the result always counts as a rewrite.
  implicit def fromFunc[T]: Builder[T => T, T] = new Builder[(T) => T, T] {
    override def apply(f: (T) => T) = RewriteRule { (t: T) =>
      f(t).right[T]
    }
  }

  // From an Option-returning function: None means "not rewritten".
  implicit def fromOptionFunc[T]: Builder[T => Option[T], T] = new Builder[(T) => Option[T], T] {
    override def apply(f: (T) => Option[T]) = RewriteRule { (t: T) =>
      f(t) match {
        case None => t.left[Rewritten[T]]
        case Some(ft) => ft.point[Rewritten].right[T]
      }
    }
  }

  // From a partial function: undefined means "not rewritten".
  implicit def fromPartialFunc[T]: Builder[PartialFunction[T, T], T] = new Builder[PartialFunction[T, T], T] {
    override def apply(f: PartialFunction[T, T]) = RewriteRule(f.lift)
  }

  // From a function choosing a rule per input value.
  implicit def fromFlatMap[T]: Builder[T => RewriteRule[T], T] = new Builder[(T) => RewriteRule[T], T] {
    override def apply(f: (T) => RewriteRule[T]) = RewriteRule { (t: T) =>
      (f(t): RewriteRule[T])(t)
    }
  }

  // From a stateful computation producing a rule to apply to the same value.
  implicit def fromStateFlatMap[T]: Builder[T => EvalState[RewriteRule[T]], T] = new Builder[(T) => EvalState[RewriteRule[T]], T] {
    override def apply(f: (T) => EvalState[RewriteRule[T]]) = RewriteRule { (t: T) =>
      for {
        rr <- f(t)
        res <- rr(t)
      } yield res
    }
  }

  // From an optional rule: None means "not rewritten".
  implicit def fromOptionFlatMap[T]: Builder[T => Option[RewriteRule[T]], T] = new Builder[(T) => Option[RewriteRule[T]], T] {
    override def apply(f: (T) => Option[RewriteRule[T]]) = RewriteRule { (t: T) =>
      f(t) match {
        case None => t.left[Rewritten[T]].point[EvalState]
        case Some(rr) => rr(t)
      }
    }
  }

  // Addressing token: apply a rule to every element of a List.
  object allElements

  // Addressing token: wildcard (any property) — see RewriteAtBuilder instances.
  object *
}
@implicitNotFound("Don't know how to use a ${AT} to rewrite ${T} at ${U}")
trait RewriteAtBuilder[AT, U, T] {
def apply(at: AT): RewriteAt[U, T]
}
@implicitNotFound("Don't know how to rewrite ${T} at ${U}")
trait RewriteAt[U, T] {
def apply(rr: RewriteRule[T]): RewriteRule[U]
}
object RewriteAtBuilder {

  // Unwraps a Logging-tagged address: delegate to the inner builder, then trace.
  implicit def rewriteAtLog[AT, S, T](rab: RewriteAtBuilder[AT, S, T]): RewriteAtBuilder[RewriteRule.Logging[AT], S, T] = new RewriteAtBuilder[Logging[AT], S, T] {
    override def apply(at: Logging[AT]) = new RewriteAt[S, T] {
      override def apply(rr: RewriteRule[T]) = rab(at.t)(rr).log(at.msg)
    }
  }

  // Focus through a monocle Lens: rewrite the target, set it back into the whole.
  implicit def rewriteAtLens[S, T]: RewriteAtBuilder[Lens[S, T], S, T] = new RewriteAtBuilder[Lens[S, T], S, T] {
    override def apply(at: Lens[S, T]) = new RewriteAt[S, T] {
      override def apply(rr: RewriteRule[T]) = RewriteRule { (s: S) =>
        val lsets = at.set(_: T)(s)
        for {
          rrs <- rr(at.get(s))
        } yield rrs.bimap(
          lsets,
          _ map lsets
        )
      }
    }
  }

  // Focus through a Prism: if the prism does not match, the value is unchanged.
  implicit def rewriteAtPrism[S, T]: RewriteAtBuilder[Prism[S, T], S, T] = new RewriteAtBuilder[Prism[S, T], S, T] {
    override def apply(at: Prism[S, T]) = new RewriteAt[S, T] {
      override def apply(rr: RewriteRule[T]) = RewriteRule { (s: S) =>
        val psets = at.set(_: T)(s)
        at.getOrModify(s).fold(
          _.left[Rewritten[S]].point[EvalState] : MaybeRewritten[S],
          r => for {
            rrr <- rr(r)
          } yield rrr.fold(
            l => psets(l).left[Rewritten[S]],
            r => (r map psets).right[S])
        ) : MaybeRewritten[S]
      }
    }
  }

  // Guard: only apply the rule when the predicate holds.
  implicit def filterRewrite[T]: RewriteAtBuilder[T => Boolean, T, T] = new RewriteAtBuilder[T => Boolean, T, T] {
    override def apply(at: (T) => Boolean) = new RewriteAt[T, T] {
      override def apply(rr: RewriteRule[T]) = RewriteRule { (t: T) =>
        if(at(t)) rr(t)
        else t.left[Rewritten[T]].point[EvalState]
      }
    }
  }

  // Guard built from a `lens :== value` equality filter.
  implicit def rewriteAtFilteringEq[S, T]: RewriteAtBuilder[RewriteRule.FilteringEq[S, T], S, S] = new RewriteAtBuilder[RewriteRule.FilteringEq[S, T], S, S] {
    override def apply(at: FilteringEq[S, T]) = filterRewrite[S] { (s: S) =>
      at.l.get(s) == at.t
    }
  }

  // Apply the rule to every list element; the list counts as rewritten if any element was.
  implicit def rewriteAtAllElements[T]: RewriteAtBuilder[RewriteRule.allElements.type, List[T], T] = new RewriteAtBuilder[RewriteRule.allElements.type, List[T], T] {
    override def apply(at: RewriteRule.allElements.type) = new RewriteAt[List[T], T] {
      override def apply(rr: RewriteRule[T]) = RewriteRule { (ts: List[T]) => for {
        rrts <- (ts map rr.apply).sequenceU
      } yield
        if (rrts exists (_.isRight)) {
          (rrts collect {
            case -\/(l) => l.point[Rewritten]
            case \/-(r) => r
          }).sequenceU.right[List[T]]
        } else {
          (rrts collect { case -\/(l) => l }).left[Rewritten[List[T]]]
        }
      }
    }
  }

  // Compose two addresses sequentially: rewrite at the first, then at the second.
  implicit def rewriteAtPair[F, G, S, T](implicit rwF: RewriteAtBuilder[F, S, T], rwG: RewriteAtBuilder[G, S, T]):
  RewriteAtBuilder[(F, G), S, T] = new RewriteAtBuilder[(F, G), S, T] {
    override def apply(pair: (F, G)) = new RewriteAt[S, T] {
      override def apply(rr: RewriteRule[T]) = (rr at pair._1) andThen (rr at pair._2)
    }
  }

  import longhandAst._
  import optics.{longhand => ol}
  import RewriteRule.{FilteringOps, OfType}

  // Rewrite the property list of a constructor app whose type matches `ofT.i`.
  implicit def rewriteAtOfType[I](implicit iToIdentifier: I => Identifier):
  RewriteAtBuilder[OfType[I], ConstructorApp, List[PropertyExp]] =
    new RewriteAtBuilder[OfType[I], ConstructorApp, List[PropertyExp]] {
      override def apply(ofT: OfType[I]) = new RewriteAt[ConstructorApp, List[PropertyExp]] {
        override def apply(rr: RewriteRule[List[PropertyExp]]) = rr at
          ol.ConstructorApp.body at
          ((ol.ConstructorApp.cstr composeLens ol.TpeConstructor.tpe) :== iToIdentifier(ofT.i))
      }
    }

  // Rewrite the Literal value of every property named `prop`.
  implicit def rewriteAtPropertyToLiteral[I](implicit iToIdentifier: I => Identifier):
  RewriteAtBuilder[I, List[PropertyExp], Literal] =
    new RewriteAtBuilder[I, List[PropertyExp], Literal] {
      override def apply(prop: I) = new RewriteAt[List[PropertyExp], Literal] {
        override def apply(rr: RewriteRule[Literal]) = rr at
          ol.PropertyValue.Literal.value at
          ol.PropertyValue.asLiteral at
          ol.PropertyExp.value at
          (ol.PropertyExp.property :== prop) at
          RewriteRule.allElements
      }
    }

  // Rewrite the Literal value of every property, regardless of name.
  implicit val rewriteAtStarToLiteral:
  RewriteAtBuilder[RewriteRule.*.type, List[PropertyExp], Literal] =
    new RewriteAtBuilder[RewriteRule.*.type, List[PropertyExp], Literal] {
      override def apply(star: RewriteRule.*.type) = new RewriteAt[List[PropertyExp], Literal] {
        override def apply(rr: RewriteRule[Literal]) = rr at
          ol.PropertyValue.Literal.value at
          ol.PropertyValue.asLiteral at
          ol.PropertyExp.value at
          RewriteRule.allElements
      }
    }

  // Rewrite the Reference value of every property named `prop`.
  implicit def rewriteAtPropertyToReference[I](implicit iToIdentifier: I => Identifier):
  RewriteAtBuilder[I, List[PropertyExp], PropertyValue.Reference] =
    new RewriteAtBuilder[I, List[PropertyExp], PropertyValue.Reference] {
      override def apply(prop: I) = new RewriteAt[List[PropertyExp], PropertyValue.Reference] {
        override def apply(rr: RewriteRule[PropertyValue.Reference]) = rr at
          ol.PropertyValue.asReference at
          ol.PropertyExp.value at
          (ol.PropertyExp.property :== prop) at
          RewriteRule.allElements
      }
    }

  // Rewrite the Reference value of every property, regardless of name.
  implicit val rewriteAtStarToReference:
  RewriteAtBuilder[RewriteRule.*.type, List[PropertyExp], PropertyValue.Reference] =
    new RewriteAtBuilder[RewriteRule.*.type, List[PropertyExp], PropertyValue.Reference] {
      override def apply(star: RewriteRule.*.type) = new RewriteAt[List[PropertyExp], PropertyValue.Reference] {
        override def apply(rr: RewriteRule[PropertyValue.Reference]) = rr at
          ol.PropertyValue.asReference at
          ol.PropertyExp.value at
          RewriteRule.allElements
      }
    }

  // Rewrite the raw PropertyValue of every property named `prop`.
  implicit def rewriteAtPropertyToValue[I](implicit iToIdentifier: I => Identifier):
  RewriteAtBuilder[I, List[PropertyExp], PropertyValue] =
    new RewriteAtBuilder[I, List[PropertyExp], PropertyValue] {
      override def apply(prop: I) = new RewriteAt[List[PropertyExp], PropertyValue] {
        override def apply(rr: RewriteRule[PropertyValue]) = rr at
          ol.PropertyExp.value at
          (ol.PropertyExp.property :== prop) at
          RewriteRule.allElements
      }
    }

  // Recurse into nested constructor bodies under every property named `prop`.
  implicit def rewriteAtPropertyToProperties[I](implicit iToIdentifier: I => Identifier):
  RewriteAtBuilder[I, List[PropertyExp], List[PropertyExp]] =
    new RewriteAtBuilder[I, List[PropertyExp], List[PropertyExp]] {
      override def apply(prop: I) = new RewriteAt[List[PropertyExp], List[PropertyExp]] {
        override def apply(rr: RewriteRule[List[PropertyExp]]) = rr at
          ol.ConstructorApp.body at
          ol.PropertyValue.Nested.value at
          ol.PropertyValue.asNested at
          ol.PropertyExp.value at
          (ol.PropertyExp.property :== prop) at
          RewriteRule.allElements
      }
    }
}
| drdozer/shortbol | shortbol/core/shared/src/main/scala/uk/co/turingatemyhamster/shortbol/ops/RewriteRule.scala | Scala | apache-2.0 | 13,317 |
package com.markglh.blog
import java.util.UUID
import com.typesafe.scalalogging.LazyLogging
import io.circe._
import io.circe.generic.auto._
import io.getquill.{CassandraAsyncContext, SnakeCase}
import org.http4s._
import org.http4s.client.blaze.PooledHttp1Client
import org.http4s.dsl._
import scala.concurrent.ExecutionContext
object BeaconService extends LazyLogging {

  // Shared pooled HTTP client.
  val client = PooledHttp1Client()

  // Bridge circe codecs into http4s entity decoders/encoders.
  implicit def circeJsonDecoder[A](implicit decoder: Decoder[A]) = org.http4s.circe.jsonOf[A]

  implicit def circeJsonEncoder[A](implicit encoder: Encoder[A]) = org.http4s.circe.jsonEncoderOf[A]

  /** HTTP routes exposing beacon lookups backed by the given Cassandra repository. */
  def routes(beaconRepo: BeaconRepo[CassandraAsyncContext[SnakeCase]])(implicit ec: ExecutionContext) = HttpService {
    case request @ GET -> Root / "beacons" / "locations" / locationId =>
      logger.debug(s"****Querying for locationId:$locationId")
      val beaconsForLocation = beaconRepo.findBeaconByLocation(UUID.fromString(locationId))
      Ok(beaconsForLocation)
  }
}
| markglh/composing-microservices-with-sbt-docker | beacon-service/src/main/scala/com/markglh/blog/BeaconService.scala | Scala | apache-2.0 | 943 |
/**
* Created by compr on 06-05-2017.
*/
/**
 * Maximal Tourism: read an undirected graph of n cities and m roads from stdin
 * and print the size of the largest connected component (the most cities a
 * tourist can visit in one trip).
 *
 * Fixes over the previous version:
 *  - the memo cache previously stored the *global* running maximum per start
 *    city instead of the component size, and cached values were added to the
 *    running count without taking a max, double-counting components;
 *  - the path-based DFS (which carried the recursion path, not a visited set)
 *    was exponential on dense graphs and could overflow the stack. It is
 *    replaced by an iterative BFS.
 */
object MaximalTourismMemoization {

  import scala.collection.mutable
  import scala.collection.mutable.Map

  def main(args: Array[String]) {
    val sc = new java.util.Scanner(System.in)
    val n = sc.nextInt()
    val m = sc.nextInt()

    // Undirected adjacency lists; self-loops are ignored as before.
    val adjacency: Map[Int, List[Int]] = Map()
    for (_ <- 0 until m) {
      val a = sc.nextInt()
      val b = sc.nextInt()
      if (a != b) {
        adjacency(a) = b :: adjacency.getOrElse(a, Nil)
        adjacency(b) = a :: adjacency.getOrElse(b, Nil)
      }
    }

    // Cache of component size per city, filled in by getConnectedCities.
    val componentSizes: Map[Int, Int] = Map()
    // An isolated city is a component of size 1.
    var best = if (n > 0) 1 else 0
    for (city <- 1 to n if adjacency.contains(city)) {
      best = best.max(getConnectedCities(adjacency, city, city, 0, Nil, componentSizes))
    }
    println(best)
  }

  /**
   * Returns `connections` plus the number of cities reachable from
   * `currentCity` (including itself), skipping any city in `connectedCities`.
   *
   * When no cities are excluded, the computed size is cached in
   * `cityMaxConnections` for every member of the component, so each component
   * is traversed only once across repeated calls.
   */
  def getConnectedCities(distinctStartCityConnections: Map[Int, List[Int]], startCity: Int, currentCity: Int, connections: Int, connectedCities: List[Int], cityMaxConnections: Map[Int, Int]): Int = {
    val excluded = connectedCities.toSet
    if (excluded(currentCity)) connections
    else cityMaxConnections.get(currentCity) match {
      case Some(size) =>
        // Component already measured: no need to walk the graph again.
        connections + size
      case None =>
        // Iterative BFS over the component containing currentCity.
        val visited = mutable.Set.empty[Int]
        val queue = mutable.Queue(currentCity)
        while (queue.nonEmpty) {
          val city = queue.dequeue()
          if (!excluded(city) && !visited(city)) {
            visited += city
            distinctStartCityConnections.getOrElse(city, Nil).foreach(queue.enqueue(_))
          }
        }
        // Only cache when nothing was excluded: then `visited` is the full component.
        if (excluded.isEmpty) {
          visited.foreach { city => cityMaxConnections(city) = visited.size }
        }
        connections + visited.size
    }
  }
}
| comprakash/learning-scala | rookierank-3/src/main/scala/MaximalTourismMemoization.scala | Scala | gpl-3.0 | 2,835 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.source
import com.twitter.bijection.Injection
import java.io.Serializable
/** Handles the error checking for Injection inversion
* if check fails, it will throw an unrecoverable exception stopping the job
* TODO: probably belongs in Bijection
*/
trait CheckedInversion[T,U] extends Serializable {
  /** The underlying injection whose inversion is being checked. */
  def injection: Injection[T,U]

  /** Attempts to invert `injection` on the given encoded value.
    * Per the trait contract above, implementations may throw an unrecoverable
    * exception (stopping the job) when the inversion check fails, rather than
    * returning None — behavior depends on the implementation.
    */
  def apply(input: U): Option[T]
}
| vidyar/twitterscalding | scalding-core/src/main/scala/com/twitter/scalding/source/CheckedInversion.scala | Scala | apache-2.0 | 955 |
package org.jetbrains.plugins.scala.lang.optimize
package generated
/** Optimize-imports tests driven by fixture files under the "simple/" folder. */
class OptimizeImportsSimpleTest extends OptimizeImportsTestBase {
  //This class was generated by build script, please don't change this
  override def folderPath: String = super.folderPath + "simple/"

  protected override def rootPath(): String = folderPath

  // Each test method loads the fixture matching its name and runs doTest().
  def testSorted(): Unit = doTest()

  def testSortedInPackage(): Unit = doTest()

  def testTwoExpressions(): Unit = doTest()

  def testDeleteBraces(): Unit = doTest()

  def testDontSaveNotResolved(): Unit = doTest()

  def testImportChainUsed(): Unit = doTest()

  def testLanguageFeatures(): Unit = doTest()

  def testNewLines(): Unit = doTest()

  def testOneImport(): Unit = doTest()

  def testScalaDoc(): Unit = doTest()

  def testSCL7275(): Unit = doTest()

  def testSomeTrait(): Unit = doTest()

  def testUnusedImportChain(): Unit = doTest()

  def testUnusedSelector(): Unit = doTest()

  def testUsedImport(): Unit = doTest()
}
/*
* Copyright (c) 2014-2019 Israel Herraiz <isra@herraiz.org>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// --------------------
// Code for example 3.9
// --------------------
package chap03
import Ex07.foldRight
object Ex09 {

  /** Computes the length of a list via foldRight: each element contributes 1
    * to the accumulator, starting from 0.
    */
  def length[A](l: List[A]): Int =
    foldRight(l, 0)((_, acc) => 1 + acc)
}
| iht/fpinscala | src/main/scala/chap03/ex09.scala | Scala | mit | 1,356 |
package colang.ast.parsed.routines
import colang.ast.parsed.{RootNamespace, Type}
import colang.ast.raw
import colang.issues.Issue
import colang.tokens.NativeKeyword
private[routines] object RegisterTypes {

  /**
    * "Registers" all types in the root namespace.
    * @param rootNamespace root namespace
    * @param typeDefs type definitions
    * @return (new types, encountered issues)
    */
  def registerTypes(rootNamespace: RootNamespace, typeDefs: Seq[raw.TypeDefinition]): (Seq[Type], Seq[Issue]) = {
    val registered = typeDefs map { typeDef =>
      val newType = new Type(
        name = typeDef.name.value,
        scope = Some(rootNamespace),
        definition = Some(typeDef),
        native = typeDef.specifiers.has(classOf[NativeKeyword]))

      // Adding the type to the namespace reports any conflicts as issues.
      (newType, rootNamespace.tryAdd(newType))
    }

    val (types, issuesPerType) = registered.unzip
    (types, issuesPerType.flatten)
  }
}
| merkispavel/colang | src/main/scala/colang/ast/parsed/routines/RegisterTypes.scala | Scala | mit | 940 |
package org.aprsdroid.app
import com.jazzido.PacketDroid.AudioBufferProcessor
// Selects between two AFSK input backends based on the hq ("high quality") flag:
// exactly one of abp/ad is instantiated; the other field stays null, so callers
// should only interact through start()/close().
class AfskInWrapper(hq : Boolean, au : AfskUploader, in_type : Int, samplerate : Int) {
	var abp = if (!hq) new AudioBufferProcessor(au) else null
	var ad = if (hq) new AfskDemodulator(au, in_type, samplerate) else null

	// Start whichever backend is active.
	def start() = if (!hq) abp.start() else ad.start()
	// Stop the active backend; note the AudioBufferProcessor API names this stopRecording().
	def close() = if (!hq) abp.stopRecording() else ad.close()
}
| ge0rg/aprsdroid | src/backend/AfskInWrapper.scala | Scala | gpl-2.0 | 415 |
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.morph
import java.{util => ju}
import com.textocat.textokit.commons.cas.FSUtils
import com.textocat.textokit.morph.dictionary.resource.{GramModel, MorphDictionaryUtils}
import com.textocat.textokit.morph.fs.{Word, Wordform}
import com.textocat.textokit.morph.model.MorphConstants._
import com.textocat.textokit.postagger.MorphCasUtils
import org.apache.uima.fit.util.JCasUtil
import org.apache.uima.jcas.JCas
import org.apache.uima.jcas.tcas.Annotation
import scala.collection.JavaConversions._
/**
* @author Rinat Gareev
*/
/** A word annotation enriched with decoded morphology.
  * @param wordform the underlying wordform feature structure
  * @param text covered text of the word
  * @param lemma dictionary lemma, if present on the wordform
  * @param partOfSpeech decoded part of speech, or [[RichWord.NoPartOfSpeech]]
  *                     when no POS grammeme matched
  * @param gramCats grammatical category constant -> grammeme decoded for this word
  */
case class RichWord(wordform: Wordform, text: String, lemma: Option[String], partOfSpeech: String, gramCats: Map[String, String]) {
  // Underlying Word annotation and its span in the CAS.
  def anno = wordform.getWord
  def begin = anno.getBegin
  def end = anno.getEnd
}
object RichWord {
  // Placeholder part-of-speech used when no POS grammeme matches
  // (see RichWordFactory.word2RichWord).
  val NoPartOfSpeech = "null"
}
/** Builds [[RichWord]]s from UIMA [[Word]] annotations, decoding grammeme
  * bit sets through the supplied grammatical model.
  */
class RichWordFactory(gramModel: GramModel) {

  import RichWord._

  // TODO note that this is too specific, bind to RNC dictionary extension
  // Grammeme roots that are treated as parts of speech.
  private val PartOfSpeechRoots = Set(POST, Abbr, Anum, Apro, Prnt)
  // Union bit mask over all POS roots and their child grammemes.
  private val partOfSpeechMask = {
    val mask = new ju.BitSet()
    PartOfSpeechRoots.map(gramModel.getGrammemWithChildrenBits(_, true)).foreach(mask.or)
    mask
  }
  // Per-category masks (animacy, gender, number, case, aspect, transitivity,
  // person, tense, mood, involvement, voice) plus the POS mask under POST.
  private val gramCatMasks = List(ANim, GNdr, NMbr, CAse, ASpc, TRns, PErs, TEns, MOod, INvl, VOic)
    .map { gcat => gcat -> gramModel.getGrammemWithChildrenBits(gcat, true)
  }.toMap + (POST -> partOfSpeechMask)

  /** Rich words for every [[Word]] covered by `span`, in selection order. */
  def seqFromSpan(jCas: JCas, span: Annotation): IndexedSeq[RichWord] =
    JCasUtil.selectCovered(jCas, classOf[Word], span).map(word2RichWord).toIndexedSeq

  /** Decodes one word; wordform extraction is delegated to
    * MorphCasUtils.requireOnlyWordform. */
  implicit def word2RichWord(w: Word): RichWord = {
    val wf = MorphCasUtils.requireOnlyWordform(w)
    val wfbs = MorphDictionaryUtils.toGramBits(gramModel, FSUtils.toList(wf.getGrammems))
    // Part of speech: intersect the word's grammeme bits with the POS mask;
    // multiple surviving grammemes are joined with '&'.
    val partOfSpeech = {
      val grs = wfbs.clone().asInstanceOf[ju.BitSet]
      grs.and(partOfSpeechMask)
      if (grs.isEmpty) NoPartOfSpeech
      else gramModel.toGramSet(grs).mkString("&")
    }
    // For each category keep only the first grammeme surviving the mask, if any.
    val gramCats = (for ((cat, catMask) <- gramCatMasks) yield {
      val grs = wfbs.clone().asInstanceOf[ju.BitSet]
      grs.and(catMask)
      if (grs.isEmpty) None
      else Some(cat -> gramModel.toGramSet(grs).head)
    }).flatten.toMap
    RichWord(wf, w.getCoveredText, Option(wf.getLemma), partOfSpeech, gramCats)
  }
}
| textocat/textokit-core | Textokit.PhraseRecognizer.SPC/src/main/scala/com/textocat/textokit/morph/RichWord.scala | Scala | apache-2.0 | 2,937 |
package scopt
/**
* this file was copied from "https://github.com/scopt/scopt", a light-weight command-line arg parsing library.
*/
import java.net.UnknownHostException
import java.text.ParseException
import scala.collection.mutable.{ListBuffer, ListMap}
/** Type class describing how to parse a command-line token into an `A`. */
trait Read[A] { self =>
  /** Argument arity: 0 for flags, 1 for single values, 2 for key=value pairs. */
  def arity: Int
  /** Number of command-line tokens consumed: flags take none, everything else one. */
  def tokensToRead: Int = if (arity == 0) 0 else 1
  /** Parses a raw token into an `A`; may throw on malformed input. */
  def reads: String => A
  /** Derives a reader for `B` by post-processing this reader's result. */
  def map[B](f: A => B): Read[B] = new Read[B] {
    val arity = self.arity
    val reads = self.reads andThen f
  }
}

object Read {
  import java.io.File
  import java.net.{InetAddress, URI}
  import java.text.SimpleDateFormat
  import java.util.{Calendar, GregorianCalendar, Locale}
  import scala.concurrent.duration.Duration
  /** Builds a single-token [[Read]] from a plain parsing function. */
  def reads[A](f: String => A): Read[A] = new Read[A] {
    val arity = 1
    val reads = f
  }
  implicit val intRead: Read[Int] = reads { _.toInt }
  implicit val stringRead: Read[String] = reads { identity }
  implicit val doubleRead: Read[Double] = reads { _.toDouble }
  /** Accepts true/false, yes/no and 1/0, case-insensitively; rejects anything else. */
  implicit val booleanRead: Read[Boolean] =
    reads { _.toLowerCase match {
      case "true" => true
      case "false" => false
      case "yes" => true
      case "no" => false
      case "1" => true
      case "0" => false
      case s =>
        throw new IllegalArgumentException("'" + s + "' is not a boolean.")
    }}
  implicit val longRead: Read[Long] = reads { _.toLong }
  implicit val bigIntRead: Read[BigInt] = reads { BigInt(_) }
  implicit val bigDecimalRead: Read[BigDecimal] = reads { BigDecimal(_) }
  // Historical name with three d's is kept for source compatibility.
  implicit val yyyymmdddRead: Read[Calendar] = calendarRead("yyyy-MM-dd")
  def calendarRead(pattern: String): Read[Calendar] = calendarRead(pattern, Locale.getDefault)
  def calendarRead(pattern: String, locale: Locale): Read[Calendar] =
    reads { s =>
      val fmt = new SimpleDateFormat(pattern)
      val c = new GregorianCalendar
      c.setTime(fmt.parse(s))
      c
    }
  implicit val fileRead: Read[File] = reads { new File(_) }
  implicit val uriRead: Read[URI] = reads { new URI(_) }
  implicit val inetAddress: Read[InetAddress] = reads { InetAddress.getByName(_) }
  /** Parses a Scala duration such as "5 seconds", reporting malformed input as a
   *  ParseException (which OptionDef.applyArgument turns into a readable error).
   *  Bug fix: the original wrapped the *construction* of the `Duration(_)` lambda
   *  in `try`, so the parse-time NumberFormatException escaped unconverted; the
   *  `try` must wrap the application of `Duration` to the string instead.
   */
  implicit val durationRead: Read[Duration] =
    reads { s =>
      try {
        Duration(s)
      } catch {
        case e: NumberFormatException => throw new ParseException(e.getMessage, -1)
      }
    }
  /** Reads `key=value` into a pair using the two element readers. */
  implicit def tupleRead[A1: Read, A2: Read]: Read[(A1, A2)] = new Read[(A1, A2)] {
    val arity = 2
    val reads = { (s: String) =>
      splitKeyValue(s) match {
        case (k, v) => implicitly[Read[A1]].reads(k) -> implicitly[Read[A2]].reads(v)
      }
    }
  }
  // Splits on the first '=' only, so values may themselves contain '='.
  private def splitKeyValue(s: String): (String, String) =
    s.indexOf('=') match {
      case -1 => throw new IllegalArgumentException("Expected a key=value pair")
      case n: Int => (s.slice(0, n), s.slice(n + 1, s.length))
    }
  /** Flag reader: consumes no value token. */
  implicit val unitRead: Read[Unit] = new Read[Unit] {
    val arity = 0
    val reads = { (s: String) => () }
  }
  val sep = ","
  // reads("1,2,3,4,5") == Seq(1,2,3,4,5)
  implicit def seqRead[A: Read]: Read[Seq[A]] = reads { (s: String) =>
    s.split(sep).map(implicitly[Read[A]].reads)
  }
  // reads("1=false,2=true") == Map(1 -> false, 2 -> true)
  implicit def mapRead[K: Read, V: Read]: Read[Map[K,V]] = reads { (s: String) =>
    s.split(sep).map(implicitly[Read[(K,V)]].reads).toMap
  }
  // reads("1=false,1=true") == List((1 -> false), (1 -> true))
  implicit def seqTupleRead[K: Read, V: Read]: Read[Seq[(K,V)]] = reads { (s: String) =>
    s.split(sep).map(implicitly[Read[(K,V)]].reads).toSeq
  }
}
/** Type class supplying the initial ("zero") configuration used by
  * `OptionParser.parse(args)` when no explicit initial value is given. */
trait Zero[A] {
  def zero: A
}
object Zero {
  /** Creates an instance from a by-name value; the value is evaluated once,
    * eagerly, when the instance is constructed. */
  def zero[A](f: => A): Zero[A] = new Zero[A] {
    val zero = f
  }
  implicit val intZero: Zero[Int] = zero(0)
  implicit val unitZero: Zero[Unit] = zero(())
}
object Validation {
  /** Applies every validation in `vs` to `value`. Returns Right(()) when all
   *  of them pass, otherwise Left carrying every failure message in the order
   *  the validations were declared.
   */
  def validateValue[A](vs: Seq[A => Either[String, Unit]])(value: A): Either[Seq[String], Unit] = {
    val errors = vs.flatMap(check => check(value).left.toOption)
    if (errors.isEmpty) Right(())
    else Left(errors)
  }
}
/** Internal discriminator for the kind of entry an [[OptionDef]] represents. */
private[scopt] sealed trait OptionDefKind {}
private[scopt] case object Opt extends OptionDefKind   // --name / -x option (see OptionParser.opt)
private[scopt] case object Note extends OptionDefKind  // free-form usage text (note)
private[scopt] case object Arg extends OptionDefKind   // positional argument (arg)
private[scopt] case object Cmd extends OptionDefKind   // sub-command (cmd)
private[scopt] case object Head extends OptionDefKind  // header line (head)
private[scopt] case object Check extends OptionDefKind // whole-config validation (checkConfig)
/** <code>scopt.immutable.OptionParser</code> is instantiated within your object,
* set up by an (ordered) sequence of invocations of
* the various builder methods such as
* <a href="#opt[A](Char,String)(Read[A]):OptionDef[A,C]"><code>opt</code></a> method or
* <a href="#arg[A](String)(Read[A]):OptionDef[A,C]"><code>arg</code></a> method.
* {{{
* val parser = new scopt.OptionParser[Config]("scopt") {
* head("scopt", "3.x")
* opt[Int]('f', "foo") action { (x, c) =>
* c.copy(foo = x) } text("foo is an integer property")
* opt[File]('o', "out") required() valueName("<file>") action { (x, c) =>
* c.copy(out = x) } text("out is a required file property")
* opt[(String, Int)]("max") action { case ((k, v), c) =>
* c.copy(libName = k, maxCount = v) } validate { x =>
* if (x._2 > 0) success else failure("Value <max> must be >0")
* } keyValueName("<libname>", "<max>") text("maximum count for <libname>")
* opt[Unit]("verbose") action { (_, c) =>
* c.copy(verbose = true) } text("verbose is a flag")
* note("some notes.\\n")
* help("help") text("prints this usage text")
* arg[File]("<file>...") unbounded() optional() action { (x, c) =>
* c.copy(files = c.files :+ x) } text("optional unbounded args")
* cmd("update") action { (_, c) =>
* c.copy(mode = "update") } text("update is a command.") children(
* opt[Unit]("not-keepalive") abbr("nk") action { (_, c) =>
* c.copy(keepalive = false) } text("disable keepalive"),
* opt[Boolean]("xyz") action { (x, c) =>
* c.copy(xyz = x) } text("xyz is a boolean property")
* )
* }
* // parser.parse returns Option[C]
* parser.parse(args, Config()) map { config =>
* // do stuff
* } getOrElse {
* // arguments are bad, usage message will have been displayed
* }
* }}}
*/
abstract case class OptionParser[C](programName: String) {
  // NOTE(review): "abstract case class" is a deprecated Scala pattern, kept
  // as-is because this file is a verbatim copy of scopt 3.x sources.
  // Registry of every definition in declaration order; helpOptions additionally
  // tracks the ones created via help() so error output can point users at them.
  protected val options = new ListBuffer[OptionDef[_, C]]
  protected val helpOptions = new ListBuffer[OptionDef[_, C]]
  def errorOnUnknownArgument: Boolean = true
  def showUsageOnError: Boolean = helpOptions.isEmpty
  // Invoked by the help()/version() actions; exits the JVM by default.
  def terminate(exitState: Either[String, Unit]): Unit =
    exitState match {
      case Left(_) => sys.exit(1)
      case Right(_) => sys.exit(0)
    }
  def reportError(msg: String): Unit = {
    Console.err.println("Error: " + msg)
  }
  def reportWarning(msg: String): Unit = {
    Console.err.println("Warning: " + msg)
  }
  // Prints e.g. "Try --help for more information." listing all help options.
  def showTryHelp(): Unit = {
    // Joins names English-list style: "a", "a or b", "a, b, or c".
    def oxford(xs: List[String]): String = xs match {
      case a :: b :: Nil => a + " or " + b
      case _ => (xs.dropRight(2) :+ xs.takeRight(2).mkString(", or ")).mkString(", ")
    }
    Console.err.println("Try " + oxford(helpOptions.toList map {_.fullName}) + " for more information.")
  }
  /** adds usage text. */
  def head(xs: String*): OptionDef[Unit, C] = makeDef[Unit](Head, "") text(xs.mkString(" "))
  /** adds an option invoked by `--name x`.
   * @param name name of the option
   */
  def opt[A: Read](name: String): OptionDef[A, C] = makeDef(Opt, name)
  /** adds an option invoked by `-x value` or `--name value`.
   * @param x name of the short option
   * @param name name of the option
   */
  def opt[A: Read](x: Char, name: String): OptionDef[A, C] =
    opt[A](name) abbr(x.toString)
  /** adds usage text. */
  def note(x: String): OptionDef[Unit, C] = makeDef[Unit](Note, "") text(x)
  /** adds an argument invoked by an option without `-` or `--`.
   * @param name name in the usage text
   */
  def arg[A: Read](name: String): OptionDef[A, C] = makeDef(Arg, name) required()
  /** adds a command invoked by an option without `-` or `--`.
   * @param name name of the command
   */
  def cmd(name: String): OptionDef[Unit, C] = makeDef[Unit](Cmd, name)
  /** adds an option invoked by `--name` that displays usage text and exits.
   * @param name name of the option
   */
  def help(name: String): OptionDef[Unit, C] = {
    val o = opt[Unit](name) action { (x, c) =>
      showUsage()
      terminate(Right(()))
      c
    }
    helpOptions += o
    o
  }
  /** adds an option invoked by `--name` that displays header text and exits.
   * @param name name of the option
   */
  def version(name: String): OptionDef[Unit, C] =
    opt[Unit](name) action { (x, c) =>
      showHeader()
      terminate(Right(()))
      c
    }
  /** adds final check. */
  def checkConfig(f: C => Either[String, Unit]): OptionDef[Unit, C] =
    makeDef[Unit](Check, "") validateConfig(f)
  def showHeader() {
    Console.out.println(header)
  }
  // Header is the concatenation of all head() definitions.
  def header: String = {
    import OptionDef._
    (heads map {_.usage}).mkString(NL)
  }
  def showUsage(): Unit = {
    Console.out.println(usage)
  }
  def showUsageAsError(): Unit = {
    Console.err.println(usage)
  }
  // Builds the whole usage text: header, "Usage:" line, then per-definition
  // descriptions with children inserted directly after their parent.
  def usage: String = {
    import OptionDef._
    val unsorted = options filter { o => o.kind != Head && o.kind != Check && !o.isHidden }
    // Split into child definitions (unseen) and top-level ones (xs), then
    // repeatedly move each child right after its parent until none remain.
    val (unseen, xs) = unsorted partition {_.hasParent} match {
      case (p, np) => (ListBuffer() ++ p, ListBuffer() ++ np)
    }
    while (!unseen.isEmpty) {
      for {
        x <- xs
      } {
        val cs = unseen filter {_.getParentId == Some(x.id)}
        unseen --= cs
        xs.insertAll((xs indexOf x) + 1, cs)
      }
    }
    val descriptions = xs map {_.usage}
    (if (header == "") "" else header + NL) +
    "Usage: " + commandExample(None) + NLNL +
    descriptions.mkString(NL)
  }
  // Fully-qualified command name: parent command names joined by spaces.
  private[scopt] def commandName(cmd: OptionDef[_, C]): String =
    (cmd.getParentId map { x =>
      (commands find {_.id == x} map {commandName} getOrElse {""}) + " "
    } getOrElse {""}) + cmd.name
  // One-line example for the "Usage:" header, e.g.
  // "prog [update|status] [options] <file>...". None means the top level.
  private[scopt] def commandExample(cmd: Option[OptionDef[_, C]]): String = {
    val text = new ListBuffer[String]()
    text += cmd map {commandName} getOrElse programName
    val parentId = cmd map {_.id}
    val cs = commands filter {_.getParentId == parentId}
    if (cs.nonEmpty) text += cs map {_.name} mkString("[", "|", "]")
    val os = options.toSeq filter { case x => x.kind == Opt && x.getParentId == parentId }
    val as = arguments filter {_.getParentId == parentId}
    if (os.nonEmpty) text += "[options]"
    if (cs exists { case x => arguments exists {_.getParentId == Some(x.id)}}) text += "<args>..."
    else if (as.nonEmpty) text ++= as map {_.argName}
    text.mkString(" ")
  }
  /** call this to express success in custom validation. */
  def success: Either[String, Unit] = OptionDef.makeSuccess[String]
  /** call this to express failure in custom validation. */
  def failure(msg: String): Either[String, Unit] = Left(msg)
  protected def heads: Seq[OptionDef[_, C]] = options.toSeq filter {_.kind == Head}
  protected def nonArgs: Seq[OptionDef[_, C]] = options.toSeq filter { case x => x.kind == Opt || x.kind == Note }
  protected def arguments: Seq[OptionDef[_, C]] = options.toSeq filter {_.kind == Arg}
  protected def commands: Seq[OptionDef[_, C]] = options.toSeq filter {_.kind == Cmd}
  protected def checks: Seq[OptionDef[_, C]] = options.toSeq filter {_.kind == Check}
  protected def makeDef[A: Read](kind: OptionDefKind, name: String): OptionDef[A, C] =
    updateOption(new OptionDef[A, C](parser = this, kind = kind, name = name))
  // Replaces an existing definition with the same id, or appends a new one.
  // OptionDef's builder methods call this after every copy-on-write update.
  private[scopt] def updateOption[A: Read](option: OptionDef[A, C]): OptionDef[A, C] = {
    val idx = options indexWhere { _.id == option.id }
    if (idx > -1) options(idx) = option
    else options += option
    option
  }
  /** parses the given `args`.
   * @return `true` if successful, `false` otherwise
   */
  def parse(args: Seq[String])(implicit ev: Zero[C]): Boolean =
    parse(args, ev.zero) match {
      case Some(x) => true
      case None => false
    }
  /** parses the given `args`.
   */
  def parse(args: Seq[String], init: C): Option[C] = {
    var i = 0
    // Worklists of definitions still able to accept occurrences; definitions
    // are removed once their max-occurrence count is reached, and children of
    // a matched command are pushed in by pushChildren.
    val pendingOptions = ListBuffer() ++ (nonArgs filterNot {_.hasParent})
    val pendingArgs = ListBuffer() ++ (arguments filterNot {_.hasParent})
    val pendingCommands = ListBuffer() ++ (commands filterNot {_.hasParent})
    val occurrences = ListMap[OptionDef[_, C], Int]().withDefaultValue(0)
    var _config: C = init
    var _error = false
    def pushChildren(opt: OptionDef[_, C]): Unit = {
      // commands are cleared to guarantee that it appears first
      pendingCommands.clear()
      pendingOptions insertAll (0, nonArgs filter { x => x.getParentId == Some(opt.id) &&
        !pendingOptions.contains(x) })
      pendingArgs insertAll (0, arguments filter { x => x.getParentId == Some(opt.id) &&
        !pendingArgs.contains(x) })
      pendingCommands insertAll (0, commands filter { x => x.getParentId == Some(opt.id) &&
        !pendingCommands.contains(x) })
    }
    // Unknown tokens are fatal only when errorOnUnknownArgument is true;
    // otherwise they are downgraded to warnings.
    def handleError(msg: String): Unit = {
      if (errorOnUnknownArgument) {
        _error = true
        reportError(msg)
      }
      else reportWarning(msg)
    }
    // Parses/validates the value and threads it through the definition's
    // action to produce the next config.
    def handleArgument(opt: OptionDef[_, C], arg: String): Unit = {
      opt.applyArgument(arg, _config) match {
        case Right(c) =>
          _config = c
          pushChildren(opt)
        case Left(xs) =>
          _error = true
          xs foreach reportError
      }
    }
    def handleOccurrence(opt: OptionDef[_, C], pending: ListBuffer[OptionDef[_, C]]): Unit = {
      occurrences(opt) += 1
      if (occurrences(opt) >= opt.getMaxOccurs) {
        pending -= opt
      }
    }
    def findCommand(cmd: String): Option[OptionDef[_, C]] =
      pendingCommands find {_.name == cmd}
    // greedy match
    def handleShortOptions(g0: String): Unit = {
      // Try the longest prefix of the grouped short options first, then recurse
      // on the remainder, so "-ab" can match "-a" followed by "-b".
      val gs = (0 to g0.size - 1).toSeq map { n => g0.substring(0, g0.size - n) }
      gs flatMap { g => pendingOptions map {(g, _)} } find { case (g, opt) =>
        opt.shortOptTokens("-" + g) == 1
      } match {
        case Some(p) =>
          val (g, option) = p
          handleOccurrence(option, pendingOptions)
          handleArgument(option, "")
          if (g0.drop(g.size) != "") {
            handleShortOptions(g0 drop g.size)
          }
        case None => handleError("Unknown option " + "-" + g0)
      }
    }
    // Runs all checkConfig validations against the final configuration.
    def handleChecks(c: C): Unit = {
      Validation.validateValue(checks flatMap {_.checks})(c) match {
        case Right(c) => // do nothing
        case Left(xs) =>
          _error = true
          xs foreach reportError
      }
    }
    // Main token loop: options first, then commands, then positional args.
    while (i < args.length) {
      pendingOptions find {_.tokensToRead(i, args) > 0} match {
        case Some(option) =>
          handleOccurrence(option, pendingOptions)
          option(i, args) match {
            case Right(v) => handleArgument(option, v)
            case Left(outOfBounds) => handleError(outOfBounds)
          }
          // move index forward for gobbling
          if (option.tokensToRead(i, args) > 1) {
            i += option.tokensToRead(i, args) - 1
          } // if
        case None =>
          args(i) match {
            case arg if arg startsWith "--" => handleError("Unknown option " + arg)
            case arg if arg startsWith "-" =>
              if (arg == "-") handleError("Unknown option " + arg)
              else handleShortOptions(arg drop 1)
            case arg if findCommand(arg).isDefined =>
              val cmd = findCommand(arg).get
              handleOccurrence(cmd, pendingCommands)
              handleArgument(cmd, "")
            case arg if pendingArgs.isEmpty => handleError("Unknown argument '" + arg + "'")
            case arg =>
              val first = pendingArgs.head
              handleOccurrence(first, pendingArgs)
              handleArgument(first, arg)
          }
      }
      i += 1
    }
    // Minimum-occurrence checks for anything still pending.
    (pendingOptions filter { opt => opt.getMinOccurs > occurrences(opt) }) foreach { opt =>
      if (opt.getMinOccurs == 1) reportError("Missing " + opt.shortDescription)
      else reportError(opt.shortDescription.capitalize + " must be given " + opt.getMinOccurs + " times")
      _error = true
    }
    (pendingArgs filter { arg => arg.getMinOccurs > occurrences(arg) }) foreach { arg =>
      // NOTE(review): the stray apostrophe in this message ("' must be given")
      // looks unintended, but the text is preserved as-is.
      if (arg.getMinOccurs == 1) reportError("Missing " + arg.shortDescription)
      else reportError(arg.shortDescription.capitalize + "' must be given " + arg.getMinOccurs + " times")
      _error = true
    }
    handleChecks(_config)
    if (_error) {
      if (showUsageOnError) showUsageAsError()
      else showTryHelp()
      None
    }
    else Some(_config)
  }
}
/** Immutable definition of a single option, argument, command, note, header
  * line or config check. Every builder method (action, text, required, ...)
  * produces an updated copy and re-registers it with the owning parser via
  * `_parser.updateOption`, so the parser always holds the latest version.
  */
class OptionDef[A: Read, C](
  _parser: OptionParser[C],
  _id: Int,
  _kind: OptionDefKind,
  _name: String,
  _shortOpt: Option[String],
  _keyName: Option[String],
  _valueName: Option[String],
  _desc: String,
  _action: (A, C) => C,
  _validations: Seq[A => Either[String, Unit]],
  _configValidations: Seq[C => Either[String, Unit]],
  _parentId: Option[Int],
  _minOccurs: Int,
  _maxOccurs: Int,
  _isHidden: Boolean) {
  import OptionDef._
  // Convenience constructor used by OptionParser.makeDef: fresh id, no-op
  // action, optional (0 min) and single-occurrence (1 max) by default.
  def this(parser: OptionParser[C], kind: OptionDefKind, name: String) =
    this(_parser = parser, _id = OptionDef.generateId, _kind = kind, _name = name,
      _shortOpt = None, _keyName = None, _valueName = None,
      _desc = "", _action = { (a: A, c: C) => c },
      _validations = Seq(), _configValidations = Seq(),
      _parentId = None, _minOccurs = 0, _maxOccurs = 1,
      _isHidden = false)
  private[scopt] def copy(
    _parser: OptionParser[C] = this._parser,
    _id: Int = this._id,
    _kind: OptionDefKind = this._kind,
    _name: String = this._name,
    _shortOpt: Option[String] = this._shortOpt,
    _keyName: Option[String] = this._keyName,
    _valueName: Option[String] = this._valueName,
    _desc: String = this._desc,
    _action: (A, C) => C = this._action,
    _validations: Seq[A => Either[String, Unit]] = this._validations,
    _configValidations: Seq[C => Either[String, Unit]] = this._configValidations,
    _parentId: Option[Int] = this._parentId,
    _minOccurs: Int = this._minOccurs,
    _maxOccurs: Int = this._maxOccurs,
    _isHidden: Boolean = this._isHidden): OptionDef[A, C] =
    new OptionDef(_parser = _parser, _id = _id, _kind = _kind, _name = _name, _shortOpt = _shortOpt,
      _keyName = _keyName, _valueName = _valueName, _desc = _desc, _action = _action,
      _validations = _validations, _configValidations = _configValidations,
      _parentId = _parentId, _minOccurs = _minOccurs, _maxOccurs = _maxOccurs,
      _isHidden = _isHidden)
  private[this] def read: Read[A] = implicitly[Read[A]]
  /** Adds a callback function (composed after any previously added action). */
  def action(f: (A, C) => C): OptionDef[A, C] =
    _parser.updateOption(copy(_action = (a: A, c: C) => { f(a, _action(a, c)) }))
  /** Adds a callback function. */
  def foreach(f: A => Unit): OptionDef[A, C] =
    _parser.updateOption(copy(_action = (a: A, c: C) => {
      val c2 = _action(a, c)
      f(a)
      c2
    }))
  override def toString: String = fullName
  /** Adds short option -x. */
  def abbr(x: String): OptionDef[A, C] =
    _parser.updateOption(copy(_shortOpt = Some(x)))
  /** Requires the option to appear at least `n` times. */
  def minOccurs(n: Int): OptionDef[A, C] =
    _parser.updateOption(copy(_minOccurs = n))
  /** Requires the option to appear at least once. */
  def required(): OptionDef[A, C] = minOccurs(1)
  /** Changes the option to be optional. */
  def optional(): OptionDef[A, C] = minOccurs(0)
  /** Allows the argument to appear at most `n` times. */
  def maxOccurs(n: Int): OptionDef[A, C] =
    _parser.updateOption(copy(_maxOccurs = n))
  /** Allows the argument to appear multiple times. */
  def unbounded(): OptionDef[A, C] = maxOccurs(UNBOUNDED)
  /** Adds description in the usage text. */
  def text(x: String): OptionDef[A, C] =
    _parser.updateOption(copy(_desc = x))
  /** Adds value name used in the usage text. */
  def valueName(x: String): OptionDef[A, C] =
    _parser.updateOption(copy(_valueName = Some(x)))
  /** Adds key name used in the usage text. */
  def keyName(x: String): OptionDef[A, C] =
    _parser.updateOption(copy(_keyName = Some(x)))
  /** Adds key and value names used in the usage text. */
  def keyValueName(k: String, v: String): OptionDef[A, C] =
    keyName(k) valueName(v)
  /** Adds custom validation. */
  def validate(f: A => Either[String, Unit]) =
    _parser.updateOption(copy(_validations = _validations :+ f))
  /** Hides the option in any usage text. */
  def hidden(): OptionDef[A, C] =
    _parser.updateOption(copy(_isHidden = true))
  private[scopt] def validateConfig(f: C => Either[String, Unit]) =
    _parser.updateOption(copy(_configValidations = _configValidations :+ f))
  private[scopt] def parent(x: OptionDef[_, C]): OptionDef[A, C] =
    _parser.updateOption(copy(_parentId = Some(x.id)))
  /** Adds opt/arg under this command. */
  def children(xs: OptionDef[_, C]*): OptionDef[A, C] = {
    xs foreach {_.parent(this)}
    this
  }
  private[scopt] val kind: OptionDefKind = _kind
  private[scopt] val id: Int = _id
  private[scopt] val name: String = _name
  private[scopt] def callback: (A, C) => C = _action
  private[scopt] def getMinOccurs: Int = _minOccurs
  private[scopt] def getMaxOccurs: Int = _maxOccurs
  private[scopt] def shortOptOrBlank: String = _shortOpt getOrElse("")
  private[scopt] def hasParent: Boolean = _parentId.isDefined
  private[scopt] def getParentId: Option[Int] = _parentId
  private[scopt] def isHidden: Boolean = _isHidden
  private[scopt] def checks: Seq[C => Either[String, Unit]] = _configValidations
  // Parses one token, runs the custom validations, then applies the action.
  // Parse failures are mapped to user-readable messages by exception type.
  private[scopt] def applyArgument(arg: String, config: C): Either[Seq[String], C] =
    try {
      val x = read.reads(arg)
      Validation.validateValue(_validations)(x) match {
        case Right(_) => Right(callback(x, config))
        case Left(xs) => Left(xs)
      }
    } catch {
      case e: NumberFormatException => Left(Seq(shortDescription.capitalize + " expects a number but was given '" + arg + "'"))
      case e: UnknownHostException => Left(Seq(shortDescription.capitalize + " expects a host name or an IP address but was given '" + arg + "' which is invalid"))
      case e: ParseException => Left(Seq(shortDescription.capitalize + " expects a Scala duration but was given '" + arg + "'"))
      case e: Throwable => Left(Seq(shortDescription.capitalize + " failed when given '" + arg + "'. " + e.getMessage))
    }
  // number of tokens to read: 0 for no match, 2 for "--foo 1", 1 for "--foo:1"
  private[scopt] def shortOptTokens(arg: String): Int =
    _shortOpt match {
      case Some(c) if arg == "-" + shortOptOrBlank => 1 + read.tokensToRead
      case Some(c) if arg startsWith ("-" + shortOptOrBlank + ":") => 1
      case Some(c) if arg startsWith ("-" + shortOptOrBlank + "=") => 1
      case _ => 0
    }
  // Same contract as shortOptTokens but for the "--name" form.
  private[scopt] def longOptTokens(arg: String): Int =
    if (arg == fullName) 1 + read.tokensToRead
    else if ((arg startsWith (fullName + ":")) || (arg startsWith (fullName + "="))) 1
    else 0
  private[scopt] def tokensToRead(i: Int, args: Seq[String]): Int =
    if (i >= args.length || kind != Opt) 0
    else args(i) match {
      case arg if longOptTokens(arg) > 0 => longOptTokens(arg)
      case arg if shortOptTokens(arg) > 0 => shortOptTokens(arg)
      case _ => 0
    }
  // Extracts this option's raw value string at position i: either the next
  // token ("--foo 1") or the suffix after ':'/'=' ("--foo:1", "--foo=1").
  private[scopt] def apply(i: Int, args: Seq[String]): Either[String, String] =
    if (i >= args.length || kind != Opt) Left("Option does not match")
    else args(i) match {
      case arg if longOptTokens(arg) == 2 || shortOptTokens(arg) == 2 =>
        token(i + 1, args) map {Right(_)} getOrElse Left("Missing value after " + arg)
      case arg if longOptTokens(arg) == 1 && read.tokensToRead == 1 =>
        Right(arg drop (fullName + ":").length)
      case arg if shortOptTokens(arg) == 1 && read.tokensToRead == 1 =>
        Right(arg drop ("-" + shortOptOrBlank + ":").length)
      case _ => Right("")
    }
  private[scopt] def token(i: Int, args: Seq[String]): Option[String] =
    if (i >= args.length || kind != Opt) None
    else Some(args(i))
  // One usage-text entry for this definition, formatted per kind and arity.
  private[scopt] def usage: String =
    kind match {
      case Head | Note | Check => _desc
      case Cmd =>
        "Command: " + _parser.commandExample(Some(this)) + NL + _desc
      case Arg => WW + name + NLTB + _desc
      case Opt if read.arity == 2 =>
        WW + (_shortOpt map { o => "-" + o + ":" + keyValueString + " | " } getOrElse { "" }) +
        fullName + ":" + keyValueString + NLTB + _desc
      case Opt if read.arity == 1 =>
        WW + (_shortOpt map { o => "-" + o + " " + valueString + " | " } getOrElse { "" }) +
        fullName + " " + valueString + NLTB + _desc
      case Opt =>
        WW + (_shortOpt map { o => "-" + o + " | " } getOrElse { "" }) +
        fullName + NLTB + _desc
    }
  private[scopt] def keyValueString: String = (_keyName getOrElse defaultKeyName) + "=" + valueString
  private[scopt] def valueString: String = (_valueName getOrElse defaultValueName)
  def shortDescription: String =
    kind match {
      case Opt => "option " + fullName
      case Cmd => "command " + fullName
      case _ => "argument " + fullName
    }
  def fullName: String =
    kind match {
      case Opt => "--" + name
      case _ => name
    }
  // Name as shown in the usage example; optional args are bracketed.
  private[scopt] def argName: String =
    kind match {
      case Arg if getMinOccurs == 0 => "[" + fullName + "]"
      case _ => fullName
    }
}
private[scopt] object OptionDef {
  // Sentinel for "no upper bound" on occurrences (see unbounded()).
  val UNBOUNDED = Int.MaxValue
  val NL = System.getProperty("line.separator")
  // Usage-text layout constants: leading indent (WW) and description tab (TB).
  val WW = " "
  val TB = " "
  val NLTB = NL + TB
  val NLNL = NL + NL
  val defaultKeyName = "<key>"
  val defaultValueName = "<value>"
  // Monotonic id source for OptionDef instances.
  val atomic = new java.util.concurrent.atomic.AtomicInteger
  def generateId: Int = atomic.incrementAndGet
  // Shared "no error" value for Either-based validations.
  def makeSuccess[A]: Either[A, Unit] = Right(())
}
| MrVPlussOne/Muse-CGH | src/scopt/options.scala | Scala | mit | 26,341 |
/*
Copyright 2013 Stephen K Samuel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.sksamuel.scrimage.filter
import com.sksamuel.scrimage.BufferedOpFilter
/** @author Stephen Samuel */
/** @author Stephen Samuel */
class TwirlFilter(angle: Double, radius: Int, centerX: Float, centerY: Float) extends BufferedOpFilter {
  // Delegates to the JH Labs twirl operation, narrowing the numeric
  // parameters to Float. NOTE(review): the companion defaults the centre to
  // 0.5f, suggesting coordinates relative to the image size — confirm against
  // the jhlabs TwirlFilter documentation.
  val op = new thirdparty.jhlabs.image.TwirlFilter()
  op.setCentreX(centerX)
  op.setCentreY(centerY)
  op.setRadius(radius.toFloat)
  op.setAngle(angle.toFloat)
}
object TwirlFilter {
  /** Twirl with the default angle of Pi/1.5 radians (120 degrees). */
  def apply(radius: Int): TwirlFilter = apply(Math.PI / 1.5, radius)
  /** Fully parameterised factory; the centre defaults to (0.5, 0.5). */
  def apply(angle: Double, radius: Int, centerX: Float = 0.5f, centerY: Float = 0.5f): TwirlFilter =
    new TwirlFilter(angle, radius, centerX, centerY)
}
| carlosFattor/scrimage | scrimage-filters/src/main/scala/com/sksamuel/scrimage/filter/TwirlFilter.scala | Scala | apache-2.0 | 1,225 |
package com.twitter.finagle.builder
import com.twitter.conversions.time._
import com.twitter.finagle.ChannelClosedException
import com.twitter.finagle.Service
import com.twitter.finagle.{Codec, CodecFactory}
import com.twitter.io.Charsets
import com.twitter.util.{Await, Future}
import java.net.InetSocketAddress
import org.jboss.netty.channel.{Channels, ChannelPipelineFactory}
import org.jboss.netty.handler.codec.frame.{Delimiters, DelimiterBasedFrameDecoder}
import org.jboss.netty.handler.codec.string.{StringEncoder, StringDecoder}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
/**
* This Codec is a newline (\\n) delimited line-based protocol. Here we re-use existing
* encoders/decoders as provided by Netty. This codec allows us to make requests which
* are incomplete due to missing newline (\\n)
*/
/** Shared default instance of the line-based test codec below. */
object ServerChannelConfigCodec extends ServerChannelConfigCodec
class ServerChannelConfigCodec extends CodecFactory[String, String] {
  // Server side: frames inbound bytes on newline delimiters (max frame
  // length 100 bytes) and converts frames to/from UTF-8 strings. A request
  // without a trailing newline therefore never completes a frame.
  def server = Function.const {
    new Codec[String, String] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline = {
          val pipeline = Channels.pipeline()
          pipeline.addLast("line",
            new DelimiterBasedFrameDecoder(100, Delimiters.lineDelimiter: _*))
          pipeline.addLast("stringDecoder", new StringDecoder(Charsets.Utf8))
          pipeline.addLast("stringEncoder", new StringEncoder(Charsets.Utf8))
          pipeline
        }
      }
    }
  }
  // Client side: plain UTF-8 string codec with no framing, which lets the
  // tests send requests lacking the newline the server is waiting for.
  def client = Function.const {
    new Codec[String, String] {
      def pipelineFactory = new ChannelPipelineFactory {
        def getPipeline = {
          val pipeline = Channels.pipeline()
          pipeline.addLast("stringEncode", new StringEncoder(Charsets.Utf8))
          pipeline.addLast("stringDecode", new StringDecoder(Charsets.Utf8))
          pipeline
        }
      }
    }
  }
}
@RunWith(classOf[JUnitRunner])
class ServerChannelConfigurationTest extends FunSuite {
  // Echo service. The tests send requests without the newline delimiter, so
  // the server's frame decoder never completes a message and the connection
  // should instead be closed by the configured lifetime/idle limit.
  val service = new Service[String, String] {
    def apply(request: String) = Future.value(request)
  }
  // Timing-sensitive; skipped when the SKIP_FLAKY system property is set.
  if (!sys.props.contains("SKIP_FLAKY")) test("close connection after max life time duration") {
    // create a server builder which will close connections in 2 seconds
    val address = new InetSocketAddress(0)
    val server = ServerBuilder()
      .codec(ServerChannelConfigCodec)
      .bindTo(address)
      .name("FinagleServer")
      .hostConnectionMaxLifeTime(2 seconds)
      .build(service)
    val client: Service[String, String] = ClientBuilder()
      .codec(ServerChannelConfigCodec)
      .hosts(server.localAddress)
      .hostConnectionLimit(1)
      .build()
    // Issue a request which is NOT newline-delimited. Server should close connection
    // after waiting for 2 seconds for a new line
    intercept[ChannelClosedException] {
      Await.result(client("123"), 5.seconds)
    }
    // NOTE(review): close() returns a Future that is not awaited here.
    server.close()
  }
  test("close connection after max idle time duration") {
    // create a server builder which will close idle connections in 2 seconds
    val address = new InetSocketAddress(0)
    val server = ServerBuilder()
      .codec(ServerChannelConfigCodec)
      .bindTo(address)
      .name("FinagleServer")
      .hostConnectionMaxIdleTime(2 seconds)
      .build(service)
    val client: Service[String, String] = ClientBuilder()
      .codec(ServerChannelConfigCodec)
      .hosts(server.localAddress)
      .hostConnectionLimit(1)
      .build()
    // Issue a request which is NOT newline-delimited. Server should close connection
    // after waiting for 2 seconds for a new line
    intercept[ChannelClosedException] {
      Await.result(client("123"), 5.seconds)
    }
    server.close()
  }
}
| JustinTulloss/finagle | finagle-core/src/test/scala/com/twitter/finagle/builder/ServerChannelConfigurationTest.scala | Scala | apache-2.0 | 3,739 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import time._
import SpanSugar._
private[scalatest] final case class RunningTest(
  suiteName: String,
  suiteId: String,
  testName: String,
  startTimeStamp: Long
) extends Comparable[RunningTest] {

  // Orders by suiteId first, then testName. Deliberately NOT ordered by
  // timestamp: when ordering depended on the timestamp, lookups in a
  // ConcurrentSkipListSet could not find existing entries.
  def compareTo(other: RunningTest): Int =
    suiteId.compareTo(other.suiteId) match {
      case 0       => testName.compareTo(other.testName)
      case nonZero => nonZero
    }

  // Equality is consistent with compareTo: only suiteId and testName count;
  // the start timestamp does not participate.
  override def equals(other: Any): Boolean =
    other match {
      case that: RunningTest => that.suiteId == suiteId && that.testName == testName
      case _                 => false
    }

  // Must mirror equals: combines only suiteId and testName.
  override def hashCode: Int =
    41 * (41 + suiteId.hashCode) + testName.hashCode

  // Snapshots this running test as a Slowpoke whose duration is the time
  // elapsed between startTimeStamp and the supplied current timestamp.
  def toSlowpoke(currentTimeStamp: Long): Slowpoke =
    Slowpoke(
      suiteName = suiteName,
      suiteId = suiteId,
      testName = testName,
      Span(currentTimeStamp - startTimeStamp, Millis)
    )
}
| travisbrown/scalatest | src/main/scala/org/scalatest/RunningTest.scala | Scala | apache-2.0 | 1,665 |
package core.host
import core.spatial.{Viewable, Zone}
// Base class for zone hosts: a concrete subclass supplies the per-zone
// implementation. An instance is aggregated by a HostActor, which delegates
// calls to it.
abstract class Host(val zone: Zone) extends InputReceiver{
  // Returns the viewable entities of `zone` for the requester identified by `id`.
  // NOTE(review): whether `id` is a client id or an entity id is not visible
  // here — confirm against callers before relying on it.
  def getViewableFromZone(id: String , zone : Zone) : Iterable[Viewable]
}
| DeltaIMT/Delta | framework/src/main/scala/core/host/Host.scala | Scala | mit | 338 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scalaShadowing
/**
* The `scala.language` object controls the language features available to the programmer, as proposed in the
* [[https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit '''SIP-18 document''']].
*
* Each of these features has to be explicitly imported into the current scope to become available:
* {{{
* import language.postfixOps // or language._
* List(1, 2, 3) reverse
* }}}
*
* The language features are:
* - [[dynamics `dynamics`]] enables defining calls rewriting using the [[scala.Dynamic `Dynamic`]] trait
* - [[postfixOps `postfixOps`]] enables postfix operators
* - [[reflectiveCalls `reflectiveCalls`]] enables using structural types
* - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members
* - [[higherKinds `higherKinds`]] enables writing higher-kinded types
* - [[existentials `existentials`]] enables writing existential types
* - [[experimental `experimental`]] contains newer features that have not yet been tested in production
*
* and, for dotty:
*
 *  - [[Scala2Compat `Scala2Compat`]] enables a backwards compatibility mode for Scala 2
 *  - [[noAutoTupling `noAutoTupling`]] disables auto-tupling
 *  - [[strictEquality `strictEquality`]] enables strict equality
*
* @groupname production Language Features
* @groupname experimental Experimental Language Features
* @groupprio experimental 10
*
* Dotty-specific features come at the end.
*
* Note: Due to the more restricted language import mechanism in dotty (only
* imports count, implicits are disregarded) we don't need the constructions
* of the inherited language features. A simple object for each feature is
* sufficient.
*/
object language {
  import languageFeature._
  /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can
   *  be defined. Unless dynamics is enabled, a definition of a class, trait,
   *  or object that has Dynamic as a base trait is rejected. Dynamic member
   *  selection of existing subclasses of trait Dynamic are unaffected;
   *  they can be used anywhere.
   *
   *  '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing
   *  with dynamic languages.
   *
   *  '''Why control it?''' Dynamic member selection can undermine static checkability
   *  of programs. Furthermore, dynamic member selection often relies on reflection,
   *  which is not available on all platforms.
   *
   *  @group production
   */
  implicit lazy val dynamics: dynamics = languageFeature.dynamics
  /** Only where enabled, postfix operator notation `(expr op)` will be allowed.
   *
   *  '''Why keep the feature?''' Several DSLs written in Scala need the notation.
   *
   *  '''Why control it?''' Postfix operators interact poorly with semicolon inference.
   *  Most programmers avoid them for this reason.
   *
   *  @group production
   */
  implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
  /** Only where enabled, accesses to members of structural types that need
   *  reflection are supported. Reminder: A structural type is a type of the form
   *  `Parents { Decls }` where `Decls` contains declarations of new members that do
   *  not override any member in `Parents`. To access one of these members, a
   *  reflective call is needed.
   *
   *  '''Why keep the feature?''' Structural types provide great flexibility because
   *  they avoid the need to define inheritance hierarchies a priori. Besides,
   *  their definition falls out quite naturally from Scala’s concept of type refinement.
   *
   *  '''Why control it?''' Reflection is not available on all platforms. Popular tools
   *  such as ProGuard have problems dealing with it. Even where reflection is available,
   *  reflective dispatch can lead to surprising performance degradations.
   *
   *  @group production
   */
  implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
  /** Only where enabled, definitions of legacy implicit conversions and certain uses
   *  of implicit conversions are allowed.
   *
   *  A legacy implicit conversion is an implicit value of unary function type `A => B`,
   *  or an implicit method that has in its first parameter section a single,
   *  non-implicit parameter. Examples:
   *
   *  {{{
   *     implicit def stringToInt(s: String): Int = s.length
   *     implicit val conv = (s: String) => s.length
   *     implicit def listToX(xs: List[T])(implicit f: T => X): X = ...
   *  }}}
   *
   *  Implicit values of other types are not affected, and neither are implicit
   *  classes. In particular, given instances of the scala.Conversion class can be
   *  defined without having to import the language feature.
   *
   *  The language import is also required to enable _uses_ of implicit conversions
   *  unless the conversion in question is co-defined with the type to which it maps.
   *  Co-defined means: defined in the companion object of the class of the result type.
   *  Examples:
   *
   *  {{{
   *      class A
   *      class B
   *      object B {
   *        given a2b as Conversion[A, B] { ... }
   *      }
   *      object C {
   *        given b2a as Conversion[B, A] { ... }
   *      }
   *      import given B._
   *      import given C._
   *      val x: A = new B     // language import required
   *      val x: B = new A     // no import necessary since a2b is co-defined with B
   *  }}}
   *
   *  '''Why keep the feature?''' Implicit conversions are central to many aspects
   *  of Scala’s core libraries.
   *
   *  '''Why control it?''' Implicit conversions are known to cause many pitfalls
   *  if over-used. This holds in particular for implicit conversions defined after
   *  the fact between unrelated types.
   *
   *  @group production
   */
  implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
  /** Only where this flag is enabled, higher-kinded types can be written.
   *
   *  '''Why keep the feature?''' Higher-kinded types enable the definition of very general
   *  abstractions such as functor, monad, or arrow. A significant set of advanced
   *  libraries relies on them. Higher-kinded types are also at the core of the
   *  scala-virtualized effort to produce high-performance parallel DSLs through staging.
   *
   *  '''Why control it?''' Higher kinded types in Scala lead to a Turing-complete
   *  type system, where compiler termination is no longer guaranteed. They tend
   *  to be useful mostly for type-level computation and for highly generic design
   *  patterns. The level of abstraction implied by these design patterns is often
   *  a barrier to understanding for newcomers to a Scala codebase. Some syntactic
   *  aspects of higher-kinded types are hard to understand for the uninitiated and
   *  type inference is less effective for them than for normal types. Because we are
   *  not completely happy with them yet, it is possible that some aspects of
   *  higher-kinded types will change in future versions of Scala. So an explicit
   *  enabling also serves as a warning that code involving higher-kinded types
   *  might have to be slightly revised in the future.
   *
   *  @group production
   */
  implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
  /** Only where enabled, existential types that cannot be expressed as wildcard
   *  types can be written and are allowed in inferred types of values or return
   *  types of methods. Existential types with wildcard type syntax such as `List[_]`,
   *  or `Map[String, _]` are not affected.
   *
   *  '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard
   *  types and raw types and the erased types of run-time values.
   *
   *  '''Why control it?''' Having complex existential types in a code base usually makes
   *  application code very brittle, with a tendency to produce type errors with
   *  obscure error messages. Therefore, going overboard with existential types
   *  is generally perceived not to be a good idea. Also, complicated existential types
   *  might be no longer supported in a future simplification of the language.
   *
   *  @group production
   */
  implicit lazy val existentials: existentials = languageFeature.existentials
  /** The experimental object contains features that have been recently added but have not
   *  been thoroughly tested in production yet.
   *
   *  Experimental features '''may undergo API changes''' in future releases, so production
   *  code should not rely on them.
   *
   *  Programmers are encouraged to try out experimental features and
   *  [[http://issues.scala-lang.org report any bugs or API inconsistencies]]
   *  they encounter so they can be improved in future releases.
   *
   *  @group experimental
   */
  object experimental {
    import languageFeature.experimental._
    /** Where enabled, macro definitions are allowed. Macro implementations and
     *  macro applications are unaffected; they can be used anywhere.
     *
     *  '''Why introduce the feature?''' Macros promise to make the language more regular,
     *  replacing ad-hoc language constructs with a general powerful abstraction
     *  capability that can express them. Macros are also a more disciplined and
     *  powerful replacement for compiler plugins.
     *
     *  '''Why control it?''' For their very power, macros can lead to code that is hard
     *  to debug and understand.
     */
    implicit lazy val macros: macros = languageFeature.experimental.macros
  }
  /** Where imported, a backwards compatibility mode for Scala2 is enabled */
  object Scala2Compat
  /** Where imported, auto-tupling is disabled */
  object noAutoTupling
  /** Where imported, loose equality using eqAny is disabled */
  object strictEquality
  /** Where imported, ad hoc extensions of non-open classes in other
   *  compilation units are allowed.
   *
   *  '''Why control the feature?''' Ad-hoc extensions should usually be avoided
   *  since they typically cannot rely on an "internal" contract between a class
   *  and its extensions. Only open classes need to specify such a contract.
   *  Ad-hoc extensions might break for future versions of the extended class,
   *  since the extended class is free to change its implementation without
   *  being constrained by an internal contract.
   *
   *  '''Why allow it?''' An ad-hoc extension can sometimes be necessary,
   *  for instance when mocking a class in a testing framework, or to work
   *  around a bug or missing feature in the original class. Nevertheless,
   *  such extensions should be limited in scope and clearly documented.
   *  That's why the language import is required for them.
   */
  object adhocExtensions
  /** Source version markers: importing one of these selects the corresponding
   *  language source version; the `-migration` variants additionally enable the
   *  migration mode used when porting code to that version.
   */
  object `3.0-migration`
  object `3.0`
  object `3.1-migration`
  object `3.1`
}
| som-snytt/dotty | library/src/scalaShadowing/language.scala | Scala | apache-2.0 | 11,632 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.collection
package mutable
import java.util.ConcurrentModificationException
import scala.collection.generic.DefaultSerializable
/**
  * @define Coll `OpenHashMap`
  * @define coll open hash map
  */
@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0")
@SerialVersionUID(3L)
object OpenHashMap extends MapFactory[OpenHashMap] {
  /** Creates a new, empty $coll. */
  def empty[K, V] = new OpenHashMap[K, V]
  /** Creates a $coll containing all key/value pairs of `it`. */
  def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it
  /** Returns a builder that grows an empty $coll in place. */
  def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] =
    new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty)
  /** A hash table entry.
    *
    * The entry is occupied if and only if its `value` is a `Some`;
    * deleted if and only if its `value` is `None`.
    * If its `key` is not the default value of type `Key`, the entry is occupied.
    * If the entry is occupied, `hash` contains the hash value of `key`.
    */
  final private class OpenEntry[Key, Value](var key: Key,
                                            var hash: Int,
                                            var value: Option[Value])
}
/** A mutable hash map based on an open addressing method. The precise scheme is
  * undefined, but it should make a reasonable effort to ensure that an insert
  * with consecutive hash codes is not unnecessarily penalised. In particular,
  * mappings of consecutive integer keys should work without significant
  * performance loss.
  *
  * @tparam Key type of the keys in this map.
  * @tparam Value type of the values in this map.
  * @param initialSize the initial size of the internal hash table.
  *
  * @define Coll `OpenHashMap`
  * @define coll open hash map
  * @define mayNotTerminateInf
  * @define willNotTerminateInf
  */
@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0")
class OpenHashMap[Key, Value](initialSize : Int)
  extends AbstractMap[Key, Value]
    with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]]
    with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]]
    with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable]
    with DefaultSerializable {
  import OpenHashMap.OpenEntry
  private type Entry = OpenEntry[Key, Value]
  /** A default constructor creates a hashmap with initial size `8`.
    */
  def this() = this(8)
  override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap
  // Capacity is always a power of two, so `hash & mask` selects a slot without
  // the cost of a modulo operation.
  private[this] val actualInitialSize = HashTable.nextPositivePowerOfTwo(initialSize)
  private[this] var mask = actualInitialSize - 1
  /** The hash table.
    *
    * The table's entries are initialized to `null`, indication of an empty slot.
    * A slot is either deleted or occupied if and only if the entry is non-`null`.
    */
  private[this] var table = new Array[Entry](actualInitialSize)
  // Count of occupied (non-deleted) entries.
  private[this] var _size = 0
  // Count of deleted slots still in the table; they are reclaimed by growTable().
  private[this] var deleted = 0
  // Used for tracking inserts so that iterators can determine if concurrent modification has occurred.
  private[this] var modCount = 0
  override def size = _size
  override def knownSize: Int = size
  private[this] def size_=(s : Int): Unit = _size = s
  override def isEmpty: Boolean = _size == 0
  /** Returns a mangled hash code of the provided key. */
  protected def hashOf(key: Key) = {
    // Fold high-order bits into the low-order bits, since only the low bits
    // (via `hash & mask`) pick the slot.
    var h = key.##
    h ^= ((h >>> 20) ^ (h >>> 12))
    h ^ (h >>> 7) ^ (h >>> 4)
  }
  /** Increase the size of the table.
    * Copy only the occupied slots, effectively eliminating the deleted slots.
    */
  private[this] def growTable() = {
    val oldSize = mask + 1
    // Quadruple the capacity on every resize.
    val newSize = 4 * oldSize
    val oldTable = table
    table = new Array[Entry](newSize)
    mask = newSize - 1
    oldTable.foreach( entry =>
      if (entry != null && entry.value != None)
        table(findIndex(entry.key, entry.hash)) = entry )
    deleted = 0
  }
  /** Return the index of the first slot in the hash table (in probe order)
    * that is, in order of preference, either occupied by the given key, deleted, or empty.
    *
    * @param hash hash value for `key`
    */
  private[this] def findIndex(key: Key, hash: Int): Int = {
    var index = hash & mask
    // Probe offset: it grows by one each iteration, so successive probes step
    // by 1, 2, 3, ... from the previous slot.
    var j = 0
    /** Index of the first slot containing a deleted entry, or -1 if none found yet. */
    var firstDeletedIndex = -1
    var entry = table(index)
    while (entry != null) {
      if (entry.hash == hash && entry.key == key && entry.value != None)
        return index
      if (firstDeletedIndex == -1 && entry.value == None)
        firstDeletedIndex = index
      j += 1
      index = (index + j) & mask
      entry = table(index)
    }
    // Prefer reusing the earliest deleted slot over claiming a fresh empty one.
    if (firstDeletedIndex == -1) index else firstDeletedIndex
  }
  // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing.
  override def update(key: Key, value: Value): Unit = put(key, value)
  @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0")
  def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this }
  @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0")
  def subtractOne (key: Key): this.type = { remove(key); this }
  override def put(key: Key, value: Value): Option[Value] =
    put(key, hashOf(key), value)
  private def put(key: Key, hash: Int, value: Value): Option[Value] = {
    // Resize once occupied + deleted slots exceed half the capacity, keeping
    // probe sequences short.
    if (2 * (size + deleted) > mask) growTable()
    val index = findIndex(key, hash)
    val entry = table(index)
    if (entry == null) {
      table(index) = new OpenEntry(key, hash, Some(value))
      modCount += 1
      size += 1
      None
    } else {
      val res = entry.value
      if (entry.value == None) {
        // Reviving a deleted slot for a new mapping.
        entry.key = key
        entry.hash = hash
        size += 1
        deleted -= 1
        modCount += 1
      }
      entry.value = Some(value)
      res
    }
  }
  /** Delete the hash table slot contained in the given entry. */
  @`inline`
  private[this] def deleteSlot(entry: Entry) = {
    entry.key = null.asInstanceOf[Key]
    entry.hash = 0
    entry.value = None
    size -= 1
    deleted += 1
  }
  override def remove(key : Key): Option[Value] = {
    val entry = table(findIndex(key, hashOf(key)))
    if (entry != null && entry.value != None) {
      val res = entry.value
      deleteSlot(entry)
      res
    } else None
  }
  def get(key : Key) : Option[Value] = {
    val hash = hashOf(key)
    var index = hash & mask
    var entry = table(index)
    var j = 0
    while(entry != null){
      if (entry.hash == hash &&
          entry.key == key){
        // A matching slot that was deleted still holds `None`, which is the
        // correct result for an absent key.
        return entry.value
      }
      j += 1
      index = (index + j) & mask
      entry = table(index)
    }
    None
  }
  /** An iterator over the elements of this map. Use of this iterator follows
    * the same contract for concurrent modification as the foreach method.
    *
    * @return the iterator
    */
  def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] {
    override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get)
  }
  override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] {
    override protected def nextResult(node: Entry): Key = node.key
  }
  override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] {
    override protected def nextResult(node: Entry): Value = node.value.get
  }
  private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] {
    private[this] var index = 0
    private[this] val initialModCount = modCount
    private[this] def advance(): Unit = {
      // Fail fast if an insert happened after this iterator was created
      // (modCount only tracks inserts; deletions are permitted mid-iteration).
      if (initialModCount != modCount) throw new ConcurrentModificationException
      while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
    }
    def hasNext = {advance(); index <= mask }
    def next() = {
      advance()
      val result = table(index)
      index += 1
      nextResult(result)
    }
    protected def nextResult(node: Entry): A
  }
  override def clone() = {
    val it = new OpenHashMap[Key, Value]
    foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
    it
  }
  /** Loop over the key, value mappings of this map.
    *
    * The behaviour of modifying the map during an iteration is as follows:
    *  - Deleting a mapping is always permitted.
    *  - Changing the value of mapping which is already present is permitted.
    *  - Anything else is not permitted. It will usually, but not always, throw an exception.
    *
    * @tparam U The return type of the specified function `f`, return result of which is ignored.
    * @param f The function to apply to each key, value mapping.
    */
  override def foreach[U](f : ((Key, Value)) => U): Unit = {
    val startModCount = modCount
    foreachUndeletedEntry(entry => {
      if (modCount != startModCount) throw new ConcurrentModificationException
      f((entry.key, entry.value.get))}
    )
  }
  override def foreachEntry[U](f : (Key, Value) => U): Unit = {
    val startModCount = modCount
    foreachUndeletedEntry(entry => {
      if (modCount != startModCount) throw new ConcurrentModificationException
      f(entry.key, entry.value.get)}
    )
  }
  private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = {
    table.foreach(entry => if (entry != null && entry.value != None) f(entry))
  }
  override def mapValuesInPlace(f : (Key, Value) => Value): this.type = {
    foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
    this
  }
  override def filterInPlace(f : (Key, Value) => Boolean): this.type = {
    foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry))
    this
  }
  override protected[this] def stringPrefix = "OpenHashMap"
}
| martijnhoekstra/scala | src/library/scala/collection/mutable/OpenHashMap.scala | Scala | apache-2.0 | 10,156 |
/**
*
* GooglePlayServiceHelper
* Ledger wallet
*
* Created by Pierre Pollastri on 11/02/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package co.ledger.wallet.core.utils
import android.app.Activity
import android.content.Context
import com.google.android.gms.common.{ConnectionResult, GooglePlayServicesUtil}
import com.google.android.gms.gcm.GoogleCloudMessaging
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Promise, Future}
object GooglePlayServiceHelper {
  // Request code handed to the Play Services recovery dialog so the launching
  // Activity can recognize the result in onActivityResult.
  val PlayServicesResolutionRequest = 9000
  // SharedPreferences file and keys used to cache the GCM registration.
  val GooglePlayServicePreferences = "GooglePlayServicePreferences"
  val GcmRegistrationIdPreferenceKey = "GcmRegistrationIdPreference"
  val GcmRegistrationVersionPreferenceKey = "GcmRegistrationVersionPreference"
  // True when Google Play Services is available on this device.
  def isGooglePlayServicesAvailable(implicit context: Context): Boolean = GooglePlayServicesUtil.isGooglePlayServicesAvailable(context) == ConnectionResult.SUCCESS
  // Like isGooglePlayServicesAvailable, but additionally shows the recovery
  // dialog when the error is user-recoverable and the context is an Activity.
  def checkPlayServices(implicit context: Context): Boolean = {
    GooglePlayServicesUtil.isGooglePlayServicesAvailable(context) match {
      case ConnectionResult.SUCCESS => true
      case error: Int => {
        if (GooglePlayServicesUtil.isUserRecoverableError(error) && context.isInstanceOf[Activity]) {
          GooglePlayServicesUtil.getErrorDialog(error, context.asInstanceOf[Activity], PlayServicesResolutionRequest).show()
        }
        false
      }
    }
  }
  // Returns the GCM entry point, or None when Play Services is unavailable.
  def getGcmInstance(implicit context: Context): Option[GoogleCloudMessaging] = {
    if (checkPlayServices)
      Some(GoogleCloudMessaging.getInstance(context))
    else
      None
  }
  // Returns the cached GCM registration, re-registering on a background thread
  // whenever the cached registration was made by a different app version. The
  // distinct -1/-2 defaults guarantee a mismatch when either value is missing.
  def getGcmRegistrationId(implicit context: Context): Future[RegistrationId] = {
    val registrationVersion = context
      .getSharedPreferences(GooglePlayServicePreferences, Context.MODE_PRIVATE)
      .getInt(GcmRegistrationVersionPreferenceKey, -1)
    if (registrationVersion != AndroidUtils.getAppVersion.getOrElse(-2)) {
      Future {
        // NOTE(review): `.get` throws when Play Services is absent, failing the
        // returned Future — confirm callers handle the failure case.
        val gcm = getGcmInstance.get
        // Hard-coded GCM sender id (project number).
        val regId = gcm.register("1043077126300")
        context
          .getSharedPreferences(GooglePlayServicePreferences, Context.MODE_PRIVATE)
          .edit()
          .putString(GcmRegistrationIdPreferenceKey, regId)
          .putInt(GcmRegistrationVersionPreferenceKey, AndroidUtils.getAppVersion.getOrElse(0))
          .commit()
        inflateRegistrationId(isNew = true, context)
      }
    } else {
      val p = Promise[RegistrationId]()
      p.success(inflateRegistrationId(isNew = false, context))
      p.future
    }
  }
  // Reads the cached registration id from preferences. `value` may be null if
  // nothing has been stored yet, so this is only called after a registration
  // (fresh or cached) is known to exist.
  private[this] def inflateRegistrationId(isNew: Boolean, context: Context): RegistrationId = {
    val preferences = context.getSharedPreferences(GooglePlayServicePreferences, Context.MODE_PRIVATE)
    val value = preferences.getString(GcmRegistrationIdPreferenceKey, null)
    new RegistrationId(value, isNew)
  }
  // value: the GCM registration id; isNew: true when freshly obtained from GCM
  // rather than read from the preference cache.
  case class RegistrationId(value: String, isNew: Boolean)
}
| LedgerHQ/ledger-wallet-android | app/src/main/scala/co/ledger/wallet/core/utils/GooglePlayServiceHelper.scala | Scala | mit | 4,006 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.sumobot.plugins.beer
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.core.util.TimeHelpers
import com.sumologic.sumobot.plugins.BotPlugin
import scala.util.Random
class Beer extends BotPlugin with TimeHelpers {

  override protected def help: String = "I'll voice my opinion about certain beverages when appropriate."

  // Matches any message whose text mentions "beer" (or "beers") anywhere.
  private val BeerTalk = matchText(".*(beer[s]?).*")

  // Canned remarks; one is picked at random each time the bot chimes in.
  private val Remarks = List(
    "Ohh, what I wouldn't give for a beer.",
    "I'm getting another beer.",
    "Robots are made out of old beer cans.",
    "Ah, Jeez, let's just pray I have the energy to get myself another beer.",
    "Hmmm... okay, but I'll need ten kegs of beer, a continuous tape of \"Louie, Louie,\" and a regulation two-story panty-raid ladder.",
    "Ah, beer. So many choices, and it makes so little difference.",
    "Hey, that was my last beer! You bastard! I'll kill you!"
  )

  // Epoch millis of our last remark; throttles replies to at most one a minute.
  private var lastRemarkTime = 0L

  override protected def receiveIncomingMessage: ReceiveIncomingMessage = {
    case message@IncomingMessage(BeerTalk(_), _, _, _, _, _, _) =>
      // Chime in at most once per minute, and then only 8 times out of 10.
      if (now - lastRemarkTime > 60000 && Random.nextInt(10) < 8) {
        lastRemarkTime = now
        message.say(chooseRandom(Remarks: _*))
      }
  }
}
| SumoLogic/sumobot | src/main/scala/com/sumologic/sumobot/plugins/beer/Beer.scala | Scala | apache-2.0 | 2,095 |
/*
* Copyright 2015 Michael Cuthbert
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.creak.docversioning
import net.liftweb.json._
import net.liftweb.json.Extraction._
import net.liftweb.json.JsonAST.JValue
/**
* @author Michael Cuthbert on 2/25/15.
*/
case class JVerDoc(_id:String, version:Int, changes:Map[String, List[FieldChange]], owner:Option[String]=None) {

  implicit val formats = DefaultFormats

  /**
   * Gets the field changes for a list of properties
   *
   * @param props The list of properties that you are looking for
   * @param version The version you want, if None then will use the latest version
   * @return the resolved change for each property that has one at or before the version
   */
  def getChanges(props: List[String], version: Option[Int] = None): List[FieldChange] =
    props flatMap { prop => getFieldChange(prop, version) }

  /**
   * Gets the field change for a specific version
   *
   * @param prop Property you are requesting
   * @param version the version you want, if None then will use the latest version
   * @return the newest change at or before the requested version, or None
   */
  def getFieldChange(prop: String, version: Option[Int] = None): Option[FieldChange] = {
    val reqVer = version.getOrElse(this.version)
    changes.get(prop).flatMap { history =>
      // headOption rather than head: a property whose every recorded change is
      // newer than the requested version yields None instead of throwing a
      // NoSuchElementException (the original implementation crashed here).
      history.filter(_.version <= reqVer).sortBy(-_.version).headOption
    }
  }

  /**
   * Updates the document and returns a new JVerDoc at the next version.
   *
   * @param newUpdates property name -> new value
   * @param userId optional id of the user making the change
   * @return a copy of this document at version + 1
   */
  def update(newUpdates: Map[String, Any], userId: Option[String] = None): JVerDoc = {
    val newVersion = version + 1
    // Append a FieldChange to every property that received a new value.
    val updatedExisting = changes.map { case (prop, history) =>
      val appended = newUpdates.get(prop) match {
        case Some(value) => history ::: List(FieldChange(prop, value, newVersion, userId))
        case None => history
      }
      prop -> appended
    }
    // Bug fix: properties not previously tracked were silently dropped by the
    // original implementation; start a fresh history for them instead.
    val newProperties = newUpdates.collect {
      case (prop, value) if !changes.contains(prop) =>
        prop -> List(FieldChange(prop, value, newVersion, userId))
    }
    copy(version = newVersion, changes = updatedExisting ++ newProperties)
  }

  /**
   * Gets the property history for a specific property
   *
   * @param prop The property you are searching for
   * @param history how far back you want to go, 0 means infinite
   * @return the full history in stored order when history == 0, otherwise the
   *         `history` most recent changes, newest first
   */
  def getPropertyHistory(prop: String, history: Int = 0): List[FieldChange] =
    changes.get(prop) match {
      case Some(l) =>
        if (history == 0) l else l.reverse.take(history)
      case None => List.empty[FieldChange]
    }

  /**
   * Gets all the changes made exactly in the given version
   *
   * @param version the version whose changes are wanted
   * @return property name -> the change recorded at that version, omitting
   *         properties that did not change in it
   */
  def getVersionChanges(version: Int): Map[String, FieldChange] =
    changes.flatMap { case (prop, history) =>
      history.find(_.version == version).map(prop -> _)
    }

  /**
   * Gets the effective state at a version: for every property, the latest
   * change at or before that version. This differs from getVersionChanges,
   * which only reports changes made exactly in the given version.
   *
   * @param version the version wanted; None means the current version
   * @return property name -> its effective change at the version
   */
  def getChangeVersion(version: Option[Int] = None): Map[String, FieldChange] = {
    val reqVer = version.getOrElse(this.version)
    changes.flatMap { case (prop, history) =>
      // As in getFieldChange, properties with no change at or before the
      // requested version are omitted instead of triggering a head-of-empty crash.
      history.filter(_.version <= reqVer).sortBy(-_.version).headOption.map(prop -> _)
    }
  }

  /**
   * Gets a JSON version (JValue from lift-json)
   * Use compact(render(x)) to get to String
   *
   * @param version The version you are looking for, None will set to current version
   * @return the document state at that version as a JValue
   */
  def getJSONVersion(version: Option[Int] = None): JValue =
    decompose(getChangeVersion(version) map { case (prop, change) => prop -> change.value })

  /**
   * Gets the requested version and converts it to a case class
   *
   * @param version The version you are looking for
   * @param mf the manifest for the case class
   * @tparam T the case class type
   * @return the document state extracted as T
   */
  def getVersion[T](version: Option[Int] = None)(implicit mf: Manifest[T]): T =
    getJSONVersion(version).extract[T]
}

/**
 * A single recorded change to one property.
 *
 * @param propertyName name of the property that changed
 * @param value the value the property was set to
 * @param version the document version in which the change was made
 * @param user optional id of the user who made the change
 */
case class FieldChange(propertyName:String, value:Any, version:Int, user:Option[String]=None)
| Crashfreak/JVerDocs | src/main/scala/org/creak/docversioning/Docs.scala | Scala | apache-2.0 | 4,817 |
package org.broadinstitute.clio.util.generic
import org.scalatest.{FlatSpec, Matchers}
// Verifies that SameFieldsTypeConverter can map between two case classes that
// share the same field names and types.
class SameFieldsTypeConverterSpec extends FlatSpec with Matchers {
  behavior of "SameFieldsTypeConverter"
  it should "convert between classes" in {
    // Two structurally identical case classes; the converter should copy each field across.
    case class TransferObject(a: Option[String], b: Int)
    case class InternalModel(a: Option[String], b: Int)
    val converter = SameFieldsTypeConverter[TransferObject, InternalModel]
    val transfer = TransferObject(None, 0)
    val model = converter.convert(transfer)
    model should be(InternalModel(None, 0))
  }
}
| broadinstitute/clio | clio-util/src/test/scala/org/broadinstitute/clio/util/generic/SameFieldsTypeConverterSpec.scala | Scala | bsd-3-clause | 566 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import scala.collection.JavaConverters._
import org.apache.spark.sql.{SparkSession, Strategy}
import org.apache.spark.sql.catalyst.analysis.{ResolvedNamespace, ResolvedPartitionSpec, ResolvedTable}
import org.apache.spark.sql.catalyst.expressions.{And, Attribute, DynamicPruning, Expression, NamedExpression, PredicateHelper, SubqueryExpression}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.toPrettySQL
import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, StagingTableCatalog, SupportsNamespaces, SupportsPartitionManagement, SupportsWrite, Table, TableCapability, TableCatalog}
import org.apache.spark.sql.connector.read.LocalScan
import org.apache.spark.sql.connector.read.streaming.{ContinuousStream, MicroBatchStream}
import org.apache.spark.sql.connector.write.V1Write
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.execution.{FilterExec, LeafExecNode, LocalTableScanExec, ProjectExec, RowDataSourceScanExec, SparkPlan}
import org.apache.spark.sql.execution.datasources.DataSourceStrategy
import org.apache.spark.sql.execution.streaming.continuous.{WriteToContinuousDataSource, WriteToContinuousDataSourceExec}
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.storage.StorageLevel
class DataSourceV2Strategy(session: SparkSession) extends Strategy with PredicateHelper {
import DataSourceV2Implicits._
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
private def withProjectAndFilter(
project: Seq[NamedExpression],
filters: Seq[Expression],
scan: LeafExecNode,
needsUnsafeConversion: Boolean): SparkPlan = {
val filterCondition = filters.reduceLeftOption(And)
val withFilter = filterCondition.map(FilterExec(_, scan)).getOrElse(scan)
if (withFilter.output != project || needsUnsafeConversion) {
ProjectExec(project, withFilter)
} else {
withFilter
}
}
  // Returns a callback that re-caches any cached plans derived from the given relation.
  private def refreshCache(r: DataSourceV2Relation)(): Unit = {
    session.sharedState.cacheManager.recacheByPlan(session, r)
  }
  // Returns a callback that re-caches cached plans for a resolved table by
  // rebuilding its V2 relation and delegating to the cache manager.
  private def recacheTable(r: ResolvedTable)(): Unit = {
    val v2Relation = DataSourceV2Relation.create(r.table, Some(r.catalog), Some(r.identifier))
    session.sharedState.cacheManager.recacheByPlan(session, v2Relation)
  }
// Invalidates the cache associated with the given table. If the invalidated cache matches the
// given table, the cache's storage level is returned.
private def invalidateTableCache(r: ResolvedTable)(): Option[StorageLevel] = {
val v2Relation = DataSourceV2Relation.create(r.table, Some(r.catalog), Some(r.identifier))
val cache = session.sharedState.cacheManager.lookupCachedData(v2Relation)
session.sharedState.cacheManager.uncacheQuery(session, v2Relation, cascade = true)
if (cache.isDefined) {
val cacheLevel = cache.get.cachedRepresentation.cacheBuilder.storageLevel
Some(cacheLevel)
} else {
None
}
}
private def invalidateCache(catalog: TableCatalog, table: Table, ident: Identifier): Unit = {
val v2Relation = DataSourceV2Relation.create(table, Some(catalog), Some(ident))
session.sharedState.cacheManager.uncacheQuery(session, v2Relation, cascade = true)
}
  // Translates resolved logical plans into DataSource V2 physical operators.
  // Cases are grouped roughly as: scans (V1 fallback, local, batch, streaming),
  // writes (CTAS/RTAS/append/overwrite/delete), then catalog/DDL commands.
  // Returning Nil tells the planner this strategy does not apply.
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
    // V1 fallback: the V2 scan wraps a V1 relation, so plan a row-based V1 scan.
    case PhysicalOperation(project, filters,
        DataSourceV2ScanRelation(_, V1ScanWrapper(scan, pushed, aggregate), output)) =>
      val v1Relation = scan.toV1TableScan[BaseRelation with TableScan](session.sqlContext)
      if (v1Relation.schema != scan.readSchema()) {
        throw QueryExecutionErrors.fallbackV1RelationReportsInconsistentSchemaError(
          scan.readSchema(), v1Relation.schema)
      }
      val rdd = v1Relation.buildScan()
      val unsafeRowRDD = DataSourceStrategy.toCatalystRDD(v1Relation, output, rdd)
      val dsScan = RowDataSourceScanExec(
        output,
        output.toStructType,
        Set.empty,
        pushed.toSet,
        aggregate,
        unsafeRowRDD,
        v1Relation,
        tableIdentifier = None)
      withProjectAndFilter(project, filters, dsScan, needsUnsafeConversion = false) :: Nil
    // Scans whose data is already available on the driver.
    case PhysicalOperation(project, filters,
        DataSourceV2ScanRelation(_, scan: LocalScan, output)) =>
      val localScanExec = LocalTableScanExec(output, scan.rows().toSeq)
      withProjectAndFilter(project, filters, localScanExec, needsUnsafeConversion = false) :: Nil
    case PhysicalOperation(project, filters, relation: DataSourceV2ScanRelation) =>
      // projection and filters were already pushed down in the optimizer.
      // this uses PhysicalOperation to get the projection and ensure that if the batch scan does
      // not support columnar, a projection is added to convert the rows to UnsafeRow.
      val (runtimeFilters, postScanFilters) = filters.partition {
        case _: DynamicPruning => true
        case _ => false
      }
      val batchExec = BatchScanExec(relation.output, relation.scan, runtimeFilters)
      withProjectAndFilter(project, postScanFilters, batchExec, !batchExec.supportsColumnar) :: Nil
    // Micro-batch streaming scan: both start and end offsets are known.
    case PhysicalOperation(p, f, r: StreamingDataSourceV2Relation)
        if r.startOffset.isDefined && r.endOffset.isDefined =>
      val microBatchStream = r.stream.asInstanceOf[MicroBatchStream]
      val scanExec = MicroBatchScanExec(
        r.output, r.scan, microBatchStream, r.startOffset.get, r.endOffset.get)
      // Add a Project here to make sure we produce unsafe rows.
      withProjectAndFilter(p, f, scanExec, !scanExec.supportsColumnar) :: Nil
    // Continuous streaming scan: only a start offset exists.
    case PhysicalOperation(p, f, r: StreamingDataSourceV2Relation)
        if r.startOffset.isDefined && r.endOffset.isEmpty =>
      val continuousStream = r.stream.asInstanceOf[ContinuousStream]
      val scanExec = ContinuousScanExec(r.output, r.scan, continuousStream, r.startOffset.get)
      // Add a Project here to make sure we produce unsafe rows.
      withProjectAndFilter(p, f, scanExec, !scanExec.supportsColumnar) :: Nil
    case WriteToDataSourceV2(relationOpt, writer, query, customMetrics) =>
      // Invalidate caches of the target relation (if known) after the write completes.
      val invalidateCacheFunc: () => Unit = () => relationOpt match {
        case Some(r) => session.sharedState.cacheManager.uncacheQuery(session, r, cascade = true)
        case None => ()
      }
      WriteToDataSourceV2Exec(writer, invalidateCacheFunc, planLater(query), customMetrics) :: Nil
    case CreateV2Table(catalog, ident, schema, parts, props, ifNotExists) =>
      val propsWithOwner = CatalogV2Util.withDefaultOwnership(props)
      CreateTableExec(catalog, ident, schema, parts, propsWithOwner, ifNotExists) :: Nil
    // CTAS: use the atomic variant when the catalog supports staged creation.
    case CreateTableAsSelect(catalog, ident, parts, query, props, options, ifNotExists) =>
      val propsWithOwner = CatalogV2Util.withDefaultOwnership(props)
      val writeOptions = new CaseInsensitiveStringMap(options.asJava)
      catalog match {
        case staging: StagingTableCatalog =>
          AtomicCreateTableAsSelectExec(staging, ident, parts, query, planLater(query),
            propsWithOwner, writeOptions, ifNotExists) :: Nil
        case _ =>
          CreateTableAsSelectExec(catalog, ident, parts, query, planLater(query),
            propsWithOwner, writeOptions, ifNotExists) :: Nil
      }
    case RefreshTable(r: ResolvedTable) =>
      RefreshTableExec(r.catalog, r.identifier, recacheTable(r)) :: Nil
    case ReplaceTable(catalog, ident, schema, parts, props, orCreate) =>
      val propsWithOwner = CatalogV2Util.withDefaultOwnership(props)
      catalog match {
        case staging: StagingTableCatalog =>
          AtomicReplaceTableExec(
            staging, ident, schema, parts, propsWithOwner, orCreate = orCreate,
            invalidateCache) :: Nil
        case _ =>
          ReplaceTableExec(
            catalog, ident, schema, parts, propsWithOwner, orCreate = orCreate,
            invalidateCache) :: Nil
      }
    case ReplaceTableAsSelect(catalog, ident, parts, query, props, options, orCreate) =>
      val propsWithOwner = CatalogV2Util.withDefaultOwnership(props)
      val writeOptions = new CaseInsensitiveStringMap(options.asJava)
      catalog match {
        case staging: StagingTableCatalog =>
          AtomicReplaceTableAsSelectExec(
            staging,
            ident,
            parts,
            query,
            planLater(query),
            propsWithOwner,
            writeOptions,
            orCreate = orCreate,
            invalidateCache) :: Nil
        case _ =>
          ReplaceTableAsSelectExec(
            catalog,
            ident,
            parts,
            query,
            planLater(query),
            propsWithOwner,
            writeOptions,
            orCreate = orCreate,
            invalidateCache) :: Nil
      }
    // Append to a table that only supports the V1 write path.
    case AppendData(r @ DataSourceV2Relation(v1: SupportsWrite, _, _, _, _), query, _,
        _, Some(write)) if v1.supports(TableCapability.V1_BATCH_WRITE) =>
      write match {
        case v1Write: V1Write =>
          AppendDataExecV1(v1, query, refreshCache(r), v1Write) :: Nil
        case v2Write =>
          throw QueryCompilationErrors.batchWriteCapabilityError(
            v1, v2Write.getClass.getName, classOf[V1Write].getName)
      }
    case AppendData(r: DataSourceV2Relation, query, _, _, Some(write)) =>
      AppendDataExec(planLater(query), refreshCache(r), write) :: Nil
    case OverwriteByExpression(r @ DataSourceV2Relation(v1: SupportsWrite, _, _, _, _), _, query,
        _, _, Some(write)) if v1.supports(TableCapability.V1_BATCH_WRITE) =>
      write match {
        case v1Write: V1Write =>
          OverwriteByExpressionExecV1(v1, query, refreshCache(r), v1Write) :: Nil
        case v2Write =>
          throw QueryCompilationErrors.batchWriteCapabilityError(
            v1, v2Write.getClass.getName, classOf[V1Write].getName)
      }
    case OverwriteByExpression(r: DataSourceV2Relation, _, query, _, _, Some(write)) =>
      OverwriteByExpressionExec(planLater(query), refreshCache(r), write) :: Nil
    case OverwritePartitionsDynamic(r: DataSourceV2Relation, query, _, _, Some(write)) =>
      OverwritePartitionsDynamicExec(planLater(query), refreshCache(r), write) :: Nil
    case DeleteFromTable(relation, condition) =>
      relation match {
        case DataSourceV2ScanRelation(r, _, output) =>
          val table = r.table
          if (condition.exists(SubqueryExpression.hasSubquery)) {
            throw QueryCompilationErrors.unsupportedDeleteByConditionWithSubqueryError(condition)
          }
          // fail if any filter cannot be converted.
          // correctness depends on removing all matching data.
          val filters = DataSourceStrategy.normalizeExprs(condition.toSeq, output)
            .flatMap(splitConjunctivePredicates(_).map {
              f => DataSourceStrategy.translateFilter(f, true).getOrElse(
                throw QueryCompilationErrors.cannotTranslateExpressionToSourceFilterError(f))
            }).toArray
          if (!table.asDeletable.canDeleteWhere(filters)) {
            throw QueryCompilationErrors.cannotDeleteTableWhereFiltersError(table, filters)
          }
          DeleteFromTableExec(table.asDeletable, filters, refreshCache(r)) :: Nil
        case _ =>
          throw QueryCompilationErrors.deleteOnlySupportedWithV2TablesError()
      }
    case WriteToContinuousDataSource(writer, query, customMetrics) =>
      WriteToContinuousDataSourceExec(writer, planLater(query), customMetrics) :: Nil
    // Catalog / DDL commands follow.
    case DescribeNamespace(ResolvedNamespace(catalog, ns), extended, output) =>
      DescribeNamespaceExec(output, catalog.asNamespaceCatalog, ns, extended) :: Nil
    case DescribeRelation(r: ResolvedTable, partitionSpec, isExtended, output) =>
      if (partitionSpec.nonEmpty) {
        throw QueryCompilationErrors.describeDoesNotSupportPartitionForV2TablesError()
      }
      DescribeTableExec(output, r.table, isExtended) :: Nil
    case DescribeColumn(_: ResolvedTable, column, isExtended, output) =>
      column match {
        case c: Attribute =>
          DescribeColumnExec(output, c, isExtended) :: Nil
        case nested =>
          throw QueryCompilationErrors.commandNotSupportNestedColumnError(
            "DESC TABLE COLUMN", toPrettySQL(nested))
      }
    case DropTable(r: ResolvedTable, ifExists, purge) =>
      DropTableExec(r.catalog, r.identifier, ifExists, purge, invalidateTableCache(r)) :: Nil
    case _: NoopCommand =>
      LocalTableScanExec(Nil, Nil) :: Nil
    case RenameTable(r @ ResolvedTable(catalog, oldIdent, _, _), newIdent, isView) =>
      if (isView) {
        throw QueryCompilationErrors.cannotRenameTableWithAlterViewError()
      }
      RenameTableExec(
        catalog,
        oldIdent,
        newIdent.asIdentifier,
        invalidateTableCache(r),
        session.sharedState.cacheManager.cacheQuery) :: Nil
    case SetNamespaceProperties(ResolvedNamespace(catalog, ns), properties) =>
      AlterNamespaceSetPropertiesExec(catalog.asNamespaceCatalog, ns, properties) :: Nil
    // Location and comment changes are expressed as reserved namespace properties.
    case SetNamespaceLocation(ResolvedNamespace(catalog, ns), location) =>
      AlterNamespaceSetPropertiesExec(
        catalog.asNamespaceCatalog,
        ns,
        Map(SupportsNamespaces.PROP_LOCATION -> location)) :: Nil
    case CommentOnNamespace(ResolvedNamespace(catalog, ns), comment) =>
      AlterNamespaceSetPropertiesExec(
        catalog.asNamespaceCatalog,
        ns,
        Map(SupportsNamespaces.PROP_COMMENT -> comment)) :: Nil
    case CreateNamespace(catalog, namespace, ifNotExists, properties) =>
      CreateNamespaceExec(catalog, namespace, ifNotExists, properties) :: Nil
    case DropNamespace(ResolvedNamespace(catalog, ns), ifExists, cascade) =>
      DropNamespaceExec(catalog, ns, ifExists, cascade) :: Nil
    case ShowNamespaces(ResolvedNamespace(catalog, ns), pattern, output) =>
      ShowNamespacesExec(output, catalog.asNamespaceCatalog, ns, pattern) :: Nil
    case ShowTables(ResolvedNamespace(catalog, ns), pattern, output) =>
      ShowTablesExec(output, catalog.asTableCatalog, ns, pattern) :: Nil
    case SetCatalogAndNamespace(catalogManager, catalogName, ns) =>
      SetCatalogAndNamespaceExec(catalogManager, catalogName, ns) :: Nil
    case r: ShowCurrentNamespace =>
      ShowCurrentNamespaceExec(r.output, r.catalogManager) :: Nil
    case r @ ShowTableProperties(rt: ResolvedTable, propertyKey, output) =>
      ShowTablePropertiesExec(output, rt.table, propertyKey) :: Nil
    case AnalyzeTable(_: ResolvedTable, _, _) | AnalyzeColumn(_: ResolvedTable, _, _) =>
      throw QueryCompilationErrors.analyzeTableNotSupportedForV2TablesError()
    // Partition management requires the table to implement SupportsPartitionManagement.
    case AddPartitions(
        r @ ResolvedTable(_, _, table: SupportsPartitionManagement, _), parts, ignoreIfExists) =>
      AddPartitionExec(
        table,
        parts.asResolvedPartitionSpecs,
        ignoreIfExists,
        recacheTable(r)) :: Nil
    case DropPartitions(
        r @ ResolvedTable(_, _, table: SupportsPartitionManagement, _),
        parts,
        ignoreIfNotExists,
        purge) =>
      DropPartitionExec(
        table,
        parts.asResolvedPartitionSpecs,
        ignoreIfNotExists,
        purge,
        recacheTable(r)) :: Nil
    case RenamePartitions(
        r @ ResolvedTable(_, _, table: SupportsPartitionManagement, _), from, to) =>
      RenamePartitionExec(
        table,
        Seq(from).asResolvedPartitionSpecs.head,
        Seq(to).asResolvedPartitionSpecs.head,
        recacheTable(r)) :: Nil
    case RecoverPartitions(_: ResolvedTable) =>
      throw QueryCompilationErrors.alterTableRecoverPartitionsNotSupportedForV2TablesError()
    case SetTableSerDeProperties(_: ResolvedTable, _, _, _) =>
      throw QueryCompilationErrors.alterTableSerDePropertiesNotSupportedForV2TablesError()
    case LoadData(_: ResolvedTable, _, _, _, _) =>
      throw QueryCompilationErrors.loadDataNotSupportedForV2TablesError()
    case ShowCreateTable(rt: ResolvedTable, asSerde, output) =>
      if (asSerde) {
        throw QueryCompilationErrors.showCreateTableAsSerdeNotSupportedForV2TablesError()
      }
      ShowCreateTableExec(output, rt.table) :: Nil
    case TruncateTable(r: ResolvedTable) =>
      TruncateTableExec(
        r.table.asTruncatable,
        recacheTable(r)) :: Nil
    case TruncatePartition(r: ResolvedTable, part) =>
      TruncatePartitionExec(
        r.table.asPartitionable,
        Seq(part).asResolvedPartitionSpecs.head,
        recacheTable(r)) :: Nil
    case ShowColumns(_: ResolvedTable, _, _) =>
      throw QueryCompilationErrors.showColumnsNotSupportedForV2TablesError()
    case r @ ShowPartitions(
        ResolvedTable(catalog, _, table: SupportsPartitionManagement, _),
        pattern @ (None | Some(_: ResolvedPartitionSpec)), output) =>
      ShowPartitionsExec(
        output,
        catalog,
        table,
        pattern.map(_.asInstanceOf[ResolvedPartitionSpec])) :: Nil
    case RepairTable(_: ResolvedTable, _, _) =>
      throw QueryCompilationErrors.repairTableNotSupportedForV2TablesError()
    case r: CacheTable =>
      CacheTableExec(r.table, r.multipartIdentifier, r.isLazy, r.options) :: Nil
    case r: CacheTableAsSelect =>
      CacheTableAsSelectExec(r.tempViewName, r.plan, r.originalText, r.isLazy, r.options) :: Nil
    case r: UncacheTable =>
      // Uncaching a temp view must not cascade to plans built on top of it.
      def isTempView(table: LogicalPlan): Boolean = table match {
        case SubqueryAlias(_, v: View) => v.isTempView
        case _ => false
      }
      UncacheTableExec(r.table, cascade = !isTempView(r.table)) :: Nil
    case a: AlterTableCommand if a.table.resolved =>
      val table = a.table.asInstanceOf[ResolvedTable]
      AlterTableExec(table.catalog, table.identifier, a.changes) :: Nil
    case _ => Nil
  }
}
| jiangxb1987/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala | Scala | apache-2.0 | 18,776 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.features.avro.serde
import java.nio.ByteBuffer
import com.vividsolutions.jts.io.InStream
import org.apache.avro.io.Decoder
import org.locationtech.geomesa.features.avro.AvroSimpleFeature
import org.locationtech.geomesa.utils.text.WKBUtils
/**
* AvroSimpleFeature version 2 changes serialization of Geometry types from
* WKT (Well Known Text) to WKB (Well Known Binary)
*/
/**
 * Deserializer for version 2 of the AvroSimpleFeature encoding, in which
 * geometry attributes are stored as WKB (Well Known Binary) instead of WKT.
 */
object Version2Deserializer extends ASFDeserializer {

  /** Reads a WKB-encoded geometry from the decoder and stores it on the feature. */
  override def setGeometry(sf: AvroSimpleFeature, field: String, in: Decoder): Unit = {
    val buffer = in.readBytes(null)
    val raw = new Array[Byte](buffer.remaining)
    buffer.get(raw)
    sf.setAttributeNoConvert(field, WKBUtils.read(raw))
  }

  /** Adapts a ByteBuffer to the JTS InStream interface. */
  class BBInStream(bb: ByteBuffer) extends InStream {
    override def read(buf: Array[Byte]): Unit = bb.get(buf)
  }

  /** Skips over an encoded geometry without materialising it. */
  override def consumeGeometry(in: Decoder) = in.skipBytes()
}
| nagavallia/geomesa | geomesa-features/geomesa-feature-avro/src/main/scala/org/locationtech/geomesa/features/avro/serde/Version2Deserializer.scala | Scala | apache-2.0 | 1,378 |
package api
import javax.inject.{Inject, Singleton}
import com.payu.shorturl.model.Url
import com.payu.shorturl.service.ShortUrlService
// HTTP endpoints for creating short URLs and resolving them back to their targets.
@Singleton
class ShortUrlApi @Inject()(
                             shortUrlService: ShortUrlService
                           ) extends Api {
  // Creates a shortened URL from the JSON request body and returns the result as JSON.
  def create() = Action.async(json[Url.Create]) { implicit request =>
    val input = request.body
    val result = shortUrlService.create(input)
    // NOTE(review): the action is async, so `result` is presumably a Future;
    // assumes Api/Ok#asJson accepts it directly — confirm against the Api trait.
    Ok.asJson(result)
  }
  // Resolves a shortened token: 307-redirects to the original URL, or 404 when unknown.
  def get(urlShortened: String) = Action.async { implicit request =>
    shortUrlService.getByUrlShortened(urlShortened) map {
      case Some(localUrl) => TemporaryRedirect(localUrl.url)
      case None => NotFound
    }
  }
}
| felipehaack/shorturl | payu-api/app/api/ShortUrlApi.scala | Scala | gpl-3.0 | 690 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.data
import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.apache.accumulo.core.security.Authorizations
import org.geotools.data._
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.factory.{CommonFactoryFinder, Hints}
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.filter.identity.FeatureIdImpl
import org.locationtech.geomesa.features.avro.AvroSimpleFeatureFactory
import org.locationtech.geomesa.security.SecurityUtils
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import scala.collection.JavaConversions._
// Integration-style specs (against MockInstance Accumulo) verifying that
// per-feature visibility labels restrict reads and deletes to users whose
// authorizations satisfy the label. Specs share mutable stores, hence `sequential`.
class VisibilitiesTest extends Specification {
  sequential
  "handle per feature visibilities" should {
    // Mock cluster with a privileged user (user+admin auths) and a non-privileged one (user only).
    val mockInstance = new MockInstance("perfeatureinstance")
    val conn = mockInstance.getConnector("myuser", new PasswordToken("mypassword".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("myuser", new Authorizations("user", "admin"))
    conn.securityOperations().createLocalUser("nonpriv", new PasswordToken("nonpriv".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("nonpriv", new Authorizations("user"))
    // create the data store
    val ds = DataStoreFinder.getDataStore(Map(
      "instanceId" -> "perfeatureinstance",
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "myuser",
      "password" -> "mypassword",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]
    val sftName = "perfeatureauthtest"
    val sft = SimpleFeatureTypes.createType(sftName, s"name:String,dtg:Date,*geom:Point:srid=4326")
    sft.setDtgField("dtg")
    ds.createSchema(sft)
    // write some data
    val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
    val features = getFeatures(sft).toList
    // First 3 features require user&admin; remaining 3 only require user.
    val privFeatures = features.take(3)
    privFeatures.foreach { f => f.getUserData.put(SecurityUtils.FEATURE_VISIBILITY, "user&admin") }
    val nonPrivFeatures = features.drop(3)
    nonPrivFeatures.foreach { f => f.getUserData.put(SecurityUtils.FEATURE_VISIBILITY, "user") }
    fs.addFeatures(new ListFeatureCollection(sft, privFeatures ++ nonPrivFeatures))
    fs.flush()
    val ff = CommonFactoryFinder.getFilterFactory2
    import ff.{literal => lit, property => prop, _}
    // Same table opened as the non-privileged user.
    val unprivDS = DataStoreFinder.getDataStore(Map(
      "instanceId" -> "perfeatureinstance",
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "nonpriv",
      "password" -> "nonpriv",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]
    "nonpriv should only be able to read a subset of features" in {
      "using ALL queries" in {
        val reader = unprivDS.getFeatureReader(new Query(sftName), Transaction.AUTO_COMMIT)
        val readFeatures = reader.getIterator.toList
        readFeatures.size must be equalTo 3
      }
      "using ST queries" in {
        val filter = bbox(prop("geom"), 44.0, 44.0, 46.0, 46.0, "EPSG:4326")
        val reader = unprivDS.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 3
      }
      "using attribute queries" in {
        // name "1" is privileged, name "4" is not — nonpriv only sees one of the two.
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = unprivDS.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 1
      }
    }
    "priv should be able to read all 6 features" in {
      "using ALL queries" in {
        val reader = ds.getFeatureReader(new Query(sftName), Transaction.AUTO_COMMIT)
        val readFeatures = reader.getIterator.toList
        readFeatures.size must be equalTo 6
      }
      "using ST queries" in {
        val filter = bbox(prop("geom"), 44.0, 44.0, 46.0, 46.0, "EPSG:4326")
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 6
      }
      "using attribute queries" in {
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 2
      }
    }
  }
  "remove should continue to work as expected" in {
    // Fresh mock instance so delete results aren't entangled with the specs above.
    val instanceId = "removeviz"
    val mockInstance = new MockInstance(instanceId)
    val conn = mockInstance.getConnector("myuser", new PasswordToken("mypassword".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("myuser", new Authorizations("user", "admin"))
    conn.securityOperations().createLocalUser("nonpriv", new PasswordToken("nonpriv".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("nonpriv", new Authorizations("user"))
    // create the data store
    val ds = DataStoreFinder.getDataStore(Map(
      "instanceId" -> instanceId,
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "myuser",
      "password" -> "mypassword",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]
    val sftName = "perfeatureauthtest"
    val sft = SimpleFeatureTypes.createType(sftName, s"name:String,dtg:Date,*geom:Point:srid=4326")
    sft.setDtgField("dtg")
    ds.createSchema(sft)
    // write some data
    val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
    val features = getFeatures(sft).toList
    val privFeatures = features.take(3)
    privFeatures.foreach { f => f.getUserData.put(SecurityUtils.FEATURE_VISIBILITY, "user&admin") }
    val nonPrivFeatures = features.drop(3)
    nonPrivFeatures.foreach { f => f.getUserData.put(SecurityUtils.FEATURE_VISIBILITY, "user") }
    fs.addFeatures(new ListFeatureCollection(sft, privFeatures ++ nonPrivFeatures))
    fs.flush()
    val ff = CommonFactoryFinder.getFilterFactory2
    import ff.{literal => lit, property => prop, _}
    val unprivDS = DataStoreFinder.getDataStore(Map(
      "instanceId" -> instanceId,
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "nonpriv",
      "password" -> "nonpriv",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]
    "priv should be able to delete a feature" in {
      fs.removeFeatures(ff.id(new FeatureIdImpl("1")))
      fs.flush()
      "using ALL queries" in {
        fs.getFeatures(Query.ALL).features().toList.size must be equalTo 5
      }
      "using record id queries" in {
        fs.getFeatures(ff.id(ff.featureId("1"))).features().hasNext must beFalse
      }
      "using attribute queries" in {
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 1
      }
    }
    "nonpriv should not be able to delete a priv feature" in {
      // Delete attempt lacks the admin auth, so the visibility label blocks it.
      val unprivFS = unprivDS.getFeatureSource(sftName).asInstanceOf[SimpleFeatureStore]
      unprivFS.removeFeatures(ff.id(new FeatureIdImpl("2")))
      unprivFS.flush()
      "priv should still see the feature that was attempted to be deleted" in {
        fs.getFeatures(ff.id(ff.featureId("2"))).features().hasNext must beTrue
      }
    }
  }
  val hints = new Hints(Hints.FEATURE_FACTORY, classOf[AvroSimpleFeatureFactory])
  val featureFactory = CommonFactoryFinder.getFeatureFactory(hints)
  // Builds 6 point features named "0".."5" at the same location/time, with provided FIDs.
  def getFeatures(sft: SimpleFeatureType) = (0 until 6).map { i =>
    val builder = new SimpleFeatureBuilder(sft, featureFactory)
    builder.set("geom", WKTUtils.read("POINT(45.0 45.0)"))
    builder.set("dtg", "2012-01-02T05:06:07.000Z")
    builder.set("name",i.toString)
    val sf = builder.buildFeature(i.toString)
    sf.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    sf
  }
}
| mdzimmerman/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/VisibilitiesTest.scala | Scala | apache-2.0 | 9,315 |
package com.twitter.finatra.http.integration.tweetexample.main.modules
import com.twitter.finatra.http.integration.tweetexample.main.services.{MyTweetsRepository, TweetsRepository}
import com.twitter.inject.TwitterModule
/** Guice module binding the tweet-repository interface to its concrete implementation. */
object TweetsEndpointServerModule extends TwitterModule {

  /** Wires TweetsRepository to MyTweetsRepository as a singleton. */
  override def configure(): Unit = {
    bindSingleton[TweetsRepository].to[MyTweetsRepository]
  }
}
| tom-chan/finatra | http/src/test/scala/com/twitter/finatra/http/integration/tweetexample/main/modules/TweetsEndpointServerModule.scala | Scala | apache-2.0 | 377 |
/**
* For copyright information see the LICENSE document.
*/
package entice.server.world
import entice.server.physics._
import entice.server.utils._
import entice.protocol._
import shapeless._
import scala.language.postfixOps
/**
* Manages all entities registered to it, all systems registered to it, and can
* create diffs of its state transitions.
*/
class World(
val name: String,
val messageBus: MessageBus,
val collisionMesh: CollisionMesh)
extends WorldCore
with SystemsManagement
with DiffManagement
with EventManagement
/**
* Manages all entities registered to it.
*/
private[world] trait WorldCore {
self: World =>
protected var entities: Map[Entity, (RichEntity, TypedSet[Component])] = Map()
def create(comps : TypedSet[Component]) = use(Entity(UUID()), comps)
def use(entity: Entity, comps : TypedSet[Component]) = {
val rich = RichEntity(entity, self)
entities = entities + (entity -> ((rich, comps)))
rich
}
def remove(rich: RichEntity) { remove(rich.entity) }
def remove(entity: Entity) { entities = entities - entity }
def update(entity: Entity, comps: TypedSet[Component]) {
if (!entities.contains(entity)) return
update(getRich(entity).get, comps)
}
def update(rich: RichEntity, comps: TypedSet[Component]) {
if (rich.world != this) return
entities = entities + (rich.entity -> ((rich, comps)))
}
def contains(entity: Entity) = entities.contains(entity)
def getRich(entity: Entity): Option[RichEntity] = {
entities.get(entity) match {
case Some((rich, comps)) => Some(rich)
case None => None
}
}
def getComps(entity: Entity): Option[TypedSet[Component]] = {
entities.get(entity) match {
case Some((rich, comps)) => Some(comps)
case None => None
}
}
def dump: Map[Entity, TypedSet[Component]] = {
(for ((entity, (rich, comps)) <- entities) yield
(entity -> comps.deepClone))
.toMap
}
}
| entice/old-server | src/main/scala/entice/server/world/World.scala | Scala | bsd-3-clause | 2,105 |
package io.igl.jwt
import play.api.libs.json.{JsString, JsValue}
// A concrete value carried in a JWT header, paired with the field it belongs to.
trait HeaderValue extends JwtValue {
  val field: HeaderField
}
// A recognised JWT header field; parses its value from JSON, yielding None on failure.
trait HeaderField extends JwtField {
  def attemptApply(value: JsValue): Option[HeaderValue]
}
// The "typ" (type) header value, serialised as a JSON string.
case class Typ(value: String) extends HeaderValue {
  override val field: HeaderField = Typ
  override val jsValue: JsValue = JsString(value)
}
// Field companion for "typ": accepts any JSON string.
object Typ extends HeaderField {
  override def attemptApply(value: JsValue): Option[Typ] =
    value.asOpt[String].map(apply)
  override val name = "typ"
}
// The "alg" (algorithm) header value, serialised as the algorithm's name.
case class Alg(value: Algorithm) extends HeaderValue {
  override val field: HeaderField = Alg
  override val jsValue: JsValue = JsString(value.name)
}
// Field companion for "alg": accepts only strings that name a known Algorithm.
object Alg extends HeaderField {
  override def attemptApply(value: JsValue): Option[Alg] =
    value.asOpt[String].flatMap(Algorithm.getAlgorithm).map(apply)
  override val name = "alg"
}
/**
 * The "cty" (content type) header. Only the value "JWT" is recognised; any
 * other JSON value yields None.
 */
case object Cty extends HeaderField with HeaderValue {
  // Bug fix: the partial function was previously passed to `map`, which threw a
  // MatchError for any string other than "JWT". `collect` returns None instead,
  // matching the Option contract of attemptApply (as Typ and Alg do).
  override def attemptApply(value: JsValue): Option[HeaderValue] =
    value.asOpt[String].collect { case this.value => Cty }
  override val name = "cty"
  override val field: HeaderField = this
  override val value = "JWT"
  override val jsValue: JsValue = JsString(value)
}
| iain-logan/jwt | src/main/scala/io/igl/jwt/Headers.scala | Scala | mit | 1,201 |
package amailp.intellij.robot.structureView
import com.intellij.navigation.ItemPresentation
import javax.swing.Icon
import com.intellij.ide.util.treeView.smartTree.TreeElement
import com.intellij.ide.structureView.StructureViewTreeElement
import amailp.intellij.robot.lexer.RobotIElementType
import amailp.intellij.robot.psi.RobotPsiElement
import com.intellij.ide.util.PsiNavigationSupport
import com.intellij.pom.Navigatable
// Mixin for PSI elements that appear in the IntelliJ Structure View. Implementors
// supply their display text, icon, and the token types of their structural children;
// the trait builds the corresponding StructureViewTreeElement.
trait InStructureView extends RobotPsiElement {
  // Text shown for this node in the structure view.
  def structureViewText: String
  // Icon shown for this node in the structure view.
  def structureViewIcon: Icon
  // Token types whose child PSI elements become this node's children.
  def structureViewChildrenTokenTypes: List[RobotIElementType]
  val structureTreeElement: StructureViewTreeElement = new StructureViewTreeElement {
    def getChildren: Array[TreeElement] = {
      for {
        //TODO Use PsiTreeUtil and get rid of intermediate class RobotPsiElement
        child <- findChildrenByType[InStructureView](structureViewChildrenTokenTypes)
      } yield child.structureTreeElement
    }.toArray
    def getPresentation: ItemPresentation = new ItemPresentation {
      def getPresentableText: String = structureViewText
      // No location suffix is displayed for these nodes.
      def getLocationString: String = null
      def getIcon(unused: Boolean): Icon = structureViewIcon
    }
    def canNavigateToSource: Boolean = true
    def canNavigate: Boolean = getNavigatable.canNavigate
    def navigate(requestFocus: Boolean): Unit = getNavigatable.navigate(requestFocus)
    // Navigation is delegated to the platform's descriptor for the underlying PSI element.
    private def getNavigatable: Navigatable = PsiNavigationSupport.getInstance().getDescriptor(InStructureView.this)
    def getValue: AnyRef = InStructureView.this
  }
}
| puhnastik/robot-plugin | src/main/scala/amailp/intellij/robot/structureView/InStructureView.scala | Scala | gpl-3.0 | 1,561 |
package markpeng.scala.learningscala
/** Worked examples for "Learning Scala" chapter 5: higher-order functions,
  * partial application, currying and function-literal blocks. */
object Ch5Ex {

  /** Applies the three-argument operation `f` to `a`, `b` and `c`. */
  def tripleOp[A, B](a: A, b: A, c: A, f: (A, A, A) => B) = f(a, b, c)

  /**
   * (Int, Int) => Boolean
   */
  def factorOf(x: Int, y: Int) = y % x == 0

  /**
   * Int => Int => Boolean
   */
  def factorOf2(x: Int)(y: Int) = y % x == 0

  /**
   * Invoking higher order function with function literal blocks.
   * Returns `s` unchanged (i.e. null) when it is null, otherwise `f(s)`.
   */
  def safeStringOp(s: String)(f: String => String) = {
    if (s == null) s else f(s)
  }

  def main(args: Array[String]): Unit = {
    // Eta-expansion: lift the method into a function value.
    val asFunction = factorOf _
    println(asFunction)

    // Partial application: fix the first argument, leave the second open.
    val multipleOf3 = factorOf(3, _: Int)
    println(multipleOf3)
    println(multipleOf3(78))

    // Currying: supply the first parameter list only.
    val curriedBy3 = factorOf2(3) _
    println(curriedBy3)
    println(curriedBy3(78))

    val intResult = tripleOp[Int, Int](23, 92, 14, _ * _ + _)
    println(intResult)
    val doubleResult = tripleOp[Int, Double](23, 92, 14, 1.0 * _ / _ / _)
    println(doubleResult)
    val boolResult = tripleOp[Int, Boolean](23, 92, 14, _ > _ + _)
    println(boolResult)

    // Function literal block: stamp a fresh UUID with the current time.
    val uuid = java.util.UUID.randomUUID().toString
    val timedUUID = safeStringOp(uuid) { s =>
      val now = System.currentTimeMillis
      val timed = s.take(24) + now
      timed.toUpperCase
    }
    println(timedUUID)

    // Ex 1.
    // function literal choosing the larger of two numbers
    val highestNum = (x: Int, y: Int) => if (x > y) x else y
    // higher-order function folding the literal across a triple
    def highestNumWrapper(x: (Int, Int, Int), f: (Int, Int) => Int) = f(x._1, f(x._2, x._3))
    println(highestNumWrapper((14, 7, 9), highestNum))

    // Ex 3: a closure capturing its multiplier.
    def ex3Func(a: Int) = (b: Int) => a * b
    val tripler = ex3Func(3)
    println(tripler(10))
  }
}
package org.denigma.kappa.notebook.views.comments
import org.denigma.codemirror.Editor
import org.scalajs.dom.html._
/**
 * Created by antonkulaga on 8/28/16.
 *
 * A scanner over editor content: implementations receive numbered source
 * lines from a CodeMirror editor and process them (Unit return, so any
 * effect happens via the editor or the implementation's own state).
 */
trait Watcher {
  // Payload type an implementation associates with parsed content.
  type Data
  // Processes `lines` (pairs of line number and line text) from `editor`.
  // NOTE(review): `currentNum` is presumably the line currently being
  // edited/scanned — confirm against implementations.
  def parse(editor: Editor, lines: List[(Int, String)], currentNum: Int): Unit
}
| antonkulaga/kappa-notebook | app/js/src/main/scala/org/denigma/kappa/notebook/views/comments/Watcher.scala | Scala | mpl-2.0 | 279 |
package io.scalaland.chimney
import io.scalaland.chimney.dsl._
import io.scalaland.chimney.utils.OptionUtils._
import utest._
/** Verifies that chimney's lifted transformers (`TransformerF`) report the
  * *path* of the field whose transformation failed — e.g. "c.d", "list(0)",
  * "map.keys(a)" — for every supported data shape. */
object ErrorPathSpec extends TestSuite {
  val tests = Tests {
    "error path should capture for" - {
      // Error channel: a list of string-labelled transformation errors.
      type V[+A] = Either[List[TransformationError[String]], A]
      def printError(err: TransformationError[String]): String =
        s"${err.message} on ${err.showErrorPath}"
      // String -> Int transformer that fails (rather than throws) on bad input;
      // every sub-test below relies on it to trigger errors.
      implicit val intParse: TransformerF[V, String, Int] =
        str => str.parseInt.fold[V[Int]](Left(List(TransformationError(s"Can't parse int from $str"))))(Right(_))
      // A failure at the top level has an empty path.
      "root" - {
        val errors = "invalid".transformIntoF[V, Int]
        errors.left.map(_.map(_.message)) ==> Left(List("Can't parse int from invalid"))
        errors.left.map(_.map(_.showErrorPath)) ==> Left(List(""))
      }
      // Nested case classes: paths use dot notation ("c.d").
      "case classes" - {
        case class Foo(a: String, b: String, c: InnerFoo, d: String)
        case class InnerFoo(d: String, e: String)
        case class Bar(a: Int, b: Int, c: InnerBar, d: String)
        case class InnerBar(d: Int, e: Int)
        Foo("mmm", "nnn", InnerFoo("lll", "jjj"), "d").transformIntoF[V, Bar].left.map(_.map(printError)) ==>
          Left(
            List(
              "Can't parse int from mmm on a",
              "Can't parse int from nnn on b",
              "Can't parse int from lll on c.d",
              "Can't parse int from jjj on c.e"
            )
          )
      }
      // Collections: paths carry the element index.
      "list" - {
        case class Foo(list: List[String])
        case class Bar(list: List[Int])
        Foo(List("a", "b", "c")).transformIntoF[V, Bar].left.map(_.map(printError)) ==>
          Left(
            List(
              "Can't parse int from a on list(0)",
              "Can't parse int from b on list(1)",
              "Can't parse int from c on list(2)"
            )
          )
      }
      // Maps: key failures are reported under ".keys(k)", value failures under "(k)".
      "map" - {
        case class StrWrapper1(str: String)
        case class StrWrapper2(value: String)
        implicit val strWrapper1ToStrWrapper2: Transformer[StrWrapper1, StrWrapper2] =
          wrapper => StrWrapper2(wrapper.str)
        case class Foo(map: Map[String, String], map2: Map[String, String], map3: Map[StrWrapper1, StrWrapper1])
        case class Bar(map: Map[Int, Int], map2: Map[String, Int], map3: Map[StrWrapper2, StrWrapper2])
        case class Bar2(list: List[(Int, Int)], list2: List[(String, Int)])
        val foo = Foo(Map("a" -> "b", "c" -> "d"), Map("e" -> "f"), Map(StrWrapper1("i") -> StrWrapper1("j")))
        val errors = Left(
          List(
            "Can't parse int from a on map.keys(a)",
            "Can't parse int from b on map(a)",
            "Can't parse int from c on map.keys(c)",
            "Can't parse int from d on map(c)",
            "Can't parse int from f on map2(e)"
          )
        )
        foo.transformIntoF[V, Bar].left.map(_.map(printError)) ==> errors
        // Renaming a map into a list of pairs preserves the same error paths.
        foo
          .intoF[V, Bar2]
          .withFieldRenamed(_.map, _.list)
          .withFieldRenamed(_.map2, _.list2)
          .transform
          .left
          .map(_.map(printError)) ==> errors
        // Unsupported derivations must fail at compile time with key/value labels.
        val error = compileError("""Map("a" -> "b").transformIntoF[V, Map[Double, Double]]""")
        error.check(
          "",
          "derivation from k: java.lang.String to scala.Double is not supported in Chimney!"
        )
        error.check(
          "",
          "derivation from v: java.lang.String to scala.Double is not supported in Chimney!"
        )
      }
      // Bean getters: the path shows the getter name.
      "java beans" - {
        class Foo(a: String, b: String) {
          def getA: String = a
          def getB: String = b
        }
        case class Bar(a: Int, b: Int)
        new Foo("a", "b")
          .intoF[V, Bar]
          .enableBeanGetters
          .transform
          .left
          .map(_.map(printError)) ==>
          Left(
            List(
              "Can't parse int from a on getA",
              "Can't parse int from b on getB"
            )
          )
      }
      // Tuples: the path shows the position accessor (_1, _2, ...).
      "tuples" - {
        ("a", "b").transformIntoF[V, (Int, Int)].left.map(_.map(printError)) ==>
          Left(
            List(
              "Can't parse int from a on _1",
              "Can't parse int from b on _2"
            )
          )
      }
      // DSL-customised fields: computed/const failures are reported at the
      // target field's parent path (or the root for top-level consts).
      "case classes with DSL" - {
        case class Foo(inner: InnerFoo)
        case class InnerFoo(str: String)
        case class Bar(inner: InnerBar, b: Int)
        case class InnerBar(int1: Int, int2: Int, double: Double)
        implicit val innerT: TransformerF[V, InnerFoo, InnerBar] =
          TransformerF
            .define[V, InnerFoo, InnerBar]
            .withFieldRenamed(_.str, _.int1)
            .withFieldConstF(_.int2, intParse.transform("notint"))
            .withFieldComputedF(
              _.double,
              foo =>
                foo.str.parseDouble
                  .fold[V[Double]](Left(List(TransformationError(s"Can't parse int from ${foo.str}"))))(Right(_))
            )
            .buildTransformer
        Foo(InnerFoo("aaa"))
          .intoF[V, Bar]
          .withFieldConstF(_.b, intParse.transform("bbb"))
          .transform
          .left
          .map(_.map(printError)) ==> Left(
          List(
            "Can't parse int from aaa on inner.str",
            "Can't parse int from notint on inner",
            "Can't parse int from aaa on inner",
            "Can't parse int from bbb on "
          )
        )
      }
    }
  }
}
| scalalandio/chimney | chimney/src/test/scala/io/scalaland/chimney/ErrorPathSpec.scala | Scala | apache-2.0 | 5,448 |
package org.receiver2d.engine.geometry
import org.receiver2d.engine.math._
import scala.collection.immutable.IndexedSeq
/** An immutable 2D polygon defined by its vertex list (at least 3 vertices).
  * Iterating over the polygon iterates over its vertices. */
class Poly(val verts: IndexedSeq[Vec2]) extends Shape[Poly] with Iterable[Vec2] {
  require(verts.length >= 3)
  // Vertex access and iteration delegate to the backing sequence.
  def apply(i: Int) = verts.apply(i)
  def iterator: Iterator[Vec2] = verts.iterator
  // Triangles as windows of 3 vertices stepping by 2 over the closed ring.
  // NOTE(review): this is a strip-style decomposition, not a fan — confirm it
  // is a valid triangulation for all polygons this engine supports.
  lazy val tris =
    (verts :+ verts.head).sliding(3,2).filter(_.size==3).map(_.toArray).toArray
  // Transforms every vertex by the matrix, reading the result back into 2D.
  override def *(m: Matrix) = new Poly(verts map (m * _) map (v => Vec2(v.get(0)(0), v.get(1)(0))))
  // Arithmetic mean of the vertices. NOTE(review): this is the vertex
  // centroid, not the area-weighted center of mass for irregular polygons.
  lazy val com = verts.fold(Vec2.ZERO)(_ + _) / verts.size
  /** @see http://www.mathopenref.com/coordpolygonarea.html **/
  // Shoelace formula: half the |sum| of cross products of adjacent vertices.
  lazy val area = (verts :+ verts.head).sliding(2).map{
    case IndexedSeq(v1,v2) => v1 x v2
  }.sum.abs / 2f
  /**
   * Returns a new axis-aligned bounding box for this entity.
   */
  def getAABB = {
    val mmx = getMinMax(Vec2(1, 0))
    val mmy = getMinMax(Vec2(0, 1))
    new Rect(Vec2(mmx.minp, mmy.minp), Vec2(mmx.maxp, mmy.maxp))
  }
  /**
   * Gets the minimum and maximum points projected along the given axis.
   */
  def getMinMax(axis: Vec2) = {
    val min = verts minBy (_ project axis)
    val max = verts maxBy (_ project axis)
    new MinMax(min, max, min project axis, max project axis, axis)
  }
  /**
   * Gets all normal vectors extending through every side of this polygon.
   * A better name for this method might also be getAxes()
   */
  def getNormals: IndexedSeq[Vec2] =
    for {
      i <- verts.indices
      j = if (i == verts.length-1) 0 else i+1
    } yield (verts(j)-verts(i)).normL
  /**
   * Gets the point on this polygon that is farthest along the given axis
   */
  def getSupportPoint(axis: Vec2) = verts.maxBy(_ project axis)
  override def toString = verts mkString ", "
}
| Prince781/Receiver2D | src/main/scala/org/receiver2d/engine/geometry/Poly.scala | Scala | gpl-2.0 | 1,771 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa
import java.io.Flushable
import org.geotools.data.simple.SimpleFeatureWriter
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
package object index {
  /** A feature writer that can also flush buffered writes without closing. */
  type FlushableFeatureWriter = SimpleFeatureWriter with Flushable
  // Maximum number of entries kept in the filter cache.
  val FilterCacheSize = SystemProperty("geomesa.cache.filters.size", "1000")
  // Maximum number of entries kept in the cache of Z-curve filters.
  val ZFilterCacheSize = SystemProperty("geomesa.cache.z-filters.size", "1000")
  // Whether scans over partitioned indices run in parallel (default: sequential).
  val PartitionParallelScan = SystemProperty("geomesa.partition.scan.parallel", "false")
  // How long to wait to acquire a distributed lock before giving up.
  val DistributedLockTimeout = SystemProperty("geomesa.distributed.lock.timeout", "2 minutes")
}
| aheyne/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/package.scala | Scala | apache-2.0 | 1,098 |
// Copyright (c) 2013, Johns Hopkins University. All rights reserved.
// This software is released under the 2-clause BSD license.
// See /LICENSE.txt
// Travis Wolfe, twolfe18@gmail.com, 30 July 2013
package edu.jhu.hlt.parma.input
import edu.jhu.hlt.parma.annotation.PredArgSelector
import edu.jhu.hlt.parma.util._
import edu.jhu.hlt.parma.util.AnnotationAligner.HalfAlignment
import edu.jhu.hlt.parma.types._
import edu.jhu.hlt.parma.features.ReportingVerbs
import scala.collection.mutable.{ ArrayBuffer, HashSet, HashMap, Buffer }
import scala.collection.JavaConversions._
import java.io.File
/** Reads report/passage document pairs plus a token-alignment file and turns
  * them into [[ParametricDocAlignment]]s. Documents `X.a` (reports) and `X.b`
  * (passages) are matched to `<DOC id="X">` sections of the alignment file;
  * each `r-p` token pair inside a section aligns token r of the report
  * sentence with token p of the passage sentence. */
object LeastOverlapReader extends DocumentReader[ParametricDocAlignment[DocumentBuilder]] {
  override def domain: String = "LeastOverlap"
  // read in documents from f1 and f2
  // go line by line in alignments.xml, make and accumulate alignments
  // every time you hit a new doc in alignments.xml, produce a DocAlignment
  override def getAlignedDocuments: Seq[ParametricDocAlignment[DocumentBuilder]] = {
    println("[LeastOverlapReader getAlignedDocuments] reading in documents...")
    val f1f = ParmaConfig.getFile("data.least-overlap.reports")
    val f2f = ParmaConfig.getFile("data.least-overlap.passages")
    Profiler.startTask("least-overlap:docs")
    // Index every communication from both files by its id.
    val docMap = Seq(f1f, f2f).flatMap(f => {
      println(f.getCanonicalPath)
      ConcreteWrapper.getCommunicationsFrom(f)
    }).map(d => (d.getGuid.getCommunicationId, d)).toMap
    val docTime = Profiler.endTask("least-overlap:docs")
    println("[LeastOverlapReader getAlignedDocuments] done, read in %d documents in %.1f seconds"
      .format(docMap.size, docTime / 1000d))
    val Doc = """<DOC id="(\S+)">""".r
    val Ignore = """(</?TEXT>|</DOC>)""".r
    val Alignment = """(\d+)-(\d+)""".r
    val alignments = new ArrayBuffer[ParametricDocAlignment[DocumentBuilder]]
    var alignmentSentence = 0 // running sentence index w.r.t. alignments
    var report: DocumentBuilder = null
    val reportHAs = new ArrayBuffer[HalfAlignment[Argument]]
    var passage: DocumentBuilder = null
    val passageHAs = new ArrayBuffer[HalfAlignment[Argument]]
    //val semiAlignments = new ArrayBuffer[SemiAlignment]
    // HIGH LEVEL IDEA:
    // 1) read in doc id, look up documents
    // 2) call PredArgSelector, identify preds/args
    // 3) step through alignments, if matches existing pred/arg make HalfAlignment
    // 4) call AnnotationAligner.makeDocAlignment using HalfAlignments accumulated
    val af = ParmaConfig.getFile("data.least-overlap.alignments")
    var corefSet = 0
    val r = FileUtils.getReader(af)
    while (r.ready) {
      r.readLine.trim match {
        case Ignore(s) => {}
        // ==== NEW DOC PAIR ====
        case Doc(id) => { // ids in alignment file don't have .a and .b extensions
          assert((report == null) == (passage == null))
          if (report != null) {
            // dump previous alignment
            // 4) call AnnotationAligner.makeDocAlignment using HalfAlignments accumulated
            alignments += AnnotationAligner.makeDocAlignment(
              report, reportHAs.toSeq,
              passage, passageHAs.toSeq,
              Some(domain), addPredArgs=false, strict=false)
          }
          // 1) read in doc id, look up documents
          // 2) call PredArgSelector, identify preds/args
          report = new RichConcreteDocBuilder(id + ".a", docMap(id + ".a"))
          report = PredArgSelector.identifyPredicatesAndArguments(report)
          reportHAs.clear
          // NOTE(review): only the passage goes through toBuilder.build — the
          // report above does not; confirm the asymmetry is intentional.
          passage = new RichConcreteDocBuilder(id + ".b", docMap(id + ".b").toBuilder.build)
          passage = PredArgSelector.identifyPredicatesAndArguments(passage)
          passageHAs.clear
          alignmentSentence = 0
          corefSet = 0
        }
        // ==== ALIGNMENT LINE ====
        case s => {
          assert(report != null && passage != null)
          // 3) step through alignments, if matches existing pred/arg make HalfAlignment
          Alignment.findAllMatchIn(s).foreach(m => {
            val rTokIdx = m.group(1).toInt
            val pTokIdx = m.group(2).toInt
            // we are assuming these are all sure alignments because
            // we have no scores to threshold on
            val isSure = true
            // Each token pair gets its own coref set id, shared by the two halves.
            corefSet += 1
            // REPORT HalfAlignment
            //val rPredArg = report.predOrArgMatching(MentionBuilder.from(alignmentSentence, rTokIdx))
            val rPredArg = predOrArgMatching(MentionBuilder.from(alignmentSentence, rTokIdx), report)
            rPredArg match {
              case Some(epa) =>
                //println("[LeastOverlap] adding REPORT HA: " + epa)
                reportHAs += new HalfAlignment(epa, corefSet.toString, isSure)
              case None => {} // no pred or arg matching this token
            }
            // PASSAGE HalfAlignment
            //val pPredArg = passage.predOrArgMatching(MentionBuilder.from(alignmentSentence, pTokIdx))
            val pPredArg = predOrArgMatching(MentionBuilder.from(alignmentSentence, pTokIdx), passage)
            pPredArg match {
              case Some(epa) =>
                //println("[LeastOverlap] adding PASSAGE HA: " + epa)
                passageHAs += new HalfAlignment(epa, corefSet.toString, isSure)
              case None => {} // no pred or arg matching this token
            }
          })
          alignmentSentence += 1
        }
      }
    }
    // Bug fix: the loop above only emits a DocAlignment when it sees the *next*
    // <DOC> header, so without this flush the final document pair in the
    // alignment file was silently dropped.
    if (report != null) {
      alignments += AnnotationAligner.makeDocAlignment(
        report, reportHAs.toSeq,
        passage, passageHAs.toSeq,
        Some(domain), addPredArgs=false, strict=false)
    }
    r.close
    alignments.toSeq
  }
  /** Returns the argument (preferred) or predicate located exactly at mention
    * `m` in document `d`, if any. */
  def predOrArgMatching(m: Mention, d: Document): Option[Either[Predicate, Argument]] = {
    val p = d.predicates.filter(_.location == m).map(Left(_))
    val a = d.arguments.filter(_.location == m).map(Right(_))
    (a ++ p).headOption
  }
}
| hltcoe/parma | src/main/scala/edu/jhu/hlt/parma/input/LeastOverlapReader.scala | Scala | bsd-2-clause | 5,327 |
package lagcomp
import scala.concurrent._
import transport.ConnectionHandle
import scala.util.Random
/** Synchronises a shared clock between two peers over `connection`.
  *
  * Each side sends a random Long on construction; the side with the smaller
  * value becomes P1 and requests the time, the other becomes P2 and replies.
  * Both then expose a zero-argument "global time" function. The `* 60 / 1000`
  * converts elapsed milliseconds to ticks at 60 per second; the +500 ms
  * appears to start the shared clock half a second ahead — TODO confirm.
  */
class ClockSync(connection: ConnectionHandle, localTime: () => Long) {
  // Current handshake state; advanced as messages arrive via receive().
  var behavior: (String => Unit) = greeting _
  private val globalTimePromise = Promise[() => Int]()
  private val identityPromise = Promise[Peer]()
  def futureGlobalTime: Future[() => Int] = globalTimePromise.future
  def futureIdentity: Future[Peer] = identityPromise.future
  // True until the handshake completes.
  def pending: Boolean = !globalTimePromise.isCompleted
  // Sent immediately so both peers can deterministically pick roles.
  // NOTE(review): if both peers draw the same Long (astronomically unlikely),
  // both take the "giving" branch and the handshake never completes.
  val localRandom = Random.nextLong()
  connection.write(localRandom.toString)
  def receive(pickle: String): Unit = {
    behavior(pickle)
  }
  // First message: compare random numbers to decide who asks and who answers.
  def greeting(pickle: String): Unit = {
    val remoteRandom = pickle.toLong
    if(localRandom < remoteRandom) {
      behavior = askingTime(localTime()) _
      connection.write("dummy")
    } else {
      behavior = givingTime _
    }
  }
  // P2: reply with our local time; our global-time epoch is "now" (the reply moment).
  def givingTime(pickle: String): Unit = {
    val now = localTime()
    connection.write(now.toString)
    identityPromise.success(P2)
    globalTimePromise.success(() => (localTime() - now + 500).toInt * 60 / 1000)
  }
  // P1: estimate one-way latency as half the round trip so our epoch lines up
  // with the moment P2 replied. The reply payload itself is ignored.
  def askingTime(askedAt: Long)(pickle: String): Unit = {
    val now = localTime()
    val tripTime = (now - askedAt) / 2
    identityPromise.success(P1)
    globalTimePromise.success(() => (localTime() - now + tripTime + 500).toInt * 60 / 1000)
  }
}
| OlivierBlanvillain/scala-lag-comp | lag-comp/src/main/scala/ClockSync.scala | Scala | mit | 1,384 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.Continue
import monix.execution.cancelables.OrderedCancelable
import monix.execution.{Ack, Cancelable, Scheduler}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.Future
import scala.util.Success
/** An observable that, when `source` errors, resubscribes to it up to
  * `maxRetries` times; a negative `maxRetries` means retry forever. Once the
  * budget is exhausted, the last error is forwarded downstream. */
private[reactive] final class OnErrorRetryCountedObservable[+A](source: Observable[A], maxRetries: Long)
  extends Observable[A] {
  // Subscribes `subscriber` to the source for attempt `retryIdx`, wiring
  // onError to kick off the next attempt (if allowed).
  private def loop(subscriber: Subscriber[A], task: OrderedCancelable, retryIdx: Long): Unit = {
    val cancelable = source.unsafeSubscribeFn(new Subscriber[A] {
      implicit val scheduler: Scheduler = subscriber.scheduler
      // Guards against signals after a terminal event.
      private[this] var isDone = false
      // Last downstream acknowledgement; a retry proceeds only if it was Continue.
      private[this] var ack: Future[Ack] = Continue
      def onNext(elem: A) = {
        ack = subscriber.onNext(elem)
        ack
      }
      def onComplete(): Unit =
        if (!isDone) {
          isDone = true
          subscriber.onComplete()
        }
      def onError(ex: Throwable): Unit =
        if (!isDone) {
          isDone = true
          if (maxRetries < 0 || retryIdx < maxRetries) {
            // need asynchronous execution to avoid a synchronous loop
            // blowing out the call stack
            ack.onComplete {
              case Success(Continue) =>
                loop(subscriber, task, retryIdx + 1)
              case _ =>
                () // stop: downstream stopped or failed, so don't resubscribe
            }
          } else {
            // Retry budget exhausted: surface the last error downstream.
            subscriber.onError(ex)
          }
        }
    })
    // We need to do an `orderedUpdate`, because `onError` might have
    // already run and resubscribed by now; the highest retry index wins.
    task.orderedUpdate(cancelable, retryIdx)
    ()
  }
  def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
    val task = OrderedCancelable()
    loop(subscriber, task, retryIdx = 0)
    task
  }
}
| alexandru/monifu | monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/OnErrorRetryCountedObservable.scala | Scala | apache-2.0 | 2,510 |
package taczombie.client.view.main
import taczombie.client.view.tui.Tui
import com.google.inject.AbstractModule
import com.google.inject.Guice
import taczombie.client.view.gui.Gui
/** Guice module selecting the view implementation from the command line:
  * exactly one argument "tui" binds the text UI, anything else binds the GUI. */
class UiModule(ui: Array[String]) extends AbstractModule {
  def configure: Unit = {
    if (ui.length == 1 && ui(0) == "tui")
      bind(classOf[IView]).to(classOf[Tui])
    else
      bind(classOf[IView]).to(classOf[Gui])
  }
}
/** Application entry point: repeatedly opens a fresh view and runs it until
  * the view signals that no restart is wanted. */
object Main {
  def main(args: Array[String]): Unit = {
    val viewInjector = Guice.createInjector(new UiModule(args))
    var keepRunning = true
    while (keepRunning) {
      val view = viewInjector.getInstance(classOf[IView])
      view.open
      keepRunning = view.runBlocking
    }
  }
}
| mahieke/TacZombie | gui/src/main/scala/taczombie/client/view/main/Main.scala | Scala | gpl-2.0 | 690 |
package antike
import monocle.macros.Lenses
import monocle.Lens
import State._
import Ressource._
import Moves._
import WheelState._
import Utils._
// Immutable game state. @Lenses (monocle) generates one lens per field on the
// companion object; the move definitions in Moves compose those lenses.
@Lenses case class State(
  actives: Int, // units able to act — presumably; confirm against game rules
  idles: Int, // units that have already acted / are resting — confirm
  coins: Int,
  fees: Int, // decremented by wheel-rotation costs (see Move)
  marble: Ressource,
  iron: Ressource,
  gold: Ressource,
  wheelState: WheelState) // current position on the action wheel
// One resource track: current stock plus the cities and temples producing it
// (collect yields 1 per city and 2 per temple — see Moves.collect).
@Lenses case class Ressource(
  amount: Int,
  cities: Int,
  temples: Int)
/** A playable move: rotating the action wheel to position `ws` (possibly
  * paying a fee) and then applying `action` to the state. */
class Move(val ws: WheelState, val action: State => State) {
  def apply(previousState: State): State = {
    // Fee for rotating the wheel from the previous position to `ws`:
    // sub-actions do not move the wheel, a repeated Temple stays in place,
    // and the very first rotation (from the initial state) is free.
    val fee: State => State = (previousState.wheelState, ws) match {
      case (_, SubActionWS) => identity
      case (TempleWS, TempleWS) => identity
      case (InitialWS, newWS) => wheelState set newWS
      case (prevWs, newWS) =>
        // Up to 3 forward steps are free; each extra step subtracts 1 from
        // `fees`. `infinitWheel` holds two laps so the forward search never
        // needs to wrap around.
        val prevIndex = infinitWheel.indexOf(prevWs)
        val newIndex = infinitWheel.indexOf(newWS, prevIndex + 1)
        val cost = 0 max ((newIndex - prevIndex) - 3)
        wheelState set newWS andThen (fees modify (_ - cost))
    }
    fee andThen action apply previousState
  }
}
object Moves {
  // Harvest a resource: each city yields 1 unit, each temple 2; plus one coin.
  def collect(ressource: RessourceLens): State => State = chain(
    ressource.modify(r => amount.modify(_ + r.cities + 2 * r.temples)(r)),
    coins.modify(_ + 1))
  // Pay `amountSpent` from the given resource; any shortfall is drawn from
  // coins, which may go negative (untilBroke relies on that to detect ruin).
  def spend(amountSpent: Int, ressource: RessourceLens): State => State = { s =>
    val initialAmount = ressource composeLens amount get s
    val newAmount = 0 max (initialAmount - amountSpent)
    val initialCoins = s.coins
    // Since initialAmount + initialCoins - amountSpent = newAmount + newCoins
    val newCoins = initialAmount + initialCoins - amountSpent - newAmount
    ressource composeLens amount set newAmount andThen (coins set newCoins) apply s
  }
  // Repeat `m` while it remains affordable; the application that would drive
  // coins negative is discarded.
  def untilBroke(m: State => State): State => State = s =>
    if(m(s).coins < 0) s else untilBroke(m)(m(s))
  // Found a city on the given resource track: costs one of each resource and
  // converts one active unit into an idle one.
  def buildCity(ressource: RessourceLens): State => State = chain(
    ressource composeLens cities modify (_ + 1),
    spend(1, marble),
    spend(1, iron),
    spend(1, gold),
    actives.modify(_ - 1),
    idles.modify(_ + 1))
  // Build a temple on the given track. NOTE(review): the cost is always
  // 5 marble, regardless of which resource the temple boosts — confirm this
  // matches the intended game rule.
  def buildTemple(ressource: RessourceLens): State => State = chain(
    ressource composeLens temples modify (_ + 1),
    spend(5, marble))
  // Harvest moves (one per resource wheel position).
  case object Marble extends Move(MarbleWS, collect(marble))
  case object Iron extends Move(IronWS, collect(iron))
  case object Gold extends Move(GoldWS, collect(gold))
  // Science currently has no state effect beyond rotating the wheel.
  case object Science extends Move(ScienceWS, identity)
  // Movement: all idle units become active again.
  case object Movement1 extends Move(Movement1WS, { s => actives.modify(_ + s.idles) andThen idles.set(0) apply s })
  case object Movement2 extends Move(Movemnt2WS, Movement1.action)
  // Temple moves: build 1-3 temples on a single track in one action.
  case object TempleM extends Move(TempleWS, buildTemple(marble))
  case object TempleI extends Move(TempleWS, buildTemple(iron))
  case object TempleG extends Move(TempleWS, buildTemple(gold))
  case object TempleM2 extends Move(TempleWS, buildTemple(marble) andThen buildTemple(marble))
  case object TempleI2 extends Move(TempleWS, buildTemple(iron) andThen buildTemple(iron))
  case object TempleG2 extends Move(TempleWS, buildTemple(gold) andThen buildTemple(gold))
  case object TempleM3 extends Move(TempleWS, buildTemple(marble) andThen buildTemple(marble) andThen buildTemple(marble))
  case object TempleI3 extends Move(TempleWS, buildTemple(iron) andThen buildTemple(iron) andThen buildTemple(iron))
  case object TempleG3 extends Move(TempleWS, buildTemple(gold) andThen buildTemple(gold) andThen buildTemple(gold))
  // Army recruitment: buy units at 2 iron each until broke; Bellona grants one extra.
  case object Bellona extends Move(ArmyWS, chain(Army.action, idles.modify(_ + 1)))
  case object Army extends Move(ArmyWS, untilBroke(spend(2, iron) andThen idles.modify(_ + 1)))
  // City founding is a sub-action: it does not rotate the wheel.
  case object CityM extends Move(SubActionWS, buildCity(marble))
  case object CityI extends Move(SubActionWS, buildCity(iron))
  case object CityG extends Move(SubActionWS, buildCity(gold))
}
object WheelState {
  // Positions on the action wheel; the board order is given by gameWheel below.
  sealed trait WheelState
  case object IronWS extends WheelState
  case object TempleWS extends WheelState
  case object GoldWS extends WheelState
  case object Movemnt2WS extends WheelState
  case object ArmyWS extends WheelState
  case object MarbleWS extends WheelState
  case object ScienceWS extends WheelState
  case object Movement1WS extends WheelState
  // Special cases:
  // SubActionWS marks moves that never rotate the wheel; InitialWS is the
  // pre-game position (the first rotation from it is free — see Move).
  case object SubActionWS extends WheelState
  case object InitialWS extends WheelState
  // Board order of the wheel; Move uses the indices here to price rotations,
  // so this ordering is semantically significant — do not reorder.
  val gameWheel = List(IronWS, TempleWS, GoldWS, Movemnt2WS, ArmyWS, MarbleWS, ScienceWS, Movement1WS)
  // Two laps, so a forward distance can be found without modular wrap-around.
  val infinitWheel = gameWheel ::: gameWheel
}
object Utils {
  // Orderings so wheel states / resources / states can be sorted or used in
  // ordered collections. WheelState order is lexicographic on the object name.
  implicit val wheelStateOrdering: Ordering[WheelState] = Ordering.by(_.toString)
  implicit val ressourceOrdering: Ordering[Ressource] = Ordering.by(Ressource unapply _)
  implicit val stateOrdering: Ordering[State] = Ordering.by(State unapply _)
  // Left-to-right composition of a variable number of transformations.
  def chain[A](f: A => A*) = Function.chain(f)
  // Shorthand for a lens focusing one resource track inside the game state.
  type RessourceLens = Lens[State, Ressource]
}
| OlivierBlanvillain/Antike | src/main/scala/Game.scala | Scala | mit | 4,832 |
package wakfutcp.protocol.common
import cats.syntax.apply._
import wakfutcp.protocol.Codec
// A game proxy (world server) entry as advertised to the client.
// NOTE(review): field meanings inferred from names — confirm against the
// wire-protocol documentation.
final case class Proxy(
  id: Int,
  name: String,
  community: Community,
  server: ProxyServer,
  order: Byte
)
// Network endpoint of a proxy: an address plus its ports.
// NOTE(review): Array[Int] breaks the structural equality normally provided
// by case classes — confirm equality on ProxyServer is never relied upon.
final case class ProxyServer(address: String, ports: Array[Int])
object ProxyServer {
  import Codec._
  // Wire format: length-prefixed UTF-8 address, then an int-counted array of int ports.
  implicit val codec: Codec[ProxyServer] =
    (utf8(int), array(int, int))
      .imapN(apply)(Function.unlift(unapply))
}
object Proxy {
  import Codec._
  // Wire format: int id, length-prefixed UTF-8 name, community, server, order byte —
  // composed invariantly from the case-class constructor/extractor.
  implicit val codec: Codec[Proxy] =
    (int, utf8(int), Community.codec, ProxyServer.codec, byte)
      .imapN(apply)(Function.unlift(unapply))
}
| OpenWakfu/wakfutcp | protocol/src/main/scala/wakfutcp/protocol/common/Proxy.scala | Scala | mit | 619 |
package ru.svd.medan.presentation.web
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json._
/**
 * Created by severian on 21.05.17.
 *
 * spray-json marshalling support for the web layer: JSON formats for the
 * domain model (Patient, Diagnosis, Symptom, Complaint, ClinicalCase) and
 * for the Id wrapper.
 *
 * Conventions: an `Id` is written as a plain JSON string; an `Option[Id]`
 * writes `None` as JSON null and reads null (or an absent field, which the
 * record formats default to JsNull) back as `None`.
 */
trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol {

  import ru.svd.medan.application._

  implicit val ageFormat: RootJsonFormat[Age] = jsonFormat1(Age)
  implicit val sexFormat: RootJsonFormat[Sex] = jsonFormat1(Sex)
  implicit val patientFormat: RootJsonFormat[Patient] = jsonFormat2(Patient)

  implicit val idOptionalFormat = new IdOptionalFormat
  implicit val idFormat = new IdFormat
  implicit val diagnosisFormat = new DiagnosisFormat
  implicit val symptomFormat = new SymptomFormat
  implicit val complaintFormat = new ComplaintFormat
  implicit val clinicalCaseFormat = new ClinicalCaseFormat

  /** `Option[Id]` as a JSON string, with `None` represented by JSON null. */
  class IdOptionalFormat extends JsonFormat[Option[Id]] {
    override def write(obj: Option[Id]): JsValue = obj match {
      case Some(id) => JsString(id.value)
      case _ => JsNull
    }

    override def read(json: JsValue): Option[Id] = json match {
      case JsString(value) => Some(Id(value))
      // Bug fix: `write` emits JsNull for None, and the record formats below
      // default missing "id" fields to JsNull, so null must round-trip to
      // None instead of raising a deserialization error.
      case JsNull => None
      case _ => deserializationError("expected string value for id")
    }
  }

  /** A mandatory `Id` as a JSON string. */
  class IdFormat extends JsonFormat[Id] {
    override def write(obj: Id): JsValue = JsString(obj.value)

    override def read(json: JsValue): Id = json match {
      case JsString(value) => Id(value)
      case _ => deserializationError("expected string value for id")
    }
  }

  /** Diagnosis as {"id": string|null, "name": string}. */
  class DiagnosisFormat extends RootJsonFormat[Diagnosis] {
    override def write(obj: Diagnosis): JsValue = JsObject(
      "id" -> idOptionalFormat.write(obj.id),
      "name" -> JsString(obj.name)
    )

    override def read(json: JsValue): Diagnosis = json match {
      case o@JsObject(_) =>
        Diagnosis(
          // Absent fields default to JsNull; Option[Id] maps that to None.
          o.fields.getOrElse("id", JsNull).convertTo[Option[Id]],
          o.fields.getOrElse("name", JsNull).convertTo[String]
        )
      case _ => deserializationError("unexpected json value for Diagnosis type")
    }
  }

  /** Symptom as {"id": string|null, "name": string}. */
  class SymptomFormat extends RootJsonFormat[Symptom] {
    override def write(obj: Symptom): JsValue = JsObject(
      "id" -> idOptionalFormat.write(obj.id),
      "name" -> JsString(obj.name)
    )

    override def read(json: JsValue): Symptom = json match {
      case o@JsObject(_) =>
        Symptom(
          o.fields.getOrElse("id", JsNull).convertTo[Option[Id]],
          o.fields.getOrElse("name", JsNull).convertTo[String]
        )
      case _ => deserializationError("unexpected json value for Symptom type")
    }
  }

  /** Complaint as {"id": string|null, "name": string}. */
  class ComplaintFormat extends RootJsonFormat[Complaint] {
    override def write(obj: Complaint): JsValue = JsObject(
      "id" -> idOptionalFormat.write(obj.id),
      "name" -> JsString(obj.name)
    )

    override def read(json: JsValue): Complaint = json match {
      case o@JsObject(_) =>
        Complaint(
          o.fields.getOrElse("id", JsNull).convertTo[Option[Id]],
          o.fields.getOrElse("name", JsNull).convertTo[String]
        )
      case _ => deserializationError("unexpected json value for Complaint type")
    }
  }

  /** ClinicalCase with symptom/complaint/diagnosis references stored as ids. */
  class ClinicalCaseFormat extends RootJsonFormat[ClinicalCase] {
    override def write(obj: ClinicalCase): JsValue = obj match {
      case ClinicalCase(id, patient, symptoms, complaints, diagnosis) =>
        JsObject(
          "id" -> id.toJson,
          "patient" -> patient.toJson,
          "symptoms" -> symptoms.toJson,
          "complaints" -> complaints.toJson,
          "diagnosis" -> diagnosis.toJson
        )
      case _ => serializationError("unexpected value for ClinicalCase type")
    }

    override def read(json: JsValue): ClinicalCase = json match {
      case o@JsObject(_) =>
        ClinicalCase(
          o.fields.getOrElse("id", JsNull).convertTo[Option[Id]],
          o.fields.getOrElse("patient", JsNull).convertTo[Patient],
          o.fields.getOrElse("symptoms", JsNull).convertTo[List[Id]],
          o.fields.getOrElse("complaints", JsNull).convertTo[List[Id]],
          o.fields.getOrElse("diagnosis", JsNull).convertTo[Id]
        )
      case _ => deserializationError("unexpected json value for ClinicalCase type")
    }
  }

  object AllFormats {
    /** Serializes with an explicitly supplied format (bypasses call-site implicits). */
    def toJson[T](obj: T)(implicit format: JsonFormat[T]): JsValue = {
      format.write(obj)
    }
  }
}
| severiand/medan | src/main/scala/ru/svd/medan/presentation/web/JsonSupport.scala | Scala | apache-2.0 | 4,298 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend
import org.scalatest.{ FlatSpec, Matchers }
import org.typelevel.scalatest.{ DisjunctionMatchers, ValidationMatchers }
import scala.concurrent.ExecutionContext
/** Shared base class for unit tests: ScalaTest FlatSpec style with standard
  * matchers plus typelevel Validation/Disjunction matchers mixed in. */
abstract class TestSpec extends FlatSpec with Matchers with ValidationMatchers with DisjunctionMatchers {
  // The global pool is acceptable for tests; production code should inject its own EC.
  implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
}
| dnvriend/type-classes-example | src/test/scala/com/github/dnvriend/TestSpec.scala | Scala | apache-2.0 | 986 |
package se.culvertsoft.mgen.visualdesigner.view.autobox2
import se.culvertsoft.mgen.visualdesigner.classlookup.Type2String
import se.culvertsoft.mgen.visualdesigner.control.Controller
import se.culvertsoft.mgen.visualdesigner.model.CustomTypeField
import se.culvertsoft.mgen.visualdesigner.model.EntityId
import se.culvertsoft.mgen.visualdesigner.model.EntityIdBase
/** Renders entity ids as human-readable labels for the auto-box views. */
object Entity2String {

  /** Short label: a field renders as "OwningClass.fieldName" (with a
    * placeholder when the owner is unknown), any other entity renders as its
    * plain name, and an unresolvable id renders as the empty string. */
  def short(id: EntityIdBase, controller: Controller): String = {
    implicit val model = controller.model
    model.getEntity(id) match {
      case Some(field: CustomTypeField) =>
        val owner = controller.model.parentOf(field) match {
          case Some(clas) => clas.getName()
          case _ => "<unknown_class>"
        }
        s"$owner.${field.getName()}"
      case Some(entity) =>
        entity.getName()
      case _ => ""
    }
  }

  /** Long label: the entity's fully qualified class path, or "" when the id
    * cannot be resolved. */
  def long(id: EntityIdBase, controller: Controller): String = {
    implicit val model = controller.model
    model.getEntity(id) match {
      case Some(entity) => Type2String.getClassPath(entity)
      case _ => ""
    }
  }
}
/** An auto-completion box item backed by a model entity; display strings are
  * derived from the entity via Entity2String. */
case class EntityAutoBoxItem(val id: EntityIdBase, controller: Controller) extends AutoBoxItem {
  override def deepCopy(): EntityAutoBoxItem = {
    EntityAutoBoxItem(id.deepCopy(), controller)
  }
  // Short label (e.g. "Class.field") shown in the box.
  override def toString(): String = {
    Entity2String.short(id, controller)
  }
  // Fully qualified path shown as the tooltip.
  override def tooltipString(): String = {
    Entity2String.long(id, controller)
  }
  // NOTE(review): returns null rather than an Option — callers of the
  // AutoBoxItem interface apparently expect null to mean "nothing to remove".
  override def removeLast(): EntityAutoBoxItem = {
    null
  }
  // An entity item is already complete, so completion just adopts the candidate.
  override def completeWith(t: AutoBoxItem): AutoBoxItem = {
    t.deepCopy()
  }
  override def isComplete(): Boolean = {
    true
  }
  // Case-insensitive: true when `s` begins with this item's short label.
  override def matches(s: String): Boolean = {
    s.toLowerCase().startsWith(toString().toLowerCase())
  }
  override def idString(): String = id.toString()
}
package org.talkingpuffin.util
import org.joda.time.DateTime
case class CachedExpandedUrl(lastUsed: DateTime, url: Option[String])
| dcbriccetti/talking-puffin | common/src/main/scala/org/talkingpuffin/util/CachedExpandedUrl.scala | Scala | mit | 133 |
package tap.types.kinds
import tap.Id
import tap.types._
import scala.annotation.tailrec
/** The kind of a type: `*`, a kind arrow, or a kind variable. */
sealed trait Kind
/** `*`: the kind of concrete, fully applied types. */
case object Star extends Kind
/** `x -> y`: the kind of a type constructor taking an `x` to produce a `y`. */
case class Kfun(x: Kind, y: Kind) extends Kind
/** A kind variable used during kind inference, tagged with an owning Id `q`
  * (presumably the declaring definition — TODO confirm). */
case class Kvar(q: Id, id: String) extends Kind
object Kind {
    /**
     * Finds the kind of a type.
     *
     * Forall types have the kind of their body; for applications the
     * constructor must have an arrow kind, whose result is returned.
     * Throws Error for an over-applied TAp (constructor of kind `*`) and
     * when called on a TGen, which carries no kind information.
     */
    def kind(t: Type): Kind = t match {
        case TVar(_, k) => k
        case TCon(_, k) => k
        case Forall(_, _, t) => kind(t)
        case TAp(t, _) => kind(t) match {
            case Kfun(_, k) => k
            case _ => throw new Error("kind * found on TAp type")
        }
        case _: TGen => throw new Error("kind called on TGen")
    }
    /**
     * Counts how many type arguments a kind accepts, i.e. the number of
     * Kfun arrows before reaching Star. Throws Error when a kind variable
     * is encountered, since its arity is not yet known.
     */
    @tailrec final def arity(k: Kind, depth: Int = 0): Int = k match {
        case Star => depth
        case Kfun(_, k) => arity(k, depth + 1)
        case _: Kvar => throw new Error("arity called on Kvar")
    }
} | garyb/tap | src/main/scala/tap/types/kinds/Kind.scala | Scala | mit | 874 |
package mimir.sql;
import java.sql.{ResultSet,ResultSetMetaData,Blob,Clob,SQLFeatureNotSupportedException,NClob,Ref,RowId,SQLXML,Statement,Time,Timestamp,SQLWarning,SQLException};
import java.net.URL
import java.io._;
import java.util.{Calendar};
import collection.JavaConversions._;
import net.sf.jsqlparser.statement.select.Select;
import net.sf.jsqlparser.statement.create.table.CreateTable;
import net.sf.jsqlparser.parser.CCJSqlParser;
import edu.buffalo.cse562.Schema
import edu.buffalo.cse562.optimizer.Optimizer
import edu.buffalo.cse562.eval.{PlanCompiler,Operator}
import edu.buffalo.cse562.data.Datum
import mimir.algebra.Type;
/**
 * ResultSetMetaData describing the schema of a CSV-backed query result.
 *
 * @param nameIDs map from column name to its 1-based column index (as built
 *                by CSVBackend.execute)
 * @param sch     java.sql.Types code for each column, in column order
 *                (0-based list)
 */
class CSVMetaData(nameIDs: java.util.Map[String, Int], sch: List[Int]) extends ResultSetMetaData
{
  // Column names ordered by their 1-based index; the list itself is 0-based.
  // (sortBy already uses the default Ordering[Int].)
  val names = nameIDs.toList.sortBy(_._2).map(_._1)

  def getCatalogName(x: Int): String = "Joe"     // placeholder: no real catalog
  def getColumnClassName(x: Int): String = "Joe" // placeholder: no real class name
  def getColumnCount(): Int = sch.length
  def getColumnDisplaySize(x: Int): Int = 20
  // FIX: JDBC column indices are 1-based (the first column is 1), so the
  // 0-based `names`/`sch` lists must be indexed with x-1. Previously `x` was
  // used directly, returning the wrong column and overflowing on the last one.
  def getColumnLabel(x: Int): String = names(x-1)
  def getColumnName(x: Int): String = names(x-1)
  def getColumnType(x: Int): Int = sch(x-1)
  def getColumnTypeName(x: Int): String = sch(x-1) match {
    case java.sql.Types.BOOLEAN => "BOOLEAN"
    case java.sql.Types.INTEGER => "INTEGER"
    case java.sql.Types.DECIMAL => "DECIMAL"
    case java.sql.Types.DATE => "DATE"
    case java.sql.Types.CHAR => "CHAR"
  }
  def getPrecision(x: Int): Int = 100
  def getScale(x: Int): Int = 100
  def getSchemaName(x: Int): String = "Joe"      // placeholder
  def getTableName(x: Int): String = "Jim"       // placeholder
  def isAutoIncrement(x: Int): Boolean = false
  def isCaseSensitive(x: Int): Boolean = true
  def isCurrency(x: Int): Boolean = false
  def isDefinitelyWritable(x: Int): Boolean = false
  def isNullable(x: Int): Int = ResultSetMetaData.columnNullable
  def isReadOnly(x: Int): Boolean = true
  def isSearchable(x: Int): Boolean = true
  def isSigned(x: Int): Boolean = true
  def isWritable(x: Int): Boolean = false
  def isWrapperFor(x: Class[_]): Boolean = false
  def unwrap[T](x: Class[T]): T = sys.error("Unimplemented: Unwrap on CSV Result Set")
}
/**
 * Forward-only, read-only JDBC ResultSet over the rows produced by a
 * compiled CSV query plan. Cursor movement other than next(), and every
 * update method, throws SQLFeatureNotSupportedException.
 *
 * @param src   compiled query plan; src.read() yields one row per call, or
 *              null once the input is exhausted
 * @param names map from column name to 1-based column index
 * @param sch   java.sql.Types code for each column (0-based list)
 */
class CSVResults(src: Operator, names: java.util.Map[String, Int], sch: List[Int]) extends ResultSet
{
  var done = false;              // set once close() is called
  var row: Array[Datum] = null;  // current row; null before first next() and after EOF
  var idx = 0;                   // 1-based number of the current row; 0 before the first row

  // Single funnel for every JDBC operation this cursor does not support.
  def feature(msg: String) =
    throw new SQLFeatureNotSupportedException(msg);

  def absolute(row: Int): Boolean = feature("absolute")
  def afterLast(): Unit = feature("afterLast")
  def beforeFirst(): Unit = feature("beforeFirst")
  def cancelRowUpdates(): Unit = feature("cancelRowUpdates")
  def clearWarnings(): Unit = feature("clearWarnings")
  def close(): Unit = { done = true; }
  def deleteRow(): Unit = feature("deleteRow")
  // 1-based column index for a label, straight from the names map.
  def findColumn(columnLabel: String): Int = names.get(columnLabel)
  def first(): Boolean = feature("first")
  def getArray(columnIndex: Int): java.sql.Array = feature("array")
  def getArray(columnLabel: String): java.sql.Array = feature("array")
  def getAsciiStream(columnIndex: Int): InputStream = feature("getAsciiStream")
  def getAsciiStream(columnLabel: String): InputStream = feature("getAsciiStream")
  def getBigDecimal(columnIndex: Int): java.math.BigDecimal = feature("getBigDecimal")
  def getBigDecimal(columnIndex: Int, scale: Int): java.math.BigDecimal = feature("getBigDecimal")
  def getBigDecimal(columnLabel: String): java.math.BigDecimal = feature("getBigDecimal")
  def getBigDecimal(columnLabel: String, scale: Int): java.math.BigDecimal = feature("getBigDecimal")
  def getBinaryStream(columnIndex: Int): InputStream = feature("getBinaryStream")
  def getBinaryStream(columnLabel: String): InputStream = feature("getBinaryStream")
  def getBlob(columnIndex: Int): Blob = feature("getBlob")
  def getBlob(columnLabel: String): Blob = feature("getBlob")
  // All accessors below take 1-based JDBC indices; `row` is 0-based.
  def getBoolean(columnIndex: Int): Boolean = row(columnIndex-1).toBool()
  def getBoolean(columnLabel: String): Boolean = getBoolean(findColumn(columnLabel))
  def getByte(columnIndex: Int): Byte = row(columnIndex-1).toInt().toByte
  def getByte(columnLabel: String): Byte = getByte(findColumn(columnLabel))
  def getBytes(columnIndex: Int): Array[Byte] = row(columnIndex-1).toString().getBytes()
  def getBytes(columnLabel: String): Array[Byte] = getBytes(findColumn(columnLabel))
  def getCharacterStream(columnIndex: Int): Reader = feature("getCharacterStream")
  def getCharacterStream(columnLabel: String): Reader = feature("getCharacterStream")
  def getClob(columnIndex: Int): Clob = feature("getClob")
  def getClob(columnLabel: String): Clob = feature("getClob")
  def getConcurrency(): Int = ResultSet.CONCUR_READ_ONLY
  def getCursorName(): String = "Bob"  // placeholder: cursors are anonymous
  // Throws MatchError when the column does not hold a Datum.Date.
  def getDate(columnIndex: Int): java.sql.Date = {
    row(columnIndex-1) match {
      case d: Datum.Date =>
        return java.sql.Date.valueOf(d.toString())
    }
  }
  // FIX: previously delegated to getDate(columnIndex-1), decrementing the
  // 1-based index a second time (the single-arg overload already does -1).
  // The calendar argument is ignored.
  def getDate(columnIndex: Int, cal: java.util.Calendar): java.sql.Date = getDate(columnIndex);
  def getDate(columnLabel: String): java.sql.Date = getDate(findColumn(columnLabel));
  def getDate(columnLabel: String, cal: java.util.Calendar): java.sql.Date = getDate(findColumn(columnLabel), cal);
  def getDouble(columnIndex: Int): Double = row(columnIndex-1).toFloat()
  def getDouble(columnLabel: String): Double = getDouble(findColumn(columnLabel))
  def getFetchDirection(): Int = ResultSet.FETCH_FORWARD
  def getFetchSize(): Int = 1
  // FIX: previously truncated via toInt(); derive from the floating value so
  // fractional parts survive, matching getDouble.
  def getFloat(columnIndex: Int): Float = getDouble(columnIndex).toFloat
  def getFloat(columnLabel: String): Float = getFloat(findColumn(columnLabel))
  def getHoldability(): Int = ResultSet.CLOSE_CURSORS_AT_COMMIT
  def getInt(columnIndex: Int): Int = row(columnIndex-1).toInt()
  def getInt(columnLabel: String): Int = getInt(findColumn(columnLabel))
  // Note: Datum exposes only toInt(), so long values are narrowed to Int range.
  def getLong(columnIndex: Int): Long = row(columnIndex-1).toInt()
  def getLong(columnLabel: String): Long = getLong(findColumn(columnLabel))
  def getMetaData(): ResultSetMetaData = new CSVMetaData(names,sch)
  def getNCharacterStream(columnIndex: Int): Reader = feature("getNCharacterStream")
  def getNCharacterStream(columnLabel: String): Reader = feature("getNCharacterStream")
  def getNClob(columnIndex: Int): NClob = feature("getNClob")
  def getNClob(columnLabel: String): NClob = feature("getNClob")
  def getNString(columnIndex: Int): String = feature("getNString")
  def getNString(columnLabel: String): String = feature("getNString")
  def getObject(columnIndex: Int): Object = feature("getObject")
  def getObject[T](columnIndex: Int, ty: Class[T]): T = feature("getObject")
  def getObject(columnIndex: Int, map: java.util.Map[String, Class[_]]): Object = feature("getObject")
  def getObject(columnLabel: String): Object = feature("getObject")
  def getObject[T](columnLabel: String, ty: Class[T]): T = feature("getObject")
  def getObject(columnLabel: String, map: java.util.Map[String, Class[_]]): Object = feature("getObject")
  def getRef(columnIndex: Int): Ref = feature("getRef")
  def getRef(columnLabel: String): Ref = feature("getRef")
  def getRow(): Int = idx
  def getRowId(columnIndex: Int): RowId = feature("getRowId")
  def getRowId(columnLabel: String): RowId = feature("getRowId")
  def getShort(columnIndex: Int): Short = row(columnIndex-1).toInt().toShort
  def getShort(columnLabel: String): Short = getShort(findColumn(columnLabel))
  def getSQLXML(columnIndex: Int): SQLXML = feature("getSQLXML")
  def getSQLXML(columnLabel: String): SQLXML = feature("getSQLXML")
  def getStatement(): Statement = null
  def getString(columnIndex: Int): String = row(columnIndex-1).toString()
  def getString(columnLabel: String): String = getString(findColumn(columnLabel))
  def getTime(columnIndex: Int): Time = feature("getTime")
  def getTime(columnIndex: Int, cal: Calendar): Time = feature("getTime")
  def getTime(columnLabel: String): Time = feature("getTime")
  def getTime(columnLabel: String, cal: Calendar): Time = feature("getTime")
  def getTimestamp(columnIndex: Int): Timestamp = feature("getTimestamp")
  def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = feature("getTimestamp")
  def getTimestamp(columnLabel: String): Timestamp = feature("getTimestamp")
  def getTimestamp(columnLabel: String, cal: Calendar): Timestamp = feature("getTimestamp")
  def getType(): Int = ResultSet.TYPE_FORWARD_ONLY
  def getUnicodeStream(columnIndex: Int): InputStream = feature("getUnicodeStream")
  def getUnicodeStream(columnLabel: String): InputStream = feature("getUnicodeStream")
  def getURL(columnIndex: Int): URL = feature("getURL")
  def getURL(columnLabel: String): URL = feature("getURL")
  def getWarnings(): SQLWarning = null
  def insertRow(): Unit = feature("insertRow")
  def isAfterLast(): Boolean = (row == null)
  def isBeforeFirst(): Boolean = false;  // approximation: position is not tracked before next()
  def isClosed(): Boolean = (done)
  def isFirst(): Boolean = (idx == 1)
  def isLast(): Boolean = src.done()
  def last(): Boolean = feature("last")
  def moveToCurrentRow(): Unit = feature("moveToCurrentRow")
  def moveToInsertRow(): Unit = feature("moveToInsertRow")
  // FIX: `idx` was never advanced, so getRow() always returned 0 and
  // isFirst() was always true. Count rows as they are read.
  def next(): Boolean = {
    row = src.read()
    if (row != null) { idx += 1 }
    row != null
  }
  def previous(): Boolean = feature("previous")
  def refreshRow(): Unit = {}
  def relative(rows: Int): Boolean = feature("relative")
  def rowDeleted(): Boolean = false
  def rowInserted(): Boolean = false
  def rowUpdated(): Boolean = false
  def setFetchDirection(direction: Int): Unit = feature("setFetchDirection")
  def setFetchSize(rows: Int): Unit = feature("setFetchSize")
  def updateArray(columnIndex: Int, x: java.sql.Array): Unit = feature("updateArray")
  def updateArray(columnLabel: String, x: java.sql.Array): Unit = feature("updateArray")
  def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = feature("updateAsciiStream")
  def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = feature("updateAsciiStream")
  def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = feature("updateAsciiStream")
  def updateAsciiStream(columnLabel: String, x: InputStream): Unit = feature("updateAsciiStream")
  def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = feature("updateAsciiStream")
  def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = feature("updateAsciiStream")
  def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = feature("updateBigDecimal")
  def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = feature("updateBigDecimal")
  def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = feature("updateBinaryStream")
  def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = feature("updateBinaryStream")
  def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = feature("updateBinaryStream")
  def updateBinaryStream(columnLabel: String, x: InputStream): Unit = feature("updateBinaryStream")
  def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = feature("updateBinaryStream")
  def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = feature("updateBinaryStream")
  def updateBlob(columnIndex: Int, x: Blob): Unit = feature("updateBlob")
  def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = feature("updateBlob")
  def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = feature("updateBlob")
  def updateBlob(columnLabel: String, x: Blob): Unit = feature("updateBlob")
  def updateBlob(columnLabel: String, inputStream: InputStream): Unit = feature("updateBlob")
  def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = feature("updateBlob")
  def updateBoolean(columnIndex: Int, x: Boolean): Unit = feature("updateBoolean")
  def updateBoolean(columnLabel: String, x: Boolean): Unit = feature("updateBoolean")
  def updateByte(columnIndex: Int, x: Byte): Unit = feature("updateByte")
  def updateByte(columnLabel: String, x: Byte): Unit = feature("updateByte")
  def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = feature("updateBytes")
  def updateBytes(columnLabel: String, x: Array[Byte]): Unit = feature("updateBytes")
  def updateCharacterStream(columnIndex: Int, x: Reader): Unit = feature("updateCharacterStream")
  def updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = feature("updateCharacterStream")
  def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = feature("updateCharacterStream")
  def updateCharacterStream(columnLabel: String, reader: Reader): Unit = feature("updateCharacterStream")
  def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = feature("updateCharacterStream")
  def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = feature("updateCharacterStream")
  def updateClob(columnIndex: Int, x: Clob): Unit = feature("updateClob")
  def updateClob(columnIndex: Int, reader: Reader): Unit = feature("updateClob")
  def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = feature("updateClob")
  def updateClob(columnLabel: String, x: Clob): Unit = feature("updateClob")
  def updateClob(columnLabel: String, reader: Reader): Unit = feature("updateClob")
  def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = feature("updateClob")
  def updateDate(columnIndex: Int, x: java.sql.Date): Unit = feature("updateDate")
  def updateDate(columnLabel: String, x: java.sql.Date): Unit = feature("updateDate")
  def updateDouble(columnIndex: Int, x: Double): Unit = feature("updateDouble")
  def updateDouble(columnLabel: String, x: Double): Unit = feature("updateDouble")
  def updateFloat(columnIndex: Int, x: Float): Unit = feature("updateFloat")
  def updateFloat(columnLabel: String, x: Float): Unit = feature("updateFloat")
  def updateInt(columnIndex: Int, x: Int): Unit = feature("updateInt")
  def updateInt(columnLabel: String, x: Int): Unit = feature("updateInt")
  def updateLong(columnIndex: Int, x: Long): Unit = feature("updateLong")
  def updateLong(columnLabel: String, x: Long): Unit = feature("updateLong")
  def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = feature("updateNCharacterStream")
  def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = feature("updateNCharacterStream")
  def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = feature("updateNCharacterStream")
  def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = feature("updateNCharacterStream")
  def updateNClob(columnIndex: Int, nClob: NClob): Unit = feature("updateNClob")
  def updateNClob(columnIndex: Int, reader: Reader): Unit = feature("updateNClob")
  def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = feature("updateNClob")
  def updateNClob(columnLabel: String, nClob: NClob): Unit = feature("updateNClob")
  def updateNClob(columnLabel: String, reader: Reader): Unit = feature("updateNClob")
  def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = feature("updateNClob")
  def updateNString(columnIndex: Int, nString: String): Unit = feature("updateNString")
  def updateNString(columnLabel: String, nString: String): Unit = feature("updateNString")
  def updateNull(columnIndex: Int): Unit = feature("updateNull")
  def updateNull(columnLabel: String): Unit = feature("updateNull")
  def updateObject(columnIndex: Int, x: Object): Unit = feature("updateObject")
  def updateObject(columnIndex: Int, x: Object, scaleOrLength: Int): Unit = feature("updateObject")
  def updateObject(columnLabel: String, x: Object): Unit = feature("updateObject")
  def updateObject(columnLabel: String, x: Object, scaleOrLength: Int): Unit = feature("updateObject")
  def updateRef(columnIndex: Int, x: Ref): Unit = feature("updateRef")
  def updateRef(columnLabel: String, x: Ref): Unit = feature("updateRef")
  def updateRow(): Unit = feature("updateRow")
  def updateRowId(columnIndex: Int, x: RowId): Unit = feature("updateRowId")
  def updateRowId(columnLabel: String, x: RowId): Unit = feature("updateRowId")
  def updateShort(columnIndex: Int, x: Short): Unit = feature("updateShort")
  def updateShort(columnLabel: String, x: Short): Unit = feature("updateShort")
  def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = feature("updateSQLXML")
  def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = feature("updateSQLXML")
  def updateString(columnIndex: Int, x: String): Unit = feature("updateString")
  def updateString(columnLabel: String, x: String): Unit = feature("updateString")
  def updateTime(columnIndex: Int, x: Time): Unit = feature("updateTime")
  def updateTime(columnLabel: String, x: Time): Unit = feature("updateTime")
  def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = feature("updateTimestamp")
  def updateTimestamp(columnLabel: String, x: Timestamp): Unit = feature("updateTimestamp")
  def wasNull(): Boolean = feature("wasNull");
  def isWrapperFor(x: Class[_]): Boolean = false
  def unwrap[T](x: Class[T]): T = sys.error("Unimplemented: Unwrap on CSV Result Set")
}
/** Factory that loads a CSV schema file and produces a ready-to-use backend. */
object CSV {

  /**
   * Builds a CSVBackend: creates the schema database rooted at `dataDir`,
   * then registers every CREATE TABLE statement found in `schemaFile`.
   */
  def apply(dataDir: String, schemaFile: String) =
  {
    val database = new Schema.Database()
    val schemaTranslator = new edu.buffalo.cse562.sql.SqlToRA(database)
    database.dataDir = new File(dataDir)

    // The parser yields one CREATE TABLE per call, and null when exhausted.
    val parser = new CCJSqlParser(new FileReader(schemaFile))
    Iterator
      .continually(parser.CreateTable())
      .takeWhile(_ != null)
      .foreach(schemaTranslator.loadTableSchema)

    new CSVBackend(database, schemaTranslator, new PlanCompiler(database))
  }
}
/**
 * Backend that evaluates SELECT statements directly over CSV files using the
 * edu.buffalo.cse562 relational-algebra evaluator. The backend is read-only:
 * all update and schema-introspection operations raise SQLException.
 */
class CSVBackend(
  db: Schema.Database, 
  translator: edu.buffalo.cse562.sql.SqlToRA,
  compiler: PlanCompiler
) extends Backend {
  /** Parses `sel` as a SELECT statement and evaluates it. */
  def execute(sel: String): ResultSet =
  {
    execute(new CCJSqlParser(new StringReader(sel)).Select());
  }
  /** Prepared/parameterized queries are not supported. */
  def execute(sel: String, args: List[String]): ResultSet =
  {
    throw new SQLException("No support for prepared queries");
  }
  /**
   * Translates the SELECT into a relational-algebra plan, optimizes it, and
   * wraps the compiled plan in a forward-only [[CSVResults]] cursor.
   */
  override def execute(sel: Select): ResultSet =
  {
    var plan = translator.selectToPlan(sel.getSelectBody())
    plan = Optimizer.optimize(plan, db)
    val sch = compiler.computeSchema(plan);
    new CSVResults(
      compiler.compile(plan),
      // Column name -> 1-based column index, per the JDBC convention.
      // (zipWithIndex replaces a mutable counter closure that depended on
      // map's left-to-right evaluation order.)
      sch.zipWithIndex.map { case (col, i) => (col.getName(), i + 1) }.toMap[String, Int],
      sch.types().map( _ match {
        case Schema.Type.BOOL => java.sql.Types.BOOLEAN
        case Schema.Type.INT => java.sql.Types.INTEGER
        case Schema.Type.FLOAT => java.sql.Types.DECIMAL
        case Schema.Type.DATE => java.sql.Types.DATE
        case Schema.Type.STRING => java.sql.Types.CHAR
      }).toList
    )
  }
  /** Updates are not supported on CSV data. */
  def update(op: String): Unit =
  {
    throw new SQLException("No support for updates on CSV data");
  }
  /** Batched updates are not supported on CSV data. */
  def update(upd: List[String]): Unit =
  {
    throw new SQLException("No support for updates on CSV data");
  }
  /** Parameterized updates are not supported on CSV data. */
  def update(op: String, args: List[String]): Unit =
  {
    throw new SQLException("No support for updates on CSV data");
  }
  /** Schema introspection is not supported on CSV data. */
  def getTableSchema(table: String): Option[List[(String, Type.T)]] =
  {
    throw new SQLException("No support for schemas on CSV data");
  }
  /** Table enumeration is not supported on CSV data. */
  def getAllTables() =
  {
    throw new SQLException("No support for all tables on CSV data");
  }
  /** No-op: this backend holds no closable state of its own. */
  def close() = {
  }
}
| Legacy25/mimir | mimircore/src/main/scala/mimir/sql/CSV.scala | Scala | apache-2.0 | 19,439 |
/* Copyright (C) 2011 Mikołaj Sochacki mikolajsochacki AT gmail.com
* This file is part of VRegister (Virtual Register - Wirtualny Dziennik)
* LICENCE: GNU AFFERO GENERAL PUBLIC LICENS Version 3 (AGPLv3)
* See: <http://www.gnu.org/licenses/>.
*/
package eu.brosbit.opos.model
import net.liftweb.mapper._
import _root_.net.liftweb.util._
import _root_.net.liftweb.common._
/** Lift Mapper entity holding one numbered, dated subject entry (full and
  * abbreviated name) — presumably a row of a subject change list in the
  * virtual register; confirm against callers. */
class SubjectChangeList extends LongKeyedMapper[SubjectChangeList] with IdPK {
  def getSingleton = SubjectChangeList
  object name extends MappedString(this, 40)  // subject name, max length 40
  object short extends MappedString(this, 5)  // abbreviated name, max length 5
  object nr extends MappedInt(this)           // entry number — semantics TODO confirm
  object date extends MappedDate(this)        // date of the entry — semantics TODO confirm
}
/** Meta/singleton companion supplying the persistence machinery. */
object SubjectChangeList extends SubjectChangeList with LongKeyedMetaMapper[SubjectChangeList] {
}
| mikolajs/osp | src/main/scala/eu/brosbit/opos/model/SubjectChangeList.scala | Scala | agpl-3.0 | 779 |
package com.github.j5ik2o.forseti.domain.auhtorizationCode
import scala.concurrent.{ExecutionContext, Future}
import scalaz.EitherT
/** Write-side persistence operations for [[AuthorizationCode]] entities.
  * All operations are asynchronous and report failures as a left Exception. */
trait AuthorizationCodeWriter {
  /** Persists the given authorization code. */
  def store(entity: AuthorizationCode)(
      implicit ec: ExecutionContext
  ): EitherT[Future, Exception, Unit]
  /** Deletes the authorization code with the given id. */
  def deleteById(id: AuthorizationCodeId)(
      implicit ec: ExecutionContext
  ): EitherT[Future, Exception, Unit]
  /** Deletes the authorization code whose value equals `value`. */
  def deleteByValue(value: String)(implicit ec: ExecutionContext): EitherT[Future, Exception, Unit]
}
| j5ik2o/forseti | domain/src/main/scala/com/github/j5ik2o/forseti/domain/auhtorizationCode/AuthorizationCodeWriter.scala | Scala | mit | 503 |
package com.arcusys.valamis.util.mustache
/** Signals a syntax error while parsing a mustache template.
  *
  * @param line 1-based line number where parsing failed
  * @param msg  description of the problem
  */
case class MustacheParseException(line: Int, msg: String)
  extends Exception(s"Line $line: $msg")
| ViLPy/Valamis | valamis-util/src/main/scala/com/arcusys/valamis/util/mustache/MustacheParseException.scala | Scala | lgpl-3.0 | 150 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.serializer.converters
import com.intel.analytics.bigdl.dllib.nn.VariableFormat
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.serializer.{DeserializeContext, SerializeContext}
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, DataType, VarFormat}
import scala.reflect.ClassTag
import scala.reflect.runtime.universe
/**
* DataConverter for [[com.intel.analytics.bigdl.dllib.nn.VariableFormat]]
*/
object VariableFormatConverter {
  // Decodes the serialized VarFormat enum back into a VariableFormat.
  // EMPTY_FORMAT decodes to null (meaning "no format was stored"); any enum
  // value outside this table fails fast with a MatchError.
  override def getAttributeValue[T: ClassTag](context: DeserializeContext, attribute: AttrValue)
    (implicit ev: TensorNumeric[T]): AnyRef = {
    val format = attribute.getVariableFormatValue
    format match {
      case VarFormat.DEFAULT => VariableFormat.Default
      case VarFormat.ONE_D => VariableFormat.ONE_D
      case VarFormat.IN_OUT => VariableFormat.IN_OUT
      case VarFormat.OUT_IN => VariableFormat.OUT_IN
      case VarFormat.IN_OUT_KW_KH => VariableFormat.IN_OUT_KW_KH
      case VarFormat.OUT_IN_KW_KH => VariableFormat.OUT_IN_KW_KH
      case VarFormat.GP_OUT_IN_KW_KH => VariableFormat.GP_OUT_IN_KW_KH
      case VarFormat.GP_IN_OUT_KW_KH => VariableFormat.GP_IN_OUT_KW_KH
      case VarFormat.OUT_IN_KT_KH_KW => VariableFormat.OUT_IN_KT_KH_KW
      case VarFormat.EMPTY_FORMAT => null
    }
  }
  // Encodes a VariableFormat into the protobuf attribute. A null value is
  // stored as EMPTY_FORMAT so it round-trips back to null above.
  override def setAttributeValue[T: ClassTag](
    context: SerializeContext[T], attributeBuilder: AttrValue.Builder,
    value: Any, valueType: universe.Type = null)(implicit ev: TensorNumeric[T]): Unit = {
    attributeBuilder.setDataType(DataType.VARIABLE_FORMAT)
    if (value != null) {
      val format = value.asInstanceOf[VariableFormat]
      val formatValue = format match {
        case VariableFormat.Default => VarFormat.DEFAULT
        case VariableFormat.ONE_D => VarFormat.ONE_D
        case VariableFormat.IN_OUT => VarFormat.IN_OUT
        case VariableFormat.OUT_IN => VarFormat.OUT_IN
        case VariableFormat.IN_OUT_KW_KH => VarFormat.IN_OUT_KW_KH
        case VariableFormat.OUT_IN_KW_KH => VarFormat.OUT_IN_KW_KH
        case VariableFormat.GP_OUT_IN_KW_KH => VarFormat.GP_OUT_IN_KW_KH
        case VariableFormat.GP_IN_OUT_KW_KH => VarFormat.GP_IN_OUT_KW_KH
        case VariableFormat.OUT_IN_KT_KH_KW => VarFormat.OUT_IN_KT_KH_KW
      }
      attributeBuilder.setVariableFormatValue(formatValue)
    } else {
      attributeBuilder.setVariableFormatValue(VarFormat.EMPTY_FORMAT)
    }
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/serializer/converters/VariableFormatConverter.scala | Scala | apache-2.0 | 3,180 |
package org.cakesolutions.akkapatterns.api
import akka.actor.ActorSystem
import cc.spray.Directives
import org.cakesolutions.akkapatterns.domain.User
import org.cakesolutions.akkapatterns.core.application.{NotRegisteredUser, RegisteredUser}
import akka.pattern.ask
/**
* @author janmachacek
*/
class UserService(implicit val actorSystem: ActorSystem) extends Directives with Marshallers with Unmarshallers with DefaultTimeout with LiftJSON {
  // Resolved by path on each request; assumes the core application has
  // already started a "user" actor under /user/application — TODO confirm.
  def userActor = actorSystem.actorFor("/user/application/user")
  // POST /user/register: unmarshal the body into a User, ask the user actor
  // with a RegisteredUser message, and complete with the Either result
  // (Left = NotRegisteredUser failure, Right = RegisteredUser success).
  val route =
    path("user" / "register") {
      post {
        content(as[User]) { user =>
          completeWith((userActor ? RegisteredUser(user)).mapTo[Either[NotRegisteredUser, RegisteredUser]])
        }
      }
    }
}
| anand-singh/akka-patterns | maven/api/src/main/scala/org/cakesolutions/akkapatterns/api/user.scala | Scala | apache-2.0 | 742 |
package is.hail.utils
import is.hail.backend.ExecuteContext
import is.hail.backend.spark.SparkBackend
import is.hail.io.fs.FS
import org.apache.spark.rdd.RDD
import java.io.{ObjectInputStream, ObjectOutputStream}
import scala.reflect.{ClassTag, classTag}
object SpillingCollectIterator {
  /** Collects `rdd` to the driver as an iterator, buffering at most roughly
    * `sizeLimit` elements in memory; overflow is spilled to temp files under
    * `localTmpdir`. Each finished partition is delivered to `x.append`. */
  def apply[T: ClassTag](localTmpdir: String, fs: FS, rdd: RDD[T], sizeLimit: Int): SpillingCollectIterator[T] = {
    val nPartitions = rdd.partitions.length
    val x = new SpillingCollectIterator(localTmpdir, fs, nPartitions, sizeLimit)
    val ctc = classTag[T]
    SparkBackend.sparkContext("SpillingCollectIterator.apply").runJob(
      rdd,
      (_, it: Iterator[T]) => it.toArray(ctc),
      0 until nPartitions,
      x.append _)
    x
  }
}
/** Iterator over all elements of an RDD, in partition order. Partition
  * results arrive via `append` (see companion object); once more than
  * `sizeLimit` elements are buffered, every buffered partition is serialized
  * to a temp file and re-read lazily during iteration. */
class SpillingCollectIterator[T: ClassTag] private (localTmpdir: String, fs: FS, nPartitions: Int, sizeLimit: Int) extends Iterator[T] {
  private[this] val files: Array[(String, Long)] = new Array(nPartitions)  // (path, byte offset) per spilled partition
  private[this] val buf: Array[Array[T]] = new Array(nPartitions)  // in-memory partition results (null once spilled/consumed)
  private[this] var _size: Long = 0L  // number of elements currently buffered in memory
  private[this] var i: Int = 0  // index of the next partition to iterate
  private[this] var it: Iterator[T] = null  // iterator over the current partition
  // Called once per partition by the Spark result handler (potentially from
  // concurrent threads), hence synchronized.
  private def append(partition: Int, a: Array[T]): Unit = synchronized {
    assert(buf(partition) == null)
    buf(partition) = a
    _size += a.length
    if (_size > sizeLimit) {
      // Spill *all* currently buffered partitions into one file, recording
      // each partition's starting offset so it can be re-read independently.
      val file = ExecuteContext.createTmpPathNoCleanup(localTmpdir, s"spilling-collect-iterator-$partition")
      log.info(s"spilling partition $partition to $file")
      using(fs.createNoCompression(file)) { os =>
        var k = 0
        while (k < buf.length) {
          val vals = buf(k)
          if (vals != null) {
            buf(k) = null
            val pos = os.getPosition
            // Fresh ObjectOutputStream per partition so each recorded offset
            // begins with its own stream header (matches the reader below).
            val oos = new ObjectOutputStream(os)
            oos.writeInt(vals.length)
            var j = 0
            while (j < vals.length) {
              oos.writeObject(vals(j))
              j += 1
            }
            files(k) = (file, pos)
            oos.flush()
          }
          k += 1
        }
      }
      _size = 0
    }
  }
  // Advances to the next partition when the current one is exhausted, serving
  // it either from the in-memory buffer or by deserializing its spill file.
  def hasNext: Boolean = {
    if (it == null || !it.hasNext) {
      if (i >= files.length) {
        it = null
        return false
      } else if (files(i) == null) {
        assert(buf(i) != null)
        it = buf(i).iterator
        buf(i) = null
      } else {
        val (filename, pos) = files(i)
        using(fs.openNoCompression(filename)) { is =>
          is.seek(pos)
          using(new ObjectInputStream(is)) { ois =>
            val length = ois.readInt()
            val arr = new Array[T](length)
            var j = 0
            while (j < length) {
              arr(j) = ois.readObject().asInstanceOf[T]
              j += 1
            }
            it = arr.iterator
          }
        }
      }
      i += 1
    }
    it.hasNext
  }
  // Relies on hasNext to position `it`; calling next on an exhausted
  // iterator dereferences a null `it`.
  def next: T = {
    hasNext
    it.next
  }
}
| hail-is/hail | hail/src/main/scala/is/hail/utils/SpillingCollectIterator.scala | Scala | mit | 2,870 |
/*
* Part of NDLA image-api.
* Copyright (C) 2016 NDLA
*
* See LICENSE
*
*/
package no.ndla.imageapi
import com.amazonaws.services.s3.AmazonS3
import com.zaxxer.hikari.HikariDataSource
import no.ndla.imageapi.auth.{Role, User}
import no.ndla.imageapi.controller.{HealthController, ImageControllerV2, InternController, RawController}
import no.ndla.imageapi.integration._
import no.ndla.imageapi.repository._
import no.ndla.imageapi.service._
import no.ndla.imageapi.service.search.{
ImageIndexService,
ImageSearchService,
IndexService,
SearchConverterService,
SearchService,
TagIndexService,
TagSearchService
}
import no.ndla.network.NdlaClient
import org.mockito.scalatest.MockitoSugar
/** Cake-pattern wiring for tests: mixes in every component of the image API
  * and overrides each dependency with a Mockito mock, so individual test
  * suites only stub the collaborators they actually exercise. */
trait TestEnvironment
    extends Elastic4sClient
    with IndexService
    with TagIndexService
    with SearchService
    with ImageSearchService
    with TagSearchService
    with SearchConverterService
    with DataSource
    with ConverterService
    with ValidationService
    with ImageRepository
    with ReadService
    with WriteService
    with AmazonClient
    with ImageStorageService
    with ImageIndexService
    with DraftApiClient
    with NdlaClient
    with InternController
    with ImageControllerV2
    with HealthController
    with RawController
    with TagsService
    with ImageConverter
    with MockitoSugar
    with User
    with Role
    with Clock {
  // Every component below is a Mockito mock standing in for the real wiring.
  val amazonClient = mock[AmazonS3]
  val dataSource = mock[HikariDataSource]
  val imageIndexService = mock[ImageIndexService]
  val imageSearchService = mock[ImageSearchService]
  val tagIndexService = mock[TagIndexService]
  val tagSearchService = mock[TagSearchService]
  val imageRepository = mock[ImageRepository]
  val readService = mock[ReadService]
  val writeService = mock[WriteService]
  val imageStorage = mock[AmazonImageStorageService]
  val ndlaClient = mock[NdlaClient]
  val draftApiClient = mock[DraftApiClient]
  val rawController = mock[RawController]
  val internController = mock[InternController]
  val imageControllerV2 = mock[ImageControllerV2]
  val converterService = mock[ConverterService]
  val validationService = mock[ValidationService]
  val tagsService = mock[TagsService]
  val e4sClient = mock[NdlaE4sClient]
  val searchConverterService = mock[SearchConverterService]
  val imageConverter = mock[ImageConverter]
  val healthController = mock[HealthController]
  val clock = mock[SystemClock]
  val authUser = mock[AuthUser]
  val authRole = mock[AuthRole]
}
| NDLANO/image-api | src/test/scala/no/ndla/imageapi/TestEnvironment.scala | Scala | gpl-3.0 | 2,493 |
import akka.actor.{ActorLogging, Props, Actor, ActorRef}
import com.ning.http.client.AsyncHttpClientConfig.Builder
import play.api.libs.json.Json
import play.api.libs.ws.ning.NingWSClient
import UserHandler.Ticker
/** Actor that, on every [[DataFetcher.Tick]], fetches the BTC/USD ticker
  * from bitbay and broadcasts the parsed [[Ticker]] to `broadcaster`. */
class DataFetcher(broadcaster: ActorRef) extends Actor with ActorLogging {
  override def receive: Receive = {
    case DataFetcher.Tick =>
      client.url(url).get().map { response =>
        // Non-200 responses are silently dropped.
        if (response.status == 200) {
          val ticker = Json.parse(response.body).as[Ticker]
          log.debug(s"Broadcasting ticker $ticker")
          broadcaster ! ticker
        }
      }.onFailure { case t => log.warning(s"Requesting ticker failed because ${t.getMessage}") }
  }
  private implicit val tickerFormat = Json.format[Ticker]
  // Futures are completed on the actor's dispatcher.
  private implicit val dispatcher = context.dispatcher
  private val url = "https://bitbay.net/API/Public/BTCUSD/ticker.json"
  // NOTE(review): one NingWSClient per actor instance and it is never closed;
  // repeated actor restarts would leak client resources — worth confirming.
  private val client = new NingWSClient(new Builder().build())
}
object DataFetcher {
  /** Message that triggers a single fetch-and-broadcast cycle. */
  case object Tick
  def props(broadcaster: ActorRef): Props = Props(new DataFetcher(broadcaster))
}
| theiterators/reactive-microservices | btc-users/src/main/scala/DataFetcher.scala | Scala | mit | 1,067 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.client
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.{SecurityManager, SparkConf, Logging}
import org.apache.spark.deploy.{ApplicationDescription, Command}
import org.apache.spark.util.Utils
/** Manual smoke-test client: registers a trivial application with a Spark
  * standalone master (whose URL is given as args(0)) and blocks forever. */
private[spark] object TestClient {
  /** Logs cluster callbacks; exits the JVM on disconnect or app death. */
  private class TestListener extends AppClientListener with Logging {
    def connected(id: String) {
      logInfo("Connected to master, got app ID " + id)
    }
    def disconnected() {
      logInfo("Disconnected from master")
      System.exit(0)
    }
    def dead(reason: String) {
      logInfo("Application died with error: " + reason)
      System.exit(0)
    }
    // executor lifecycle events are intentionally ignored by this test client
    def executorAdded(id: String, workerId: String, hostPort: String, cores: Int, memory: Int) {}
    def executorRemoved(id: String, message: String, exitStatus: Option[Int]) {}
  }
  def main(args: Array[String]) {
    val url = args(0)
    val conf = new SparkConf
    // port 0: let the RPC env pick a free port on the local host
    val rpcEnv = RpcEnv.create("spark", Utils.localHostName(), 0, conf, new SecurityManager(conf))
    // one executor, 512 MB, running the dummy TestExecutor command
    val desc = new ApplicationDescription("TestClient", Some(1), 512,
      Command("spark.deploy.client.TestExecutor", Seq(), Map(), Seq(), Seq(), Seq()), "ignored")
    val listener = new TestListener
    val client = new AppClient(rpcEnv, Array(url), desc, listener, new SparkConf)
    client.start()
    // block until the listener calls System.exit
    rpcEnv.awaitTermination()
  }
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala | Scala | apache-2.0 | 2,159 |
package controllers
import global.Globals
import models.Skill
import services.ApiServices
import javax.inject._
import play.api._
import play.api.mvc._
import play.api.libs.json._
import io.swagger.core._
import io.swagger.annotations._
import play.api.libs.concurrent.Execution.Implicits._
@Api(value = "/skills", description = "Skill Information")
class SkillController @Inject() (apiServices: ApiServices) extends ApiModelController(Skill, apiServices.skillService) {

  /** Lists the skills belonging to the given user slug; falls back to the
    * configured default user when no slug is supplied. */
  @ApiOperation(value = "Retrieves List of Skills", response = classOf[Skill], responseContainer = "List", httpMethod = "GET")
  def get(userSlug: Option[String]) = CORSAction {
    apiServices.skillService
      .findByUserSlug(userSlug.getOrElse(Globals.defaultUserSlug))
      .map(skills => Ok(Json.toJson(skills)))
  }
}
| gilbertw1/personal-api | app/controllers/SkillController.scala | Scala | gpl-2.0 | 799 |
package com.sksamuel.scapegoat.inspections.collections
import com.sksamuel.scapegoat.InspectionTest
/** @author Stephen Samuel */
/** Verifies AvoidSizeEqualsZero: collection `.size == 0` / `.length == 0`
  * comparisons are flagged, but unrelated classes that merely define
  * `length`/`isEmpty` members are not. */
class AvoidSizeEqualsZeroTest extends InspectionTest {
  override val inspections = Seq(new AvoidSizeEqualsZero)
  "collection.size == 0" - {
    "should report warning" in {
      // five size/length-against-zero comparisons => five warnings expected
      val code = """object Test {
                      val isEmpty1 = List(1).size == 0
                      val isEmpty2 = List(1).length == 0
                      val isEmpty3 = Set(1).size == 0
                      val isEmpty5 = Seq(1).size == 0
                      val isEmpty6 = Seq(1).length == 0
                    } """.stripMargin
      compileCodeSnippet(code)
      compiler.scapegoat.feedback.warnings.size shouldBe 5
    }
    // github issue #94
    "should ignore durations" in {
      // a user-defined length() compared to 0 must not trigger the inspection
      val code = """object Test {
                   |case class Duration(start: Long, stop: Long) {
                   |  def length: Long = stop - start
                   |  def isEmpty: Boolean = length == 0
                   | }
                 } """.stripMargin
      compileCodeSnippet(code)
      compiler.scapegoat.feedback.warnings.size shouldBe 0
    }
  }
}
| sksamuel/scapegoat | src/test/scala/com/sksamuel/scapegoat/inspections/collections/AvoidSizeEqualsZeroTest.scala | Scala | apache-2.0 | 1,190 |
package com.github.kaeluka.spencer
import java.io._
import java.nio.file.{Path, Paths}
import java.sql.{Connection, DriverManager, ResultSet}
import java.util.concurrent.TimeUnit
import java.util.{Calendar, EmptyStackException}
import com.github.kaeluka.spencer.Events.{AnyEvt, LateInitEvt, ReadModifyEvt}
import com.github.kaeluka.spencer.analysis._
import com.github.kaeluka.spencer.tracefiles.{EventsUtil, TraceFiles}
import com.google.common.base.Stopwatch
import org.apache.commons.io.FileUtils
import org.postgresql.util.PSQLException
import scala.collection.JavaConversions._
/** Discovery of spencer benchmark databases on the local PostgreSQL server. */
object PostgresSpencerDBs {

  /** Scans all non-template databases and returns metadata (object count,
    * load date, comment) for each one that looks like a spencer benchmark.
    * Databases without the expected tables raise a PSQLException when
    * queried and are silently skipped.
    */
  def getAvailableBenchmarks(): Seq[BenchmarkMetaInfo] = {
    val conn = DriverManager.getConnection("jdbc:postgresql://postgres/template1", System.getenv("POSTGRES_USER"), System.getenv("POSTGRES_PASSWORD"))
    var benchmarks = List[BenchmarkMetaInfo]()
    val ps = conn.prepareStatement("SELECT datname FROM pg_database WHERE datistemplate = false;")
    val rs = ps.executeQuery()
    while (rs.next()) {
      val dbname = rs.getString(1)
      // declared per iteration so the finally block can never shut down a
      // stale db from a previous loop round
      var db: PostgresSpencerDB = null
      try {
        // Fix: the original additionally opened a throwaway JDBC connection
        // ("dbconn") here that was never used or closed - a connection leak
        // per scanned database. db.connect() below opens the real connection.
        db = new PostgresSpencerDB(dbname)
        db.connect()
        val countRes = db.conn.createStatement().executeQuery("SELECT COUNT(id) FROM objects")
        assert(countRes.next())
        val count = countRes.getLong(1)
        countRes.close()
        val dateRes = db.conn.createStatement().executeQuery("SELECT val FROM meta WHERE key = 'date'")
        val date = if (dateRes.next()) {
          dateRes.getString(1)
        } else {
          null
        }
        dateRes.close()
        val commentRes = db.conn.createStatement().executeQuery("SELECT val FROM meta WHERE key = 'comment'")
        val comment = if (commentRes.next()) {
          commentRes.getString(1)
        } else {
          null
        }
        commentRes.close()
        benchmarks = benchmarks ++ List(BenchmarkMetaInfo(dbname, count, date, comment))
      } catch {
        case e: PSQLException => () // not a spencer database - skip it
      } finally {
        // Fix: guard against a db that was never constructed or never
        // connected; the original could throw NullPointerException here.
        if (db != null && db.conn != null) {
          db.shutdown()
        }
      }
    }
    rs.close()
    ps.close()
    conn.close()
    benchmarks
  }
}
class PostgresSpencerDB(dbname: String) {
var conn : Connection = _
  /** Closes this instance's JDBC connection; further use of the DB fails. */
  def shutdown() = {
    this.conn.close()
  }
var insertUseStatement : java.sql.PreparedStatement = _
var insertUseBatchSize = 0
var insertEdgeStatement : java.sql.PreparedStatement = _
var insertEdgeBatchSize = 0
var finishEdgeStatement : java.sql.PreparedStatement = _
var finishEdgeBatchSize = 0
var insertCallStatement : java.sql.PreparedStatement = _
var insertCallBatchSize = 0
var insertObjectStatement : java.sql.PreparedStatement = _
val stacks: CallStackAbstraction = new CallStackAbstraction()
def handleEvent(evt: AnyEvt.Reader, idx: Long) {
try {
evt.which() match {
case AnyEvt.Which.FIELDLOAD =>
val fieldload = evt.getFieldload
insertUse(fieldload.getCallertag,
fieldload.getHoldertag,
fieldload.getCallermethod.toString,
"fieldload",
fieldload.getFname.toString,
idx,
fieldload.getThreadName.toString)
case AnyEvt.Which.FIELDSTORE =>
val fstore = evt.getFieldstore
insertUse(
caller = fstore.getCallertag,
callee = fstore.getHoldertag,
method = fstore.getCallermethod.toString,
kind = "fieldstore",
name = fstore.getFname.toString,
idx = idx,
thread = fstore.getThreadName.toString)
if (fstore.getNewval != 0) {
openEdge(
holder = fstore.getHoldertag,
callee = fstore.getNewval,
kind = "field",
name = fstore.getFname.toString,
thread = fstore.getThreadName.toString,
start = idx)
}
case AnyEvt.Which.METHODENTER =>
val menter = evt.getMethodenter
stacks.push(menter, idx)
if (menter.getName.toString == "<init>") {
insertObject(menter.getCalleetag, menter.getCalleeclass.toString,
menter.getCallsitefile.toString, menter.getCallsiteline,
menter.getThreadName.toString)
}
case AnyEvt.Which.METHODEXIT =>
val mexit = evt.getMethodexit
try {
val (returningObjTag: Long, variables : Array[Long]) = stacks.peek(mexit.getThreadName.toString) match {
case Some(idxdEnter) => (idxdEnter.enter.getCalleetag, idxdEnter.usedVariables)
case None => 4 // SPECIAL_VAL_JVM
}
stacks.pop(mexit) match {
case Left(x) => println("WARN: no matching call for "+x.toString+"! Was it transformed while the method was running?")// new AssertionError(x.toString)
case Right(menter) =>
val callerTag: Long = stacks.peek(mexit.getThreadName.toString) match {
case Some(t) => t.enter.getCalleetag
case None => 4 // SPECIAL_VAL_JVM
}
insertCall(
caller = callerTag,
callee = returningObjTag,
name = menter.enter.getName.toString,
start = menter.idx,
end = idx,
thread = menter.enter.getThreadName.toString,
callSiteFile = menter.enter.getCallsitefile.toString,
callSiteLine = menter.enter.getCallsiteline)
var i = 0
val Nvars = variables.length
while (i < Nvars) {
if (variables(i) > 0) {
assert(returningObjTag != 0, s"returningObj can't be 0! ${EventsUtil.methodExitToString(mexit)}")
try {
closeEdge(
holder = returningObjTag,
kind = "var",
start = variables(i),
end = idx)
} catch {
case e: AssertionError =>
println("method enter was: "+menter)
println(s"method exit was: #$idx: ${EventsUtil.methodExitToString(mexit)}")
throw e
}
}
i+=1
}
}
} catch {
case _: EmptyStackException =>
throw new AssertionError("#"+idx+": empty stack for " + EventsUtil.messageToString(evt))
}
case AnyEvt.Which.LATEINIT =>
val lateinit: LateInitEvt.Reader = evt.getLateinit
insertObject(lateinit.getCalleetag
, lateinit.getCalleeclass.toString
, "<jvm internals>"
, -1, "late initialisation")
for (fld <- lateinit.getFields) {
if (fld.getVal != 0) {
openEdge(lateinit.getCalleetag, fld.getVal, "field", fld.getName.toString, "<JVM thread>", 1)
}
}
case AnyEvt.Which.READMODIFY =>
val readmodify: ReadModifyEvt.Reader = evt.getReadmodify
val caller: Long = readmodify.getCallertag
val callee: Long = readmodify.getCalleetag
val kind: String = if (readmodify.getIsModify) {
"modify"
} else {
"read"
}
insertUse(
caller = caller,
callee = callee,
method = stacks.peek(readmodify.getThreadName.toString).map(_.enter.getName.toString).getOrElse("<unknown>"),
kind = kind,
name = readmodify.getFname.toString,
idx = idx,
thread = readmodify.getThreadName.toString)
case AnyEvt.Which.VARLOAD =>
val varload: Events.VarLoadEvt.Reader = evt.getVarload
insertUse(
caller = varload.getCallertag,
callee = varload.getCallertag,
method = varload.getCallermethod.toString,
kind = "varload",
name ="var_"+varload.getVar.toString,
idx = idx,
thread = varload.getThreadName.toString)
case AnyEvt.Which.VARSTORE =>
val varstore: Events.VarStoreEvt.Reader = evt.getVarstore
// step 1: emit use:
insertUse(
caller = varstore.getCallertag,
callee = varstore.getCallertag,
method = varstore.getCallermethod.toString,
kind = "varstore",
name ="var_"+varstore.getVar.toString,
idx = idx,
thread = varstore.getThreadName.toString)
if (! stacks.peek(varstore.getThreadName.toString).map(_.enter.getCalleetag).contains(varstore.getCallertag) ) {
println(s"""at $idx: last enter's callee tag and varstore's caller tag do not match:
|enter : ${stacks.peek(varstore.getThreadName.toString)}
|varstore: ${EventsUtil.varStoreToString(varstore)}""".stripMargin)
}
//step 2: set close old reference (if there was one):
val whenUsed = stacks.whenWasVarAsUsed(varstore.getThreadName.toString, varstore.getVar, idx)
if (whenUsed > 0) {
assert(varstore.getCallertag != 0, s"caller must not be 0: ${EventsUtil.varStoreToString(varstore)}")
closeEdge(
holder = varstore.getCallertag,
kind = "var",
start = whenUsed,
end = idx)
}
//step 3: open new reference (if there is one):
if (varstore.getNewval != 0) {
stacks.markVarAsUsed(varstore.getThreadName.toString, varstore.getVar, idx)
openEdge(
holder = varstore.getCallertag,
callee = varstore.getNewval,
kind = "var",
name = "var_" + varstore.getVar,
thread = varstore.getThreadName.toString,
start = idx)
} else {
stacks.markVarAsUnused(varstore.getThreadName.toString, varstore.getVar)
}
/*
*/
case other => ()
// throw new IllegalStateException(
// "do not know how to handle event kind " + other)
}
} catch {
case e: IllegalStateException =>
()
}
}
def insertCall(caller: Long, callee: Long, name: String, start : Long, end: Long, thread: String, callSiteFile: String, callSiteLine: Long) {
this.insertCallStatement.clearParameters()
this.insertCallStatement.setLong (1, caller)
this.insertCallStatement.setLong (2, callee)
this.insertCallStatement.setString(3, name)
this.insertCallStatement.setLong (4, start)
this.insertCallStatement.setLong (5, end)
this.insertCallStatement.setString(6, thread)
this.insertCallStatement.setString(7, callSiteFile)
this.insertCallStatement.setLong (8, callSiteLine)
this.insertCallStatement.addBatch()
this.insertCallBatchSize += 1
if (insertCallBatchSize > 10000) {
this.insertCallStatement.executeBatch()
this.conn.commit()
this.insertCallStatement.clearBatch()
this.insertCallBatchSize = 0
}
}
def insertObject(tag: Long, klass: String, allocationsitefile: String, allocationsiteline: Long, thread: String) {
assert(allocationsitefile != null)
assert(thread != null)
assert(tag != 0)
this.insertObjectStatement.clearParameters()
this.insertObjectStatement.setLong (1, tag)
this.insertObjectStatement.setString(2, klass.replace('/', '.'))
this.insertObjectStatement.setString(3, allocationsitefile)
this.insertObjectStatement.setLong (4, allocationsiteline)
this.insertObjectStatement.setString(5, thread)
this.insertObjectStatement.execute()
}
def openEdge(holder: Long, callee: Long, kind: String, name: String, thread: String, start: Long) {
assert(callee != 0, s"callee must not be 0")
assert(holder != 0, s"holder must not be 0")
assert(kind != null && kind.equals("var") || kind.equals("field"), "kind must be 'var' or 'field'")
this.insertEdgeStatement.clearParameters()
this.insertEdgeStatement.setLong (1, holder)
this.insertEdgeStatement.setLong (2, callee)
this.insertEdgeStatement.setString(3, kind)
this.insertEdgeStatement.setString(4, name)
this.insertEdgeStatement.setString(5, thread)
this.insertEdgeStatement.setLong (6, start)
this.insertEdgeStatement.addBatch()
this.insertEdgeBatchSize += 1
if (insertEdgeBatchSize > 10000) {
this.insertEdgeStatement.executeBatch()
this.conn.commit()
this.insertEdgeStatement.clearBatch()
this.insertEdgeBatchSize = 0
}
}
def closeEdge(holder: Long, kind: String, start: Long, end: Long) {
assert(holder != 0, "must have non-zero caller")
assert(kind != null && kind.equals("var") || kind.equals("field"), s"kind must be 'var' or 'field', but is $kind")
this.finishEdgeStatement.clearParameters()
this.finishEdgeStatement.setLong (1, end)
this.finishEdgeStatement.setString(2, kind)
this.finishEdgeStatement.setLong (3, start)
this.finishEdgeStatement.setLong (4, holder)
this.finishEdgeStatement.addBatch()
this.finishEdgeBatchSize += 1
if (finishEdgeBatchSize > 10000) {
this.finishEdgeStatement.executeBatch()
this.conn.commit()
this.finishEdgeStatement.clearBatch()
this.finishEdgeBatchSize = 0
}
}
def insertUse(caller: Long, callee: Long, method: String, kind: String, name: String, idx: Long, thread: String) {
assert(caller != 0, s"#$idx: caller must not be 0")
assert(callee != 0, s"#$idx: callee must not be 0")
assert(method != null && method.length > 0, "#$idx: method name must be given")
assert(kind != null && kind.equals("fieldstore") || kind.equals("fieldload") ||kind.equals("varload") || kind.equals("varstore") || kind.equals("read") || kind.equals("modify"), s"#$idx: kind must be 'varstore/load' or 'fieldstore/load' or 'read' or 'modify', but is $kind")
assert(idx > 0)
assert(thread != null && thread.length > 0, "#$idx: thread name must be given")
this.insertUseStatement.clearParameters()
this.insertUseStatement.setLong (1, caller)
this.insertUseStatement.setLong (2, callee)
this.insertUseStatement.setString(3, method)
this.insertUseStatement.setString(4, kind)
this.insertUseStatement.setString(5, name)
this.insertUseStatement.setLong (6, idx)
this.insertUseStatement.setString(7, thread)
this.insertUseStatement.addBatch()
this.insertUseBatchSize+=1
if (insertUseBatchSize > 10000) {
this.insertUseStatement.executeBatch()
this.conn.commit()
this.insertUseStatement.clearBatch()
this.insertUseBatchSize = 0
}
}
def prepareCaches(sqls: Seq[String]): Unit = {
sqls.foreach(sql => {
this.conn.createStatement().execute(sql)
this.conn.commit()
})
}
  /** Runs a query using caching.
    * The user has to close the ResultsSet
    *
    * First materialises any inner-query caches the analyser declares, then
    * delegates to [[getCachedOrRunSQL]] with the analyser's cache key.
    *
    * @return the result set, stemming either from loading the cache,
    *         or from running the full query
    */
  def getCachedOrRunQuery(query: ResultSetAnalyser): ResultSet = {
    this.prepareCaches(query.precacheInnersSQL)
    this.getCachedOrRunSQL(query.cacheKey, query.getSQLUsingCache)
  }
  /** Runs a raw SQL query; on failure rethrows a PSQLException carrying the
    * offending query text. The caller must close the returned ResultSet. */
  def runSQLQuery(sql: String): ResultSet = {
    try {
      this.conn.createStatement().executeQuery(sql)
    } catch {
      case e: PSQLException =>
        //add the original query to the exception for better debugability
        throw new PSQLException(s"query: $sql", null, e)
    }
  }
  /** Runs a SQL query using caching.
    * The user has to close the ResultsSet
    *
    * The cache is a materialised table named `cacheKey`: if selecting from
    * it throws (PSQLException is treated as "table does not exist yet"),
    * the table is created from `sql` and then selected from.
    *
    * @return the result set, stemming either from loading the cache,
    *         or from running the full query
    */
  def getCachedOrRunSQL(cacheKey: String, sql: String): ResultSet = {
    assert(cacheKey != null)
    assert(sql != null)
    assert(this.conn != null)
    var ret: ResultSet = null
    try {
      ret = this.runSQLQuery(s"SELECT * FROM $cacheKey;")
    } catch {
      case e: PSQLException =>
        // cache miss: commit to clear the aborted transaction, then build
        // the cache table from the full query and read it back
        this.conn.commit()
        this.conn.createStatement().execute(
          s"CREATE TABLE IF NOT EXISTS $cacheKey AS $sql ;")
        this.conn.commit()
        ret = this.runSQLQuery(s"SELECT * FROM $cacheKey")
    }
    ret
  }
  /** Parses `query` and returns the percentage (0..100, two decimals) of
    * proper objects (id > 4, i.e. excluding special pseudo-objects) matched
    * by it; None when the query does not parse. The percentage itself is
    * cached in a table keyed by the query's hash. */
  def getObjPercentage(query: String): Option[Float] = {
    QueryParser.parseObjQuery(query) match {
      case Left(_err) => None
      case Right(q) =>
        // make sure the query's own cache table exists before referencing it
        this.prepareCaches(q.precacheInnersSQL)
        this.getCachedOrRunQuery(q).close()
        val result = this.getCachedOrRunSQL("cache_perc_"+s"getPercentages($query)".hashCode.toString.replace("-","_"),
          s"""SELECT
             |  ROUND(100.0*COUNT(id)/(SELECT COUNT(id) FROM objects WHERE id > 4), 2)
             |FROM
             |  (${q.getCacheSQL}) counted""".
            stripMargin)
        assert(result.next())
        val ret = result.getFloat(1)
        result.close()
        Some(ret)
    }
  }
def getFieldPercentage(query: String, minInstances: Int = 10): Option[Seq[(String, Float)]] = {
QueryParser.parseObjQuery(s"And(Obj() $query)") match {
case Left(_err) => None
case Right(q) =>
val cacheKey = s"cache_fieldpercent_n${minInstances}_"+(s"getPercentages($q)".hashCode.toString.replace("-","_"))
println(s"caching percentage of $query into $cacheKey")
this.prepareCaches(q.precacheInnersSQL)
this.getCachedOrRunQuery(q).close()
val result = this.getCachedOrRunSQL(cacheKey,
s"""-- select all fields, the number of objects, the number of passed objects, and the percentage
|SELECT total.field field, total.ncallees total, COALESCE(passes.ncallees,0) passed, round(100.0*COALESCE(passes.ncallees,0)/total.ncallees, 2) perc FROM
|(
|-- select all fields, and the number of objects that were referenced from them
|SELECT
| CONCAT(klass,'::',name) field, COUNT(DISTINCT callee) ncallees
|FROM
| refs
|INNER JOIN objects ON objects.id = refs.caller
|WHERE kind = 'field'
|AND klass NOT LIKE '[%'
|AND callee IN (SELECT id FROM objects)
|
|GROUP BY (klass, name)
|) total
|LEFT OUTER JOIN
|(
|-- select all fields, and the number of objects that passed that were referenced
|-- from them
|SELECT
| CONCAT(klass,'::',name) field, COUNT(DISTINCT callee) ncallees
|FROM
| refs
|INNER JOIN objects ON objects.id = refs.caller
|WHERE
| kind = 'field' AND
| klass NOT LIKE '[%'
| AND callee IN (${q.getCacheSQL}) -- <<<<< THIS IS THE INNER QUERY
|GROUP BY (klass, name)
|) passes
|ON total.field = passes.field
|ORDER BY perc DESC
""".stripMargin)
val ret = collection.mutable.ListBuffer[(String, Float)]()
while (result.next) {
ret.append((result.getString("field"), result.getFloat("perc")))
}
result.close()
Some(ret)
}
}
def getClassPercentage(query: String, minInstances: Int = 10): Option[Seq[(String, Float)]] = {
QueryParser.parseObjQuery(s"And(Obj() $query)") match {
case Left(_err) => None
case Right(q) =>
val cacheKey = s"cache_classpercent_n${minInstances}_"+(s"getPercentages($q)".hashCode.toString.replace("-","_"))
println(s"caching percentage of $query into $cacheKey")
this.prepareCaches(q.precacheInnersSQL)
this.getCachedOrRunQuery(q).close()
val result = this.getCachedOrRunSQL(cacheKey,
s"""SELECT
| filtered.klass,
| ROUND(100.0*npassed/ntotal,2) AS percentage
|FROM
|((SELECT objects.klass, COUNT(objects.id) npassed
|FROM objects
|JOIN
|(
| ${q.getCacheSQL}
|) AS counted
|ON objects.id = counted.id
|GROUP BY klass) filtered
|RIGHT OUTER JOIN (SELECT klass, COUNT(id) ntotal
| FROM objects
| WHERE id > 4
| GROUP BY klass) total
|ON filtered.klass = total.klass)
|WHERE ntotal >= $minInstances""".stripMargin)
val ret = collection.mutable.ListBuffer[(String, Float)]()
while (result.next) {
ret.append((result.getString(1), result.getFloat(2)))
}
result.close()
Some(ret)
}
}
def aproposObject(tag: Long): AproposData = {
val theObj = this.runSQLQuery(s"SELECT klass FROM objects WHERE id = $tag")
val klass = if (theObj.next()) {
Option(theObj.getString("klass"))
} else {
None
}
val usesTable = this.runSQLQuery(s"SELECT * FROM uses WHERE caller = $tag OR callee = $tag")
val useEvents = collection.mutable.ArrayBuffer[AproposUseEvent]()
while (usesTable.next) {
useEvents +=
AproposUseEvent(usesTable.getLong("caller"),
usesTable.getLong("callee"),
usesTable.getLong("idx"),
Option(usesTable.getString("kind")).getOrElse("<unknown kind>"),
Option(usesTable.getString("name")).getOrElse("<unknown name>"),
Option(usesTable.getString("thread")).getOrElse("<unknown thread>"),
"no comment")
}
usesTable.close()
val callsTable = this.runSQLQuery(s"SELECT * FROM calls WHERE caller = $tag OR callee = $tag")
val callsEvents = collection.mutable.ArrayBuffer[AproposCallEvent]()
while (callsTable.next) {
callsEvents +=
AproposCallEvent(
callsTable.getLong("caller")
, callsTable.getLong("callee")
, callsTable.getLong("callstart")
, callsTable.getLong("callend")
, callsTable.getString("name")
, callsTable.getString("callsitefile") + ":" + callsTable.getLong("callsiteline")
, Option(callsTable.getString("thread")).getOrElse("<unknown thread>")
, "no comment")
}
callsTable.close()
val refsTable = this.runSQLQuery(s"SELECT * FROM refs WHERE caller = $tag OR callee = $tag")
val refsEvents = collection.mutable.ArrayBuffer[AproposRefEvent]()
while (refsTable.next) {
refsEvents +=
AproposRefEvent(
refsTable.getLong("caller")
, refsTable.getLong("callee")
, refsTable.getLong("refstart")
, Option(refsTable.getLong("refend"))
, refsTable.getString("name")
, refsTable.getString("kind")
, Option(refsTable.getString("thread")).getOrElse("<unknown thread>")
, "no comment")
}
refsTable.close()
var evts = collection.mutable.ArrayBuffer[AproposEvent]()
evts.appendAll(useEvents)
evts.appendAll(callsEvents)
evts.appendAll(refsEvents)
evts = evts.sortWith({ case (evt1, evt2) => AproposEvent.startTime(evt1) < AproposEvent.startTime(evt2) })
AproposData(None, evts, klass)
}
  /** Drops all cache_* tables (or only the cache_..perc.. statistics caches
    * when onlyStatistics is true).
    *
    * NOTE(review): the table list is read through a fresh connection to the
    * `dbname` parameter, but the DROP statements run on this.conn -
    * presumably callers always pass this instance's own dbname; confirm.
    */
  def clearCaches(dbname: String, onlyStatistics: Boolean = false): Unit = {
    val conn = DriverManager.getConnection("jdbc:postgresql://postgres/"+dbname, System.getenv("POSTGRES_USER"), System.getenv("POSTGRES_PASSWORD"))
    val rs = conn.createStatement().executeQuery(
      s"""SELECT table_name
         |FROM information_schema.tables
         |WHERE table_schema='public'
         |AND table_type='BASE TABLE'
         |AND table_name LIKE 'cache_%${if (onlyStatistics) {"perc%"} else {""}}'""".stripMargin)
    println("\\nclearing caches: ")
    while (rs.next()) {
      val tblname = rs.getString(1)
      // the LIKE filter above should already guarantee this prefix
      if (tblname.startsWith("cache_")) {
        print(tblname+" ")
        this.conn.createStatement().execute(s"DROP TABLE IF EXISTS ${tblname}")
        this.conn.commit()
      }
    }
    rs.close()
    conn.close()
  }
// def getProperObjects: RDD[CassandraRow] = {
// //FIXME: use WHERE clause
// getTable("objects")
// .filter(_.getLong("id") > 0)
// .filter(
// _.getStringOption("comment")
// .forall(!_.contains("late initialisation")))
// }
  /** Creates the indices used by the post-processing and analysis queries. */
  def createIndices(): Unit = {
    this.conn.createStatement().execute(
      "CREATE INDEX calls_callstart_idx ON calls(callstart)")
    this.conn.createStatement().execute(
      "CREATE INDEX calls_callend_idx ON calls(callend)")
    this.conn.createStatement().execute(
      "CREATE INDEX uses_name_idx ON uses(name)")
  }
/**
* The instrumentation is limited in what byte code it can generate, due to
* semantics of Java bytecode. This limits how constructor calls can be
* instrumented. Specifically, constructor and super-constructor calls will
* appear sequentially (with the innermost constructor first) in the data,
* not nested as they should be.
* This method will fix the order to be properly nested for each object.
*
* The constructor calls for object 12 of class C (which is a subclass of B,
* which is a subclass of A) could look like this:
*
* #10 call(<init>,12) // A constructor
* #20 exit(<init>) // A constructor
* #30 call(<init>,12) // B constructor
* #40 exit(<init>) // B constructor
* #50 call(<init>,12) // C constructor
* #60 exit(<init>) // C constructor
*
* Then this method will simply update the start and end times of the first
* call to span all constructor calls and delete the others
* to be:
*
* #10 call(<init>,12) // A,B,C constructor
* #60 exit(<init>) // A,B,C constructor
*/
  def sortConstructorCalls(): Unit = {
    val watch = Stopwatch.createStarted()
    print("getting correction map.. ")
    //first reorder the calls to this:
    // for every object with 2+ <init> calls, collect all call starts
    // (ascending) followed by all call ends (ascending)
    val ret = this.conn.createStatement().executeQuery(
      """SELECT
        |  callee,
        |  (array_agg(callstart ORDER BY callstart) || array_agg(callend ORDER BY callend)) as startend_times
        |FROM
        |  calls
        |WHERE
        |  name ='<init>'
        |GROUP BY callee
        |HAVING COUNT(*) >= 2;""".stripMargin)
    while (ret.next()) {
      // In the example above, times will be: [10, 30, 50, 20, 40, 60]
      // A enter, B enter, C enter, A exit, B exit, C exit
      val times = ret
        .getArray("startend_times").getArray.asInstanceOf[Array[java.lang.Long]]
      // stretch the outermost call (last end) to begin at the earliest start
      this.conn.createStatement().execute(s"UPDATE calls SET callstart = ${times(0)} WHERE callend = ${times.last}")
      var i = 1
      // delete the now-redundant inner constructor call rows
      while (i < times.length/2) {
        this.conn.createStatement().execute(s"DELETE FROM calls WHERE callend = ${times(times.length - 1 - i)}")
        i += 1
      }
    }
    ret.close()
    println(s" done (${watch.stop()})")
  }
  /** Closes field reference edges: a field edge implicitly ends when the
    * same (caller, field) is assigned again, so for each such pair every
    * assignment except the last is closed at the start of the next one. */
  def computeEdgeEnds(): Unit = {
    val res = this.conn.createStatement().executeQuery(
      """
        |SELECT
        |  caller,
        |  name,
        |  array_agg(refstart ORDER BY refstart) AS refstarts,
        |  array_agg(kind ORDER BY refstart) AS refkinds
        |FROM
        |  refs
        |WHERE kind = 'field'
        |GROUP BY
        |  caller, name
      """.stripMargin)
    var cnt = 0
    while(res.next()) {
      val caller = res.getLong("caller")
      val assignmentStarts = res.getArray("refstarts").getArray.asInstanceOf[Array[java.lang.Long]]
      val assignmentKinds = res.getArray("refkinds").getArray.asInstanceOf[Array[String]]
      assert(assignmentStarts.length == assignmentKinds.length)
      val N = assignmentKinds.length
      var i = 0
      // close edge i at the start of assignment i+1; the final one stays open
      while (i<N-1) {
        closeEdge(caller, assignmentKinds(i), assignmentStarts(i), assignmentStarts(i+1))
        i = i+1
        cnt = cnt+1
      }
    }
    res.close()
    println(s"closed $cnt assignments")
  }
  /** Computes, per object, the first and last event index at which it was
    * used, and writes both back into the objects table. */
  def computeLastObjUsages(): Unit = {
    val firstLastUses = this.conn.createStatement().executeQuery(
      """SELECT
        |  callee,
        |  min(idx) AS firstusage,
        |  max(idx) AS lastusage
        |FROM uses
        |GROUP BY callee
        |""".stripMargin)
    while (firstLastUses.next()) {
      val id = firstLastUses.getLong(1) // callee
      val fst = firstLastUses.getLong(2) // firstusage
      val lst = firstLastUses.getLong(3) // lastusage
      this.conn.createStatement().execute(
        s"""UPDATE objects
           |SET
           |  firstUsage = $fst,
           |  lastUsage = $lst
           |WHERE id = $id""".stripMargin)
    }
    firstLastUses.close()
  }
def storeClassFile(logDir: Path, file: File) {
val relPath = logDir.relativize(file.toPath)
val segments = for (i <- 0 until relPath.getNameCount) yield relPath.getName(i)
val className = segments.mkString(".").replace(".class", "")
val fileStream = new FileInputStream(file)
val stat = this.conn.prepareStatement("INSERT INTO classdumps VALUES (?, ?)")
stat.setString(1, className)
stat.setBinaryStream(2, fileStream)
stat.execute()
}
  /** Stores every class file found recursively under logDir into the
    * classdumps table (iteration uses the JavaConversions implicit). */
  def loadBytecodeDir(logDir: Path) = {
    val files = FileUtils.listFiles(logDir.toFile, null, true)
    for (f <- files.iterator()) {
      storeClassFile(logDir, f.asInstanceOf[File])
    }
  }
def loadFrom(path: String, logDir: Path) {
println(s"loading from $path, logDir=$logDir")
var hadLateInits = false
var doneWithHandleInits = false
val stopwatch: Stopwatch = Stopwatch.createStarted
this.connect_(true)
this.loadBytecodeDir(Paths.get(logDir.toString, "input"))
val events = TraceFiles.fromPath(path).iterator
var watch = Stopwatch.createStarted()
val printBatchSize = 10e6
var i : Long = 1
while (events.hasNext) {
val evt: AnyEvt.Reader = events.next
if (!Util.isTrickyThreadName(EventsUtil.getThread(evt))) {
handleEvent(evt, i)
}
if ((i-1) % printBatchSize == 0) {
val evtsPerSec = if (i > 1) {
Math.round(printBatchSize*1000.0/watch.elapsed(TimeUnit.MILLISECONDS))
} else {
"-"
}
watch.reset().start()
println(
s"#${((i-1) / 1e6).toFloat}e6 " +
s"($evtsPerSec evts/sec)..")
}
i += 1
}
println("loading "+(i-1)+" events took "+stopwatch.stop())
if (this.insertEdgeBatchSize > 0) {
this.insertEdgeStatement.executeBatch()
}
if (this.finishEdgeBatchSize > 0) {
this.finishEdgeStatement.executeBatch()
}
if (this.insertUseBatchSize > 0) {
this.insertUseStatement.executeBatch()
}
this.conn.commit()
createIndices()
watch = Stopwatch.createStarted()
print("sorting constructor calls... ")
sortConstructorCalls()
println(s"\\tdone after ${watch.stop()}")
watch.reset().start()
print("computing edge ends... ")
computeEdgeEnds()
println(s"done after ${watch.stop()}")
watch.reset().start()
print("computing last obj usages... ")
computeLastObjUsages()
println(s"done after ${watch.stop()}")
watch.reset().start()
this.conn.commit()
this.conn.createStatement().execute(
"CREATE TABLE meta (key text UNIQUE, val text, PRIMARY KEY(key));")
this.conn.createStatement().execute(
s"""INSERT INTO meta
| (key, val)
|VALUES
| ('comment', 'loaded from $path');
""".stripMargin)
val now = Calendar.getInstance()
now.setLenient(false)
this.conn.createStatement().execute(
s"""INSERT INTO meta
| (key, val)
|VALUES
| ('date', '${now.get(Calendar.YEAR)}-${"%02d".format(now.get(Calendar.MONTH) + 1)}-${now.get(Calendar.DAY_OF_MONTH)}');
""".stripMargin)
this.conn.createStatement().execute(
s"""INSERT INTO meta
| (key, val)
|VALUES
| ('eventcount', '${i-1}');
""".stripMargin)
this.conn.commit()
val e = this.conn.createStatement().execute(
"""CREATE EXTENSION cstore_fdw;
|CREATE SERVER cstore_server FOREIGN DATA WRAPPER cstore_fdw;
|DROP FOREIGN TABLE IF EXISTS uses_cstore;
|
|CREATE FOREIGN TABLE uses_cstore (
| caller bigint,
| callee bigint,
| name text,
| method text,
| kind text,
| idx bigint,
| thread varchar(80),
| comment text)
|SERVER cstore_server
|OPTIONS(compression 'pglz');
|
|-- load negative callees first to improve skip index peformance:
|INSERT INTO uses_cstore SELECT * FROM uses WHERE callee < 0;
|INSERT INTO uses_cstore SELECT * FROM uses WHERE callee >= 0;
""".stripMargin)
this.conn.commit()
cacheQueries()
this.conn.commit()
this.shutdown()
}
  /** Pre-computes cache tables (plus class/field percentage statistics) for
    * the standard series of queries, then drops the caches of their
    * negations, which were only needed as intermediates. */
  def cacheQueries(): Unit = {
    val qs = QueryParser.seriesOfQueries()
    for (q <- qs) {
      val time = Stopwatch.createStarted()
      print(s"pre caching $q.. ")
      this.prepareCaches(q.precacheInnersSQL)
      val res = this.getCachedOrRunSQL(q.cacheKey, q.getSQLUsingCache)
      this.getClassPercentage(q.toString)
      this.getFieldPercentage(q.toString)
      println(s"done after $time")
    }
    // drop the Not(...) intermediates to reclaim space
    val del = QueryParser.wrapQueries("Not", QueryParser.seriesOfQueries())
    for (q <- del) {
      print(s"dropping $q.. ")
      this.conn.createStatement().execute(s"DROP TABLE IF EXISTS ${q.cacheKey};")
      this.conn.commit()
      println("done")
    }
  }
  /** Connects to the existing database without touching its tables. */
  def connect(): Unit = {
    this.connect_(false)
  }
  /** Connects to the database; when `overwrite` is true the database is
    * dropped and recreated with fresh, empty tables first. */
  def connect_(overwrite: Boolean = false): Unit = {
    if (overwrite) {
      createFreshTables(this.dbname)
    }
    initPreparedStatements(this.dbname)
  }
def initDbConnection(): Unit = {
val opts = Map(
"url" -> s"jdbc:postgresql://postgres/$dbname",
"dbtable" -> dbname,
"user" -> "spencer"
)
if (this.conn != null) {
this.conn.close()
}
this.conn = DriverManager.getConnection(s"jdbc:postgresql://postgres/$dbname", System.getenv("POSTGRES_USER"), System.getenv("POSTGRES_PASSWORD"))
this.conn.setAutoCommit(false)
}
def createFreshTables(dbname: String) {
if (this.conn != null) {
this.conn.close()
}
this.conn = DriverManager.getConnection("jdbc:postgresql://postgres/template1", System.getenv("POSTGRES_USER"), System.getenv("POSTGRES_PASSWORD"))
this.conn.createStatement().execute(s"DROP DATABASE IF EXISTS $dbname")
this.conn.createStatement().execute(s"CREATE DATABASE $dbname")
initDbConnection()
this.conn.createStatement().execute(
"""CREATE TABLE objects (
| id bigint UNIQUE,
| klass text,
| allocationsitefile text,
| allocationsiteline integer,
| firstUsage bigint UNIQUE,
| lastUsage bigint UNIQUE,
| thread text,
| PRIMARY KEY(id))""".stripMargin)
this.conn.createStatement().execute(
"""CREATE TABLE refs (
| caller bigint,
| callee bigint,
| kind varchar(10),
| name varchar(80),
| refstart bigint UNIQUE,
| refend bigint,
| thread text,
| PRIMARY KEY (caller, kind, refstart))
""".stripMargin)
this.conn.createStatement().execute(
"""CREATE TABLE calls (
| caller bigint,
| callee bigint,
| name text,
| callstart bigint,
| callend bigint,
| callsitefile text,
| callsiteline bigint,
| thread text,
| PRIMARY KEY (caller, callee, callstart, callend))
""".stripMargin)
this.conn.createStatement().execute(
"""CREATE TABLE uses (
| caller bigint,
| callee bigint,
| name text,
| method text,
| kind text,
| idx bigint,
| thread varchar(80),
| PRIMARY KEY(caller, callee, idx))
""".stripMargin)
this.conn.createStatement().execute(
"""CREATE TABLE classdumps (
| classname text,
| bytecode bytea,
| PRIMARY KEY(classname))
""".stripMargin)
this.conn.commit()
}
  /** Re-opens the connection and prepares the statements used on the write
    * hot path (object/ref/use/call inserts and the ref-end update).
    *
    * NOTE(review): the `keyspace` parameter is never used — the connection is
    * always initialised against `this.dbname` via initDbConnection(); confirm
    * at the call sites whether it can be dropped (likely a leftover from a
    * Cassandra-backed implementation, given the name).
    */
  def initPreparedStatements(keyspace: String): Unit = {
    this.initDbConnection()
    // Upsert: a row for this object id may already exist (e.g. created from a
    // usage record before the allocation was seen); only klass is refreshed.
    this.insertObjectStatement = this.conn.prepareStatement(
      """INSERT INTO objects (
        | id,
        | klass,
        | allocationsitefile,
        | allocationsiteline,
        | thread) VALUES (?, ?, ?, ?, ?)
        | ON CONFLICT(id) DO UPDATE SET klass = EXCLUDED.klass;""".stripMargin)
    // Inserts a reference edge with an open end (refend filled in later).
    this.insertEdgeStatement = this.conn.prepareStatement(
      """INSERT INTO refs (
        | caller,
        | callee,
        | kind,
        | name,
        | thread,
        | refstart) VALUES (?, ?, ?, ?, ?, ? )""".stripMargin)
    // Closes a previously inserted reference edge by setting refend.
    this.finishEdgeStatement = this.conn.prepareStatement(
      """UPDATE refs
        |SET refend = ?
        |WHERE kind = ? AND refstart = ? and caller = ?""".stripMargin)
    this.insertUseStatement = this.conn.prepareStatement(
      """INSERT INTO uses (
        | caller,
        | callee,
        | method,
        | kind,
        | name,
        | idx,
        | thread) VALUES (?, ?, ?, ?, ?, ?, ? )""".stripMargin)
    this.insertCallStatement = this.conn.prepareStatement(
      """INSERT INTO calls (
        | caller,
        | callee,
        | name,
        | callstart,
        | callend,
        | thread,
        | callsitefile,
        | callsiteline) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin)
  }
}
| kaeluka/spencer-all | spencer-analyse/src/main/scala/com/github/kaeluka/spencer/tracefiles/PostgresSpencerDB.scala | Scala | mit | 37,940 |
package recipestore.nlp.corpus.ingredient.stats.models
// Order-agnostic pair: two strings that compare equal regardless of order.
/** An unordered pair of strings: `Pair(a, b) == Pair(b, a)`.
  *
  * Equality and hashing both delegate to the order-insensitive set of the
  * two values, so the equals/hashCode contract holds.
  */
class Pair(val first: String, val second: String) {
  // Canonical, order-insensitive representation of the two values.
  private val values = Set(first, second)

  def canEqual(other: Any): Boolean = other.isInstanceOf[Pair]

  override def equals(other: Any): Boolean = other match {
    case that: Pair =>
      (that canEqual this) &&
        values == that.values
    case _ => false
  }

  // The original folded Seq(values) through 31*a + b, which for a single
  // element reduces to exactly values.hashCode — state that directly.
  override def hashCode(): Int = values.hashCode()
}
| prad-a-RuntimeException/semantic-store | src/main/scala/recipestore/nlp/corpus/ingredient/stats/models/Pair.scala | Scala | mit | 536 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.Date
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
import org.scalatest.Matchers._
import org.apache.spark.sql.catalyst.expressions.{InSet, Literal, NamedExpression}
import org.apache.spark.sql.execution.ProjectExec
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
class ColumnExpressionSuite extends QueryTest with SharedSparkSession {
import testImplicits._
  // Full 2x2 boolean truth table; exercised by the "&&" and "||" tests below.
  private lazy val booleanData = {
    spark.createDataFrame(sparkContext.parallelize(
      Row(false, false) ::
      Row(false, true) ::
      Row(true, false) ::
      Row(true, true) :: Nil),
      StructType(Seq(StructField("a", BooleanType), StructField("b", BooleanType))))
  }

  // Int pairs with NULLs in every position; used by the null-safe comparison
  // tests ("<=>", "=!=").
  private lazy val nullData = Seq(
    (Some(1), Some(1)), (Some(1), Some(2)), (Some(1), None), (None, None)).toDF("a", "b")
test("column names with space") {
val df = Seq((1, "a")).toDF("name with space", "name.with.dot")
checkAnswer(
df.select(df("name with space")),
Row(1) :: Nil)
checkAnswer(
df.select($"name with space"),
Row(1) :: Nil)
checkAnswer(
df.select(col("name with space")),
Row(1) :: Nil)
checkAnswer(
df.select("name with space"),
Row(1) :: Nil)
checkAnswer(
df.select(expr("`name with space`")),
Row(1) :: Nil)
}
test("column names with dot") {
val df = Seq((1, "a")).toDF("name with space", "name.with.dot").as("a")
checkAnswer(
df.select(df("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select($"`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(col("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select("`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(expr("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select(df("a.`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select($"a.`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(col("a.`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select("a.`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(expr("a.`name.with.dot`")),
Row("a") :: Nil)
}
test("alias and name") {
val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
assert(df.select(df("a").as("b")).columns.head === "b")
assert(df.select(df("a").alias("b")).columns.head === "b")
assert(df.select(df("a").name("b")).columns.head === "b")
}
test("as propagates metadata") {
val metadata = new MetadataBuilder
metadata.putString("key", "value")
val origCol = $"a".as("b", metadata.build())
val newCol = origCol.as("c")
assert(newCol.expr.asInstanceOf[NamedExpression].metadata.getString("key") === "value")
}
test("collect on column produced by a binary operator") {
val df = Seq((1, 2, 3)).toDF("a", "b", "c")
checkAnswer(df.select(df("a") + df("b")), Seq(Row(3)))
checkAnswer(df.select(df("a") + df("b").as("c")), Seq(Row(3)))
}
test("star") {
checkAnswer(testData.select($"*"), testData.collect().toSeq)
}
test("star qualified by data frame object") {
val df = testData.toDF
val goldAnswer = df.collect().toSeq
checkAnswer(df.select(df("*")), goldAnswer)
val df1 = df.select(df("*"), lit("abcd").as("litCol"))
checkAnswer(df1.select(df("*")), goldAnswer)
}
test("star qualified by table name") {
checkAnswer(testData.as("testData").select($"testData.*"), testData.collect().toSeq)
}
test("+") {
checkAnswer(
testData2.select($"a" + 1),
testData2.collect().toSeq.map(r => Row(r.getInt(0) + 1)))
checkAnswer(
testData2.select($"a" + $"b" + 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) + r.getInt(1) + 2)))
}
test("-") {
checkAnswer(
testData2.select($"a" - 1),
testData2.collect().toSeq.map(r => Row(r.getInt(0) - 1)))
checkAnswer(
testData2.select($"a" - $"b" - 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) - r.getInt(1) - 2)))
}
test("*") {
checkAnswer(
testData2.select($"a" * 10),
testData2.collect().toSeq.map(r => Row(r.getInt(0) * 10)))
checkAnswer(
testData2.select($"a" * $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0) * r.getInt(1))))
}
test("/") {
checkAnswer(
testData2.select($"a" / 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / 2)))
checkAnswer(
testData2.select($"a" / $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / r.getInt(1))))
}
test("%") {
checkAnswer(
testData2.select($"a" % 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) % 2)))
checkAnswer(
testData2.select($"a" % $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0) % r.getInt(1))))
}
test("unary -") {
checkAnswer(
testData2.select(-$"a"),
testData2.collect().toSeq.map(r => Row(-r.getInt(0))))
}
test("unary !") {
checkAnswer(
complexData.select(!$"b"),
complexData.collect().toSeq.map(r => Row(!r.getBoolean(3))))
}
test("isNull") {
checkAnswer(
nullStrings.toDF.where($"s".isNull),
nullStrings.collect().toSeq.filter(r => r.getString(1) eq null))
checkAnswer(
sql("select isnull(null), isnull(1)"),
Row(true, false))
}
test("isNotNull") {
checkAnswer(
nullStrings.toDF.where($"s".isNotNull),
nullStrings.collect().toSeq.filter(r => r.getString(1) ne null))
checkAnswer(
sql("select isnotnull(null), isnotnull('a')"),
Row(false, true))
}
test("isNaN") {
val testData = spark.createDataFrame(sparkContext.parallelize(
Row(Double.NaN, Float.NaN) ::
Row(math.log(-1), math.log(-3).toFloat) ::
Row(null, null) ::
Row(Double.MaxValue, Float.MinValue):: Nil),
StructType(Seq(StructField("a", DoubleType), StructField("b", FloatType))))
checkAnswer(
testData.select($"a".isNaN, $"b".isNaN),
Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil)
checkAnswer(
testData.select(isnan($"a"), isnan($"b")),
Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil)
checkAnswer(
sql("select isnan(15), isnan('invalid')"),
Row(false, false))
}
test("nanvl") {
val testData = spark.createDataFrame(sparkContext.parallelize(
Row(null, 3.0, Double.NaN, Double.PositiveInfinity, 1.0f, 4) :: Nil),
StructType(Seq(StructField("a", DoubleType), StructField("b", DoubleType),
StructField("c", DoubleType), StructField("d", DoubleType),
StructField("e", FloatType), StructField("f", IntegerType))))
checkAnswer(
testData.select(
nanvl($"a", lit(5)), nanvl($"b", lit(10)), nanvl(lit(10), $"b"),
nanvl($"c", lit(null).cast(DoubleType)), nanvl($"d", lit(10)),
nanvl($"b", $"e"), nanvl($"e", $"f")),
Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0)
)
testData.createOrReplaceTempView("t")
checkAnswer(
sql(
"select nanvl(a, 5), nanvl(b, 10), nanvl(10, b), nanvl(c, null), nanvl(d, 10), " +
" nanvl(b, e), nanvl(e, f) from t"),
Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0)
)
}
test("===") {
checkAnswer(
testData2.filter($"a" === 1),
testData2.collect().toSeq.filter(r => r.getInt(0) == 1))
checkAnswer(
testData2.filter($"a" === $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) == r.getInt(1)))
}
  test("<=>") {
    // <=> is null-safe equality: unlike ===, it never yields NULL, and
    // NULL <=> NULL evaluates to true (hence the (null, null) rows surviving
    // the filters below).
    checkAnswer(
      nullData.filter($"b" <=> 1),
      Row(1, 1) :: Nil)
    checkAnswer(
      nullData.filter($"b" <=> null),
      Row(1, null) :: Row(null, null) :: Nil)
    checkAnswer(
      nullData.filter($"a" <=> $"b"),
      Row(1, 1) :: Row(null, null) :: Nil)
    // Also verify the operator on a nullable string column, not just ints.
    val nullData2 = spark.createDataFrame(sparkContext.parallelize(
      Row("abc") ::
      Row(null) ::
      Row("xyz") :: Nil),
      StructType(Seq(StructField("a", StringType, true))))
    checkAnswer(
      nullData2.filter($"a" <=> null),
      Row(null) :: Nil)
  }
test("=!=") {
checkAnswer(
nullData.filter($"b" =!= 1),
Row(1, 2) :: Nil)
checkAnswer(nullData.filter($"b" =!= null), Nil)
checkAnswer(
nullData.filter($"a" =!= $"b"),
Row(1, 2) :: Nil)
}
test(">") {
checkAnswer(
testData2.filter($"a" > 1),
testData2.collect().toSeq.filter(r => r.getInt(0) > 1))
checkAnswer(
testData2.filter($"a" > $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) > r.getInt(1)))
}
test(">=") {
checkAnswer(
testData2.filter($"a" >= 1),
testData2.collect().toSeq.filter(r => r.getInt(0) >= 1))
checkAnswer(
testData2.filter($"a" >= $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1)))
}
test("<") {
checkAnswer(
testData2.filter($"a" < 2),
testData2.collect().toSeq.filter(r => r.getInt(0) < 2))
checkAnswer(
testData2.filter($"a" < $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) < r.getInt(1)))
}
test("<=") {
checkAnswer(
testData2.filter($"a" <= 2),
testData2.collect().toSeq.filter(r => r.getInt(0) <= 2))
checkAnswer(
testData2.filter($"a" <= $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) <= r.getInt(1)))
}
test("between") {
val testData = sparkContext.parallelize(
(0, 1, 2) ::
(1, 2, 3) ::
(2, 1, 0) ::
(2, 2, 4) ::
(3, 1, 6) ::
(3, 2, 0) :: Nil).toDF("a", "b", "c")
val expectAnswer = testData.collect().toSeq.
filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2))
checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer)
}
test("in") {
val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b")
checkAnswer(df.filter($"a".isin(1, 2)),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, 2)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, 1)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
checkAnswer(df.filter($"b".isin("y", "x")),
df.collect().toSeq.filter(r => r.getString(1) == "y" || r.getString(1) == "x"))
checkAnswer(df.filter($"b".isin("z", "x")),
df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "x"))
checkAnswer(df.filter($"b".isin("z", "y")),
df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "y"))
// Auto casting should work with mixture of different types in collections
checkAnswer(df.filter($"a".isin(1.toShort, "2")),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin("3", 2.toLong)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, "1")),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b")
val e = intercept[AnalysisException] {
df2.filter($"a".isin($"b"))
}
Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were")
.foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("IN/INSET with bytes, shorts, ints, dates") {
def check(): Unit = {
val values = Seq(
(Byte.MinValue, Some(Short.MinValue), Int.MinValue, Date.valueOf("2017-01-01")),
(Byte.MaxValue, None, Int.MaxValue, null))
val df = values.toDF("b", "s", "i", "d")
checkAnswer(df.select($"b".isin(Byte.MinValue, Byte.MaxValue)), Seq(Row(true), Row(true)))
checkAnswer(df.select($"b".isin(-1.toByte, 2.toByte)), Seq(Row(false), Row(false)))
checkAnswer(df.select($"s".isin(Short.MinValue, 1.toShort)), Seq(Row(true), Row(null)))
checkAnswer(df.select($"s".isin(0.toShort, null)), Seq(Row(null), Row(null)))
checkAnswer(df.select($"i".isin(0, Int.MinValue)), Seq(Row(true), Row(false)))
checkAnswer(df.select($"i".isin(null, Int.MinValue)), Seq(Row(true), Row(null)))
checkAnswer(
df.select($"d".isin(Date.valueOf("1950-01-01"), Date.valueOf("2017-01-01"))),
Seq(Row(true), Row(null)))
checkAnswer(
df.select($"d".isin(Date.valueOf("1950-01-01"), null)),
Seq(Row(null), Row(null)))
}
withSQLConf(SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "10") {
check()
}
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0",
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "0") {
check()
}
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0",
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "20") {
check()
}
}
test("isInCollection: Scala Collection") {
val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b")
// Test with different types of collections
checkAnswer(df.filter($"a".isInCollection(Seq(3, 1))),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
checkAnswer(df.filter($"a".isInCollection(Seq(1, 2).toSet)),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isInCollection(Seq(3, 2).toArray)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isInCollection(Seq(3, 1).toList)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b")
val e = intercept[AnalysisException] {
df2.filter($"a".isInCollection(Seq($"b")))
}
Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were")
.foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("&&") {
checkAnswer(
booleanData.filter($"a" && true),
Row(true, false) :: Row(true, true) :: Nil)
checkAnswer(
booleanData.filter($"a" && false),
Nil)
checkAnswer(
booleanData.filter($"a" && $"b"),
Row(true, true) :: Nil)
}
test("||") {
checkAnswer(
booleanData.filter($"a" || true),
booleanData.collect())
checkAnswer(
booleanData.filter($"a" || false),
Row(true, false) :: Row(true, true) :: Nil)
checkAnswer(
booleanData.filter($"a" || $"b"),
Row(false, true) :: Row(true, false) :: Row(true, true) :: Nil)
}
test("SPARK-7321 when conditional statements") {
val testData = (1 to 3).map(i => (i, i.toString)).toDF("key", "value")
checkAnswer(
testData.select(when($"key" === 1, -1).when($"key" === 2, -2).otherwise(0)),
Seq(Row(-1), Row(-2), Row(0))
)
// Without the ending otherwise, return null for unmatched conditions.
// Also test putting a non-literal value in the expression.
checkAnswer(
testData.select(when($"key" === 1, lit(0) - $"key").when($"key" === 2, -2)),
Seq(Row(-1), Row(-2), Row(null))
)
// Test error handling for invalid expressions.
intercept[IllegalArgumentException] { $"key".when($"key" === 1, -1) }
intercept[IllegalArgumentException] { $"key".otherwise(-1) }
intercept[IllegalArgumentException] { when($"key" === 1, -1).otherwise(-1).otherwise(-1) }
}
test("sqrt") {
checkAnswer(
testData.select(sqrt($"key")).orderBy($"key".asc),
(1 to 100).map(n => Row(math.sqrt(n)))
)
checkAnswer(
testData.select(sqrt($"value"), $"key").orderBy($"key".asc, $"value".asc),
(1 to 100).map(n => Row(math.sqrt(n), n))
)
checkAnswer(
testData.select(sqrt(lit(null))),
(1 to 100).map(_ => Row(null))
)
}
test("upper") {
checkAnswer(
lowerCaseData.select(upper($"l")),
('a' to 'd').map(c => Row(c.toString.toUpperCase(Locale.ROOT)))
)
checkAnswer(
testData.select(upper($"value"), $"key"),
(1 to 100).map(n => Row(n.toString, n))
)
checkAnswer(
testData.select(upper(lit(null))),
(1 to 100).map(n => Row(null))
)
checkAnswer(
sql("SELECT upper('aB'), ucase('cDe')"),
Row("AB", "CDE"))
}
test("lower") {
checkAnswer(
upperCaseData.select(lower($"L")),
('A' to 'F').map(c => Row(c.toString.toLowerCase(Locale.ROOT)))
)
checkAnswer(
testData.select(lower($"value"), $"key"),
(1 to 100).map(n => Row(n.toString, n))
)
checkAnswer(
testData.select(lower(lit(null))),
(1 to 100).map(n => Row(null))
)
checkAnswer(
sql("SELECT lower('aB'), lcase('cDe')"),
Row("ab", "cde"))
}
  test("monotonically_increasing_id") {
    // Make sure we have 2 partitions, each with 2 records.
    val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ =>
      Iterator(Tuple1(1), Tuple1(2))
    }.toDF("a")
    // Expected ids: within a partition they count up from 0; rows of the
    // second partition start at 1L << 33, i.e. the partition index is encoded
    // in the bits above the per-partition record counter.
    checkAnswer(
      df.select(monotonically_increasing_id(), expr("monotonically_increasing_id()")),
      Row(0L, 0L) ::
        Row(1L, 1L) ::
        Row((1L << 33) + 0L, (1L << 33) + 0L) ::
        Row((1L << 33) + 1L, (1L << 33) + 1L) :: Nil
    )
  }
test("spark_partition_id") {
// Make sure we have 2 partitions, each with 2 records.
val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ =>
Iterator(Tuple1(1), Tuple1(2))
}.toDF("a")
checkAnswer(
df.select(spark_partition_id()),
Row(0) :: Row(0) :: Row(1) :: Row(1) :: Nil
)
}
test("input_file_name, input_file_block_start, input_file_block_length - more than one source") {
withTempView("tempView1") {
withTable("tab1", "tab2") {
val data = sparkContext.parallelize(0 to 9).toDF("id")
data.write.saveAsTable("tab1")
data.write.saveAsTable("tab2")
data.createOrReplaceTempView("tempView1")
Seq("input_file_name", "input_file_block_start", "input_file_block_length").foreach { f =>
val e = intercept[AnalysisException] {
sql(s"SELECT *, $f() FROM tab1 JOIN tab2 ON tab1.id = tab2.id")
}.getMessage
assert(e.contains(s"'$f' does not support more than one source"))
}
def checkResult(
fromClause: String,
exceptionExpected: Boolean,
numExpectedRows: Int = 0): Unit = {
val stmt = s"SELECT *, input_file_name() FROM ($fromClause)"
if (exceptionExpected) {
val e = intercept[AnalysisException](sql(stmt)).getMessage
assert(e.contains("'input_file_name' does not support more than one source"))
} else {
assert(sql(stmt).count() == numExpectedRows)
}
}
checkResult(
"SELECT * FROM tab1 UNION ALL SELECT * FROM tab2 UNION ALL SELECT * FROM tab2",
exceptionExpected = false,
numExpectedRows = 30)
checkResult(
"(SELECT * FROM tempView1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2",
exceptionExpected = false,
numExpectedRows = 20)
checkResult(
"(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tempView1",
exceptionExpected = false,
numExpectedRows = 20)
checkResult(
"(SELECT * FROM tempView1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2",
exceptionExpected = true)
checkResult(
"(SELECT * FROM tab1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2",
exceptionExpected = true)
checkResult(
"(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2",
exceptionExpected = true)
}
}
}
test("input_file_name, input_file_block_start, input_file_block_length - FileScanRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize(0 to 10).toDF("id")
data.write.parquet(dir.getCanonicalPath)
// Test the 3 expressions when reading from files
val q = spark.read.parquet(dir.getCanonicalPath).select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("input_file_name, input_file_block_start, input_file_block_length - HadoopRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF()
data.write.text(dir.getCanonicalPath)
val df = spark.sparkContext.textFile(dir.getCanonicalPath).toDF()
// Test the 3 expressions when reading from files
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("input_file_name, input_file_block_start, input_file_block_length - NewHadoopRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF()
data.write.text(dir.getCanonicalPath)
val rdd = spark.sparkContext.newAPIHadoopFile(
dir.getCanonicalPath,
classOf[NewTextInputFormat],
classOf[LongWritable],
classOf[Text])
val df = rdd.map(pair => pair._2.toString).toDF()
// Test the 3 expressions when reading from files
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("columns can be compared") {
assert($"key".desc == $"key".desc)
assert($"key".desc != $"key".asc)
}
test("alias with metadata") {
val metadata = new MetadataBuilder()
.putString("originName", "value")
.build()
val schema = testData
.select($"*", col("value").as("abc", metadata))
.schema
assert(schema("value").metadata === Metadata.empty)
assert(schema("abc").metadata === metadata)
}
test("rand") {
val randCol = testData.select($"key", rand(5L).as("rand"))
randCol.columns.length should be (2)
val rows = randCol.collect()
rows.foreach { row =>
assert(row.getDouble(1) <= 1.0)
assert(row.getDouble(1) >= 0.0)
}
def checkNumProjects(df: DataFrame, expectedNumProjects: Int): Unit = {
val projects = df.queryExecution.sparkPlan.collect {
case tungstenProject: ProjectExec => tungstenProject
}
assert(projects.size === expectedNumProjects)
}
// We first create a plan with two Projects.
// Project [rand + 1 AS rand1, rand - 1 AS rand2]
// Project [key, (Rand 5 + 1) AS rand]
// LogicalRDD [key, value]
// Because Rand function is not deterministic, the column rand is not deterministic.
// So, in the optimizer, we will not collapse Project [rand + 1 AS rand1, rand - 1 AS rand2]
// and Project [key, Rand 5 AS rand]. The final plan still has two Projects.
val dfWithTwoProjects =
testData
.select($"key", (rand(5L) + 1).as("rand"))
.select(($"rand" + 1).as("rand1"), ($"rand" - 1).as("rand2"))
checkNumProjects(dfWithTwoProjects, 2)
// Now, we add one more project rand1 - rand2 on top of the query plan.
// Since rand1 and rand2 are deterministic (they basically apply +/- to the generated
// rand value), we can collapse rand1 - rand2 to the Project generating rand1 and rand2.
// So, the plan will be optimized from ...
// Project [(rand1 - rand2) AS (rand1 - rand2)]
// Project [rand + 1 AS rand1, rand - 1 AS rand2]
// Project [key, (Rand 5 + 1) AS rand]
// LogicalRDD [key, value]
// to ...
// Project [((rand + 1 AS rand1) - (rand - 1 AS rand2)) AS (rand1 - rand2)]
// Project [key, Rand 5 AS rand]
// LogicalRDD [key, value]
val dfWithThreeProjects = dfWithTwoProjects.select($"rand1" - $"rand2")
checkNumProjects(dfWithThreeProjects, 2)
dfWithThreeProjects.collect().foreach { row =>
assert(row.getDouble(0) === 2.0 +- 0.0001)
}
}
test("randn") {
val randCol = testData.select($"key", randn(5L).as("rand"))
randCol.columns.length should be (2)
val rows = randCol.collect()
rows.foreach { row =>
assert(row.getDouble(1) <= 4.0)
assert(row.getDouble(1) >= -4.0)
}
}
test("bitwiseAND") {
checkAnswer(
testData2.select($"a".bitwiseAND(75)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) & 75)))
checkAnswer(
testData2.select($"a".bitwiseAND($"b").bitwiseAND(22)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) & r.getInt(1) & 22)))
}
test("bitwiseOR") {
checkAnswer(
testData2.select($"a".bitwiseOR(170)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) | 170)))
checkAnswer(
testData2.select($"a".bitwiseOR($"b").bitwiseOR(42)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) | r.getInt(1) | 42)))
}
test("bitwiseXOR") {
checkAnswer(
testData2.select($"a".bitwiseXOR(112)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ 112)))
checkAnswer(
testData2.select($"a".bitwiseXOR($"b").bitwiseXOR(39)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ r.getInt(1) ^ 39)))
}
test("typedLit") {
val df = Seq(Tuple1(0)).toDF("a")
// Only check the types `lit` cannot handle
checkAnswer(
df.select(typedLit(Seq(1, 2, 3))),
Row(Seq(1, 2, 3)) :: Nil)
checkAnswer(
df.select(typedLit(Map("a" -> 1, "b" -> 2))),
Row(Map("a" -> 1, "b" -> 2)) :: Nil)
checkAnswer(
df.select(typedLit(("a", 2, 1.0))),
Row(Row("a", 2, 1.0)) :: Nil)
}
test("SPARK-31563: sql of InSet for UTF8String collection") {
val inSet = InSet(Literal("a"), Set("a", "b").map(UTF8String.fromString))
assert(inSet.sql === "('a' IN ('a', 'b'))")
}
}
| kevinyu98/spark | sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala | Scala | apache-2.0 | 28,625 |
package org.jetbrains.plugins.scala.lang.resolveSemanticDb
import com.intellij.psi._
import com.intellij.psi.impl.source.PsiAnnotationMethodImpl
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScPackageLike
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScBlockStatement
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFun, ScTypeAlias, ScValueOrVariable}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScPackaging
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.{ScSyntheticClass, ScSyntheticFunction, SyntheticClasses}
/** Builds SemanticDB-style symbol strings so that IntelliJ resolve results
  * (PSI elements) can be compared against symbols recorded by the Scala
  * compiler in .semanticdb files. `fromSemanticDb` normalises one side,
  * `fromPsi` produces the matching representation for the other.
  */
object ComparisonSymbol {
  // sometimes we resolve to AnyRef instead of Object and the other way around... don't bother with these mistakes
  private def stripBases(s: String): String =
    s.stripPrefix("scala/AnyRef#")
      .stripPrefix("scala/Any#")
      .stripPrefix("java/lang/Object#")
      .stripPrefix("java/lang/CharSequence#")

  /** Normalises a raw SemanticDB symbol for comparison with [[fromPsi]]. */
  def fromSemanticDb(s: String): String =
    stripBases(
      s.replaceAll(raw"\\(\\+\\d+\\)", "()") // remove overloading index
        .replaceAll(raw"[^#./()]+\\$$package.", "") // ignore package object path part
    )

  /** Renders the symbol string for a resolved PSI element.
    *
    * The symbol is accumulated left-to-right into `buffer` by walking owners
    * outward first (addOwner) and then appending the element's own name plus
    * a SemanticDB suffix ('/' package, '#' type, '().' method, '.' term).
    *
    * @throws Exception for elements this encoding cannot represent
    *                   (refinement members, unknown synthetic functions).
    */
  def fromPsi(e: PsiNamedElement): String = {
    val buffer = new StringBuilder()

    def add(s: String): Unit = buffer ++= s

    // Backtick-quotes a name unless it is already quoted or is a plain
    // identifier (unicode identifier start/part, '_', '$').
    def escaped(s: String): String = {
      def isStart(c: Char): Boolean = c.isUnicodeIdentifierStart || c == '_' || c == '$'
      def isPart(c: Char): Boolean = c.isUnicodeIdentifierPart || c == '$'
      if (s.headOption.contains('`')) s
      else if (s.headOption.forall(isStart) && s.forall(isPart)) s
      else s"`$s`"
    }

    def addName(name: String): Unit = {
      assert(name != null)
      add(escaped(name))
    }

    // Dots in a qualified name become '/' separators, each part escaped.
    def addFqn(fqn: String): Unit = {
      val parts = fqn.split('.').map(escaped)
      add(parts.mkString("/"))
    }

    // Emits the owner prefix of `e`. Several element kinds short-circuit with
    // `return` because their owner is implicit in how they are rendered.
    def addOwner(e: PsiNamedElement): Unit = {
      (e, e.getContext) match {
        case (o: ScObject, _) if o.isPackageObject =>
          return
        case (_: ScSyntheticClass, _) =>
          return
        case (s: ScSyntheticFunction, _) =>
          // Synthetic functions carry no owner in PSI: search the registry of
          // synthetic classes for the one whose methods contain `s`.
          val synthetics = SyntheticClasses.get(e.getProject)
          val clazz = synthetics.getAll.collectFirst {
            case synth: ScSyntheticClass if synth.syntheticMethods.values().contains(s) => synth
          }
          clazz match {
            case Some(clazz) =>
              add("scala/")
              add(clazz.className)
              add("#")
              return
            case None if s.isStringPlusMethod =>
              add("java/lang/String#")
              return
            case None =>
              throw new Exception(s"Cannot create comparison symbol for unknown synthetic function $s")
          }
        case (p: ScClassParameter, _) if p.isClassMember =>
          // Class parameters that are members are owned by their class.
          addSymName(p.containingClass)
          return
        case (_: ScBlockStatement | _: ScTypeAlias, ctx: ScPackaging) =>
          // this is for toplevel statements
          addFqn(ctx.fqn)
          add("/")
          return
        case _ =>
      }
      // Generic case: nearest named ancestor below the file is the owner;
      // a file with no package maps to the synthetic "_empty_" package.
      e.contexts.takeWhile(!_.is[PsiFile]).collectFirst {
        //case `e` => e.parents.collectFirst { case e: ScNamedElement => e }.foreach(addOwner)
        case ctx: PsiNamedElement => ctx
      } match {
        case Some(e) => addSymName(e)
        case None =>
          val hasPackage = e.getContainingFile match {
            case p: PsiClassOwner if p.getPackageName.nonEmpty => true
            case _ => false
          }
          if (!hasPackage) {
            add("_empty_/")
          }
      }
    }

    // Emits owner prefix + escaped name + SemanticDB suffix for `e`.
    def addSymName(e: PsiNamedElement): Unit = {
      if (e.name == null)
        return
      addOwner(e)
      e match {
        case _: PsiAnnotationMethodImpl =>
          // Annotation attributes are encoded as constructor parameters.
          add("`<init>`().")
        case _ =>
      }
      e match {
        case p: ScClassParameter if p.isClassMember =>
          addName(p.name)
          add(".")
          return
        case p@(_: PsiParameter | _: PsiAnnotationMethodImpl) =>
          // Parameters are wrapped in parentheses: (name)
          add("(")
          addName(p.name)
          add(")")
          return
        case c: PsiClass if c.containingClass == null =>
          val qualName = c.qualifiedName
          if (qualName == null) addName(c.name)
          else addFqn(qualName)
        case _ => addName(e.name)
      }
      // Suffix chooses the SemanticDB descriptor kind:
      // '/' package, '#' type, '().' method/val-like, '.' plain term.
      e match {
        case _: ScPackageLike => add("/")
        case o: ScObject if o.isPackageObject => add("/package.")
        case _: ScObject => add(".")
        case _: PsiEnumConstant => add(".")
        case f: PsiField if f.hasModifierProperty(PsiModifier.FINAL) => add(".")
        //case c: PsiClass if c.isInterface && isInImport => add(".")
        case _: PsiClass | _: PsiType | _: ScTypeAlias | _: ScSyntheticClass => add("#")
        case _: PsiField | _ : ScFun | _: PsiMethod | _: ScValueOrVariable => add("().")
        case _ => add(".")
      }
    }

    // Refinement members have no stable symbol in this scheme.
    if (isInRefinement(e)) {
      throw new Exception(s"Cannot create comparison symbol in refinement for $e")
    }
    e match {
      case p: ScPackageLike if p.fqn == "" =>
        add("_root_/")
      case _ =>
        addSymName(e)
    }
    // The compiler records scala 3 stdlib patches under stdLibPatches; fold
    // them back onto plain scala/ before comparing.
    stripBases(buffer.result().replace("scala/runtime/stdLibPatches/", "scala/"))
  }
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/resolveSemanticDb/ComparisonSymbol.scala | Scala | apache-2.0 | 5,444 |
package com.example
import com.rocketfuel.sdbc.H2._
/**
 * Row type for the `test_class` table (see the queries in the companion
 * object): an integer `id` column and a string `value` column.
 */
case class TestClass(
  id: Int,
  value: String
)
/**
 * Companion holding sdbc query "key" types for [[TestClass]]. Each nested
 * case class is a typed query parameter set, and its companion provides the
 * implicit `Selectable`/`Updatable` instances that bind the key's fields to
 * the named placeholders (`@value`, `@id`) in the SQL.
 */
object TestClass {
  // Key for querying/inserting by the `value` column.
  final case class Value(value: String)
  object Value {
    // Select rows whose `value` column matches the key.
    implicit val selectable: Selectable[Value, TestClass] =
      Select[TestClass]("SELECT * FROM test_class WHERE value = @value").selectable[Value].product
    // Insert a new row with the key's value (id presumably auto-generated —
    // confirm against the table DDL).
    implicit val updatable: Updatable[Value] =
      Update("INSERT INTO test_class (value) VALUES (@value)").updatable[Value].product
  }
  // Key for looking a row up by primary key.
  final case class Id(id: Int)
  object Id {
    implicit val selectable: Selectable[Id, TestClass] =
      Select[TestClass]("SELECT * FROM test_class WHERE id = @id").selectable[Id].product
  }
  // Key for whole-table operations; `newValue` is only used by the
  // select-for-update below, which rewrites every row's `value` column.
  final case class All(newValue: String)
  object All {
    // Selecting takes no parameters, so the instance is keyed on the
    // singleton `All.type` and built with `.constant`.
    implicit val selectable: Selectable[All.type, TestClass] =
      Select[TestClass]("SELECT * FROM test_class").selectable[All.type].constant
    // Cursor-based update: for each selected row, set `value` to
    // `key.newValue` and push the change with `updateRow()`.
    implicit val updatable: SelectForUpdatable[All] =
      SelectForUpdate("SELECT id, value FROM test_class").
        selectForUpdatable.constant {
          (key: All) => (row: UpdatableRow) =>
            row("value") = key.newValue
            row.updateRow()
        }
  }
}
| rocketfuel/sdbc | examples/src/main/scala/com/example/TestClass.scala | Scala | bsd-3-clause | 1,158 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.parser
import scala.collection.mutable
import scala.util.control.NonFatal

import org.apache.spark.sql.{CarbonEnv, SparkSession}
import org.apache.spark.sql.catalyst.parser.{AbstractSqlParser, ParseException, SqlBaseParser}
import org.apache.spark.sql.catalyst.parser.ParserUtils._
import org.apache.spark.sql.catalyst.parser.SqlBaseParser.{CreateTableContext, TablePropertyListContext}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkSqlAstBuilder
import org.apache.spark.sql.execution.command.{BucketFields, CreateTable, Field, PartitionerField, TableModel}
import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}

import org.apache.carbondata.core.util.{CarbonSessionInfo, SessionParams, ThreadLocalSessionInfo}
import org.apache.carbondata.spark.CarbonOption
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
import org.apache.carbondata.spark.util.CommonUtil
/**
 * Concrete parser for Spark SQL statements and carbon specific
* statements
*/
/**
 * SQL parser that first tries Spark's own parser and, when Spark rejects the
 * statement, retries with the carbon-specific parser.
 *
 * Carbon validation errors ([[MalformedCarbonCommandException]]) from either
 * parser are rethrown unchanged so the caller sees the precise message.
 *
 * Fix: the original caught every `Throwable` (bare `case ex =>`), which
 * routed fatal errors such as OutOfMemoryError into the fallback parse or a
 * `sys.error`. Both catch blocks now use [[NonFatal]] so fatal errors
 * propagate untouched.
 */
class CarbonSparkSqlParser(conf: SQLConf, sparkSession: SparkSession) extends AbstractSqlParser {

  val astBuilder = new CarbonSqlAstBuilder(conf)

  private val substitutor = new VariableSubstitution(conf)

  override def parsePlan(sqlText: String): LogicalPlan = {
    // Publish the carbon session state on the current thread so code invoked
    // during parsing/planning on this thread can read session parameters.
    val carbonSessionInfo: CarbonSessionInfo = CarbonEnv.getInstance(sparkSession).carbonSessionInfo
    ThreadLocalSessionInfo.setCarbonSessionInfo(carbonSessionInfo)
    try {
      super.parsePlan(sqlText)
    } catch {
      case ce: MalformedCarbonCommandException =>
        // Carbon validation error: no point retrying, surface it directly.
        throw ce
      case NonFatal(ex) =>
        // Spark could not parse the statement; fall back to the carbon parser.
        try {
          astBuilder.parser.parse(sqlText)
        } catch {
          case mce: MalformedCarbonCommandException =>
            throw mce
          case NonFatal(e) =>
            // Neither parser accepted the statement: report both messages.
            sys
              .error("\n" + "BaseSqlParser>>>> " + ex.getMessage + "\n" + "CarbonSqlParser>>>> " +
                     e.getMessage)
        }
    }
  }

  protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
    // Apply variable substitution (e.g. ${var}) before delegating to the
    // standard parse machinery.
    super.parse(substitutor.substitute(command))(toResult)
  }
}
/**
 * AST builder that intercepts CREATE TABLE statements targeting the
 * carbondata storage handler and turns them into a carbon [[CreateTable]]
 * command; every other statement is delegated to Spark's default
 * [[SparkSqlAstBuilder]].
 */
class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf) {

  // Carbon SQL parser, reused below for field extraction, bucket-field
  // extraction and table-model preparation.
  val parser = new CarbonSpark2SqlParser

  /**
   * Builds the logical plan for CREATE TABLE. When the statement is
   * "STORED BY 'carbondata'" (or the legacy
   * 'org.apache.carbondata.format'), validates the carbon-specific
   * constraints and returns a carbon [[CreateTable]]; otherwise falls back
   * to standard Spark behaviour.
   */
  override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = {
    // "STORED BY '<handler>'" yields the quoted handler token text;
    // "STORED AS <format>" or a missing clause yields "".
    val fileStorage = Option(ctx.createFileFormat) match {
      case Some(value) =>
        if (value.children.get(1).getText.equalsIgnoreCase("by")) {
          value.storageHandler().STRING().getSymbol.getText
        } else {
          // The case of "STORED AS PARQUET/ORC"
          ""
        }
      case _ => ""
    }
    // The single quotes are part of the comparison because the handler token
    // text is taken verbatim from the parse tree.
    if (fileStorage.equalsIgnoreCase("'carbondata'") ||
        fileStorage.equalsIgnoreCase("'org.apache.carbondata.format'")) {
      val (name, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)
      // TODO: implement temporary tables
      if (temp) {
        throw new ParseException(
          "CREATE TEMPORARY TABLE is not supported yet. " +
          "Please use CREATE TEMPORARY VIEW as an alternative.", ctx)
      }
      if (ctx.skewSpec != null) {
        operationNotAllowed("CREATE TABLE ... SKEWED BY", ctx)
      }
      if (ctx.bucketSpec != null) {
        operationNotAllowed("CREATE TABLE ... CLUSTERED BY", ctx)
      }
      // Columns declared in PARTITIONED BY (...): carried both as
      // PartitionerFields (for validation) and appended to the schema handed
      // to the carbon parser below.
      val partitionByStructFields = Option(ctx.partitionColumns).toSeq.flatMap(visitColTypeList)
      val partitionerFields = partitionByStructFields.map { structField =>
        PartitionerField(structField.name, Some(structField.dataType.toString), null)
      }
      val cols = Option(ctx.columns).toSeq.flatMap(visitColTypeList)
      val properties = Option(ctx.tablePropertyList).map(visitPropertyKeyValues)
        .getOrElse(Map.empty)

      // Ensuring whether no duplicate name is used in table definition
      val colNames = cols.map(_.name)
      if (colNames.length != colNames.distinct.length) {
        val duplicateColumns = colNames.groupBy(identity).collect {
          case (x, ys) if ys.length > 1 => "\"" + x + "\""
        }
        operationNotAllowed(s"Duplicated column names found in table definition of $name: " +
          duplicateColumns.mkString("[", ",", "]"), ctx)
      }

      // Mutable copy of TBLPROPERTIES; the carbon helpers below read and may
      // update it in place.
      val tableProperties = mutable.Map[String, String]()
      properties.foreach{property => tableProperties.put(property._1, property._2)}

      // validate partition clause
      if (partitionerFields.nonEmpty) {
        if (!CommonUtil.validatePartitionColumns(tableProperties, partitionerFields)) {
          throw new MalformedCarbonCommandException("Error: Invalid partition definition")
        }
        // partition columns should not be part of the schema
        val badPartCols = partitionerFields.map(_.partitionColumn).toSet.intersect(colNames.toSet)
        if (badPartCols.nonEmpty) {
          operationNotAllowed(s"Partition columns should not be specified in the schema: " +
            badPartCols.map("\"" + _ + "\"").mkString("[", ",", "]"), ctx)
        }
      }
      val fields = parser.getFields(cols ++ partitionByStructFields)
      val options = new CarbonOption(properties)
      // validate tblProperties
      val bucketFields = parser.getBucketFields(tableProperties, fields, options)
      // prepare table model of the collected tokens
      val tableModel: TableModel = parser.prepareTableModel(ifNotExists,
        convertDbNameToLowerCase(name.database),
        name.table.toLowerCase,
        fields,
        partitionerFields,
        tableProperties,
        bucketFields)

      CreateTable(tableModel)
    } else {
      super.visitCreateTable(ctx)
    }
  }

  /**
   * This method will convert the database name to lower case
   *
   * @param dbName optional database name as written in the statement
   * @return Option of String, lower-cased when present
   */
  protected def convertDbNameToLowerCase(dbName: Option[String]): Option[String] = {
    dbName match {
      case Some(databaseName) => Some(databaseName.toLowerCase)
      case None => dbName
    }
  }

  /**
   * Parse a key-value map from a [[TablePropertyListContext]], assuming all values are specified.
   * Keys are always lower-cased; values are lower-cased too unless the key is
   * exempted by [[needToConvertToLowerCase]].
   */
  private def visitPropertyKeyValues(ctx: TablePropertyListContext): Map[String, String] = {
    val props = visitTablePropertyList(ctx)
    // A null value means the property was written without "= value".
    val badKeys = props.filter { case (_, v) => v == null }.keys
    if (badKeys.nonEmpty) {
      operationNotAllowed(
        s"Values must be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
    }
    props.map { case (key, value) =>
      if (needToConvertToLowerCase(key)) {
        (key.toLowerCase, value.toLowerCase)
      } else {
        (key.toLowerCase, value)
      }
    }
  }

  // Values for these keys are kept exactly as written (not lower-cased) —
  // presumably because partition list/range boundary values are
  // case-sensitive; confirm against the partition validation code.
  private def needToConvertToLowerCase(key: String): Boolean = {
    val noConvertList = Array("LIST_INFO", "RANGE_INFO")
    !noConvertList.exists(x => x.equalsIgnoreCase(key));
  }
}
| aniketadnaik/carbondataStreamIngest | integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala | Scala | apache-2.0 | 7,673 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.