code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package geotrellis.migration.cli
import TransformArgs._
import monocle.macros.GenLens
/**
 * Arguments controlling how a layer is transformed during migration.
 *
 * @param indexType          key index to build; must be one of [[TransformArgs.indexTypes]] ("zorder" by default)
 * @param tileType           "singleband" or "multiband"; only meaningful for the hadoop && file backends
 * @param xResolution        x resolution, used only by the "hilbert" index
 * @param yResolution        y resolution, used only by the "hilbert" index
 * @param format             always empty here; filled in later by the backend tools
 * @param temporalResolution None means a purely spatial index; used for all space-time indexes otherwise
 */
case class TransformArgs(
  indexType: String = indexTypes.head, // zorder by default
  tileType: String = tileTypes.head, // only for hadoop && file
  xResolution: Int = 0, // for hilbert
  yResolution: Int = 0, // for hilbert
  format: String = "", // always empty, set by backend tools
  temporalResolution: Option[Long] = None // spatial index by default, used for all space indexes
) {
  // Fail fast at construction time when an unsupported value slips through.
  private def ensureKnown(value: String, allowed: List[String], kind: String): Unit =
    if (!allowed.contains(value))
      throw new Exception(s"Unsupported $kind type. Available $kind types: ${allowed.mkString(", ")}")

  ensureKnown(indexType, indexTypes, "index")
  ensureKnown(tileType, tileTypes, "tile")
}
object TransformArgs {
// Whitelists of supported values, validated by the TransformArgs constructor.
// The head of each list doubles as the default field value.
val indexTypes = List("zorder", "hilbert", "rowmajor")
val tileTypes = List("singleband", "multiband")
val backendTypes = List("hadoop", "file", "s3", "accumulo")
// Monocle lenses (macro-generated via GenLens) for immutably updating single fields.
val indexType = GenLens[TransformArgs](_.indexType)
val tileType = GenLens[TransformArgs](_.tileType)
val xResolution = GenLens[TransformArgs](_.xResolution)
val yResolution = GenLens[TransformArgs](_.yResolution)
val format = GenLens[TransformArgs](_.format)
val temporalResolution = GenLens[TransformArgs](_.temporalResolution)
}
| lossyrob/geotrellis-migration-tool | src/main/scala/geotrellis/migration/cli/TransformArgs.scala | Scala | apache-2.0 | 1,348 |
/*
*************************************************************************************
* Copyright 2016 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.web
import net.liftweb._
import net.liftweb.http._
import net.liftweb.util.Helpers._
import net.liftweb.common._
import scala.xml.NodeSeq
/**
* A replacement for "chooseTemplate" from Lift 2.6.
* It is based on CSS selector.
* It should be just a temporary replacement of real
* use of CSS selector with correct HTML5 pages.
*
* If the template is missing, it produces a sys.error (fatal).
* Better for dev, but you should have your template.
*/
object ChooseTemplate {

  /**
   * Loads the template at `templatePath` and returns the children of the node
   * matched by `selector`.
   *
   * Typical use case to replace chooseTemplate:
   * ChooseTemplate("templates-hidden" :: "foo" :: Nil, "component-plop")
   *
   * A missing template is a fatal error (sys.error) by design: better to fail
   * loudly during development than to render an empty page.
   */
  def apply(templatePath: List[String], selector: String): NodeSeq = {
    val template: NodeSeq = Templates(templatePath) match {
      case Full(found) =>
        found
      case _: EmptyBox =>
        sys.error(s"Template for path ${templatePath.mkString("/")}.html not found.")
    }
    // Select the children of the matched node; with the `^*` (children) form the
    // right-hand side of `#>` is never used, so any placeholder value works.
    val extractChildren = s"$selector ^*" #> "not relevant"
    extractChildren(template)
  }
}
| armeniaca/rudder | rudder-web/src/main/scala/com/normation/rudder/web/ChooseTemplate.scala | Scala | gpl-3.0 | 2,727 |
package com.github.tminglei.slickpg
import org.scalatest.FunSuite
import slick.driver.PostgresDriver
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Integration suite for PgEnumSupport: maps scala.Enumeration values, sealed-trait
 * "enums", Java enums and other ADTs to PostgreSQL enum types (and arrays of them).
 * NOTE(review): these tests talk to a live PostgreSQL instance at utils.dbUrl.
 */
class PgEnumSupportSuite extends FunSuite {
// Plain scala.Enumeration-based enums used for the basic mapping tests.
object WeekDays extends Enumeration {
type WeekDay = Value
val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
}
object Rainbows extends Enumeration {
type Rainbow = Value
val red, orange, yellow, green, blue, purple = Value
}
///---
// Minimal "enum" encoding for sealed case objects: each Value instance registers
// itself into _values as a side effect of its construction.
trait Enum[A] {
trait Value { self: A =>
_values :+= this
}
private var _values = List.empty[A]
// Name -> instance map. NOTE(review): case objects initialize lazily, so this map
// is only complete once every member has been touched - confirm that is acceptable here.
def values = _values map (v => (v.toString, v)) toMap
}
object Currency extends Enum[Currency]
sealed trait Currency extends Currency.Value
case object EUR extends Currency
case object GBP extends Currency
case object USD extends Currency
// ADT with a custom string representation ("f"/"m") stored in the database.
sealed abstract class Gender(val repr: String)
case object Female extends Gender("f")
case object Male extends Gender("m")
object Gender {
def fromString(s: String): Gender = s match {
case "f" => Female
case "m" => Male
}
def values = Seq(Female, Male)
}
/////////////////////////////////////////////////////////////////////
import WeekDays._
import Rainbows._
// Driver extended with JDBC mappings for all of the enum flavours above.
// NOTE(review): `Languages` is not defined in this file; the name()/valueOf usage
// suggests a Java enum defined elsewhere in the test sources - confirm.
object MyPostgresDriver1 extends PostgresDriver with PgEnumSupport {
override val api = new API with MyEnumImplicits {}
trait MyEnumImplicits {
implicit val weekDayTypeMapper = createEnumJdbcType("WeekDay", WeekDays)
implicit val weekDayListTypeMapper = createEnumListJdbcType("weekDay", WeekDays)
implicit val rainbowTypeMapper = createEnumJdbcType("Rainbow", Rainbows, true)
implicit val rainbowListTypeMapper = createEnumListJdbcType("Rainbow", Rainbows, true)
implicit val weekDayColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder(WeekDays)
implicit val weekDayOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder(WeekDays)
implicit val rainbowColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder(Rainbows)
implicit val rainbowOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder(Rainbows)
/// custom types of java enums and algebraic data type (ADT)
implicit val currencyTypeMapper = createEnumJdbcType[Currency]("Currency", _.toString, Currency.values.get(_).get, quoteName = false)
implicit val currencyTypeListMapper = createEnumListJdbcType[Currency]("Currency", _.toString, Currency.values.get(_).get, quoteName = false)
implicit val languagesTypeMapper = createEnumJdbcType[Languages]("Languages", _.name(), Languages.valueOf, quoteName = true)
implicit val languagesTypeListMapper = createEnumListJdbcType[Languages]("Languages", _.name(), Languages.valueOf, quoteName = true)
implicit val genderTypeMapper = createEnumJdbcType[Gender]("Gender", _.repr, Gender.fromString, quoteName = false)
implicit val genderTypeListMapper = createEnumListJdbcType[Gender]("Gender", _.repr, Gender.fromString, quoteName = false)
implicit val currencyColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder[Currency]
implicit val currencyOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder[Currency]
implicit val languagesColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder[Languages]
implicit val languagesOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder[Languages]
implicit val genderColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder[Gender]
implicit val genderOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder[Gender]
}
}
////////////////////////////////////////////////////////////////////
import MyPostgresDriver1.api._
val db = Database.forURL(url = utils.dbUrl, driver = "org.postgresql.Driver")
// Row and table for the Enumeration-based mappings.
case class TestEnumBean(
id: Long,
weekday: WeekDay,
rainbow: Option[Rainbow],
weekdays: List[WeekDay],
rainbows: List[Rainbow])
class TestEnumTable(tag: Tag) extends Table[TestEnumBean](tag, "test_enum_table") {
def id = column[Long]("id")
def weekday = column[WeekDay]("weekday", O.Default(Mon))
def rainbow = column[Option[Rainbow]]("rainbow")
def weekdays = column[List[WeekDay]]("weekdays")
def rainbows = column[List[Rainbow]]("rainbows")
def * = (id, weekday, rainbow, weekdays, rainbows) <> (TestEnumBean.tupled, TestEnumBean.unapply)
}
val TestEnums = TableQuery(new TestEnumTable(_))
///---
// Row and table for the custom (sealed trait / Java enum / ADT) mappings.
case class TestEnumBean1(
id: Long,
currency: Currency,
language: Option[Languages],
gender: Gender,
currencies: List[Currency],
languages: List[Languages])
class TestEnumTable1(tag: Tag) extends Table[TestEnumBean1](tag, "test_enum_table_1") {
def id = column[Long]("id")
def currency = column[Currency]("currency")
def language = column[Option[Languages]]("language")
def gender = column[Gender]("gender")
def currencies = column[List[Currency]]("currencies")
def languages = column[List[Languages]]("languages")
def * = (id, currency, language, gender, currencies, languages) <> (TestEnumBean1.tupled, TestEnumBean1.unapply)
}
val TestEnums1 = TableQuery(new TestEnumTable1(_))
//------------------------------------------------------------------
val testRec1 = TestEnumBean(101L, Mon, Some(red), Nil, List(red, yellow))
val testRec2 = TestEnumBean(102L, Wed, Some(blue), List(Sat, Sun), List(green))
val testRec3 = TestEnumBean(103L, Fri, None, List(Thu), Nil)
val testRec11 = TestEnumBean1(101L, EUR, Some(Languages.SCALA), Male, Nil, List(Languages.SCALA, Languages.CLOJURE))
val testRec12 = TestEnumBean1(102L, GBP, None, Female, List(EUR, GBP, USD), List(Languages.JAVA))
val testRec13 = TestEnumBean1(103L, USD, Some(Languages.CLOJURE), Male, List(GBP), Nil)
// Round-trips records and exercises the enum column operators
// (first/last/all/range) for the Enumeration-based mappings.
test("Enum Lifted support") {
Await.result(db.run(
DBIO.seq(
PgEnumSupportUtils.buildCreateSql("WeekDay", WeekDays),
PgEnumSupportUtils.buildCreateSql("Rainbow", Rainbows, true),
(TestEnums.schema) create,
///
TestEnums forceInsertAll List(testRec1, testRec2, testRec3)
).andThen(
DBIO.seq(
TestEnums.sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// first
TestEnums.filter(_.id === 101L.bind).map(t => t.weekday.first).result.head.map(
r => assert(Mon === r)
),
// last
TestEnums.filter(_.id === 101L.bind).map(t => t.rainbow.last).result.head.map(
r => assert(Some(purple) === r)
),
// all
TestEnums.filter(_.id === 101L.bind).map(t => t.weekday.all).result.head.map(
r => assert(WeekDays.values.toList === r)
),
// range
TestEnums.filter(_.id === 102L.bind).map(t => t.weekday range null.asInstanceOf[WeekDay]).result.head.map(
r => assert(List(Wed, Thu, Fri, Sat, Sun) === r)
),
TestEnums.filter(_.id === 102L.bind).map(t => null.asInstanceOf[WeekDay].bind range t.weekday).result.head.map(
r => assert(List(Mon, Tue, Wed) === r)
)
)
).andFinally(
DBIO.seq(
(TestEnums.schema) drop,
PgEnumSupportUtils.buildDropSql("Rainbow", true),
// NOTE(review): PostgreSQL folds unquoted identifiers to lowercase, so "weekday"
// presumably drops the type created above as unquoted "WeekDay" - confirm.
PgEnumSupportUtils.buildDropSql("weekday")
)
) .transactionally
), Duration.Inf)
}
// Same coverage for the custom mappings, plus a filter on the ADT column.
test("Custom enum Lifted support") {
Await.result(db.run(
DBIO.seq(
PgEnumSupportUtils.buildCreateSql("Currency", Currency.values.toStream.map(_._1), quoteName = false),
PgEnumSupportUtils.buildCreateSql("Languages", Languages.values.toStream.map(_.name()), quoteName = true),
PgEnumSupportUtils.buildCreateSql("Gender", Gender.values.map(_.repr), quoteName = false),
(TestEnums1.schema) create,
///
TestEnums1 forceInsertAll List(testRec11, testRec12, testRec13)
).andThen(
DBIO.seq(
TestEnums1.sortBy(_.id).to[List].result.map(
r => assert(List(testRec11, testRec12, testRec13) === r)
),
// first
TestEnums1.filter(_.id === 101L.bind).map(t => t.currency.first).result.head.map(
r => assert(EUR === r)
),
// last
TestEnums1.filter(_.id === 101L.bind).map(t => t.language.last).result.head.map(
r => assert(Some(Languages.CLOJURE) === r)
),
// all
TestEnums1.filter(_.id === 101L.bind).map(t => t.currency.all).result.head.map(
r => assert(Currency.values.toList.map(_._2) === r)
),
// range
TestEnums1.filter(_.id === 102L.bind).map(t => t.currency range null.asInstanceOf[Currency]).result.head.map(
r => assert(List(GBP, USD) === r)
),
TestEnums1.filter(_.id === 102L.bind).map(t => null.asInstanceOf[Currency].bind range t.currency).result.head.map(
r => assert(List(EUR, GBP) === r)
),
TestEnums1.filter(_.gender === (Female: Gender)).result.map(
r => assert(List(testRec12) === r)
)
)
).andFinally(
DBIO.seq(
(TestEnums1.schema) drop,
PgEnumSupportUtils.buildDropSql("Currency"),
PgEnumSupportUtils.buildDropSql("Languages", true),
PgEnumSupportUtils.buildDropSql("Gender")
)
) .transactionally
), Duration.Inf)
}
}
| vikraman/slick-pg | src/test/scala/com/github/tminglei/slickpg/PgEnumSupportSuite.scala | Scala | bsd-2-clause | 9,645 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus
import com.twitter.util.{ Await, Future }
import org.scalacheck.{Prop, Arbitrary, Properties}
import org.scalacheck.Prop._
/**
 * Reusable ScalaCheck properties for Store implementations: after writing a list
 * of key -> Option[value] pairs, every key must read back exactly the value that
 * was written (None standing for deletion/absence).
 */
object StoreProperties extends Properties("Store") {
// Shared skeleton. `storeIn` is by-name and bound through a `lazy val`, so every
// generated example operates on a fresh store instance. `put` performs the writes
// and returns the expected key -> value view that is then verified with `get`.
def baseTest[K: Arbitrary, V: Arbitrary: Equiv](storeIn: => Store[K, V])
(put: (Store[K, V], List[(K, Option[V])]) => Map[K, Option[V]]): Prop = {
forAll { (examples: List[(K, Option[V])]) =>
lazy val store = storeIn
put(store, examples).forall { case (k, optV) =>
Equiv[Option[V]].equiv(Await.result(store.get(k)), optV)
}
}
}
// Writes the pairs one at a time, sequenced via flatMap so puts happen in list
// order (a later write for the same key wins, matching `pairs.toMap`).
def putStoreTest[K: Arbitrary, V: Arbitrary: Equiv](store: => Store[K, V]): Prop =
baseTest(store) { (s, pairs) =>
Await.result(pairs.foldLeft(Future.Unit) { (fOld, p) => fOld.flatMap { _ => s.put(p) } })
pairs.toMap
}
// Writes all pairs in a single multiPut batch.
def multiPutStoreTest[K: Arbitrary, V: Arbitrary: Equiv](store: => Store[K, V]): Prop =
baseTest(store) { (s, pairs) =>
Await.result(FutureOps.mapCollect(s.multiPut(pairs.toMap)))
pairs.toMap
}
def storeTest[K: Arbitrary, V: Arbitrary: Equiv](store: => Store[K, V]): Prop =
putStoreTest(store) && multiPutStoreTest(store)
// Variant for stores that normalize values on write: the expected read-back is
// `norm` applied to whatever was written.
def sparseStoreTest[K: Arbitrary, V: Arbitrary: Equiv](
norm: Option[V] => Option[V])(store: => Store[K, V]): Prop =
baseTest(store) { (s, pairs) =>
Await.result(pairs.foldLeft(Future.Unit) { (fOld, p) => fOld.flatMap { _ => s.put(p) } })
pairs.toMap.mapValues(norm)
} && baseTest(store) { (s, pairs) =>
Await.result(FutureOps.mapCollect(s.multiPut(pairs.toMap)))
pairs.toMap.mapValues(norm)
}
property("ConcurrentHashMapStore test") =
storeTest(new ConcurrentHashMapStore[String, Int]())
// For a store combined with ReadableStore.first over two map-backed stores, the
// property only requires that each key resolves to one of the two maps' values
// (or None from both when the key is absent).
property("Or works as expected") = forAll { (m1: Map[String, Int], m2: Map[String, Int]) =>
val orRO = ReadableStore.first(Seq(ReadableStore.fromMap(m1), ReadableStore.fromMap(m2)))
(m1.keySet ++ m2.keySet).forall { k =>
(Await.result(orRO.get(k)) == m1.get(k)) ||
(Await.result(orRO.get(k)) == m2.get(k))
}
}
}
| twitter/storehaus | storehaus-core/src/test/scala/com/twitter/storehaus/StoreProperties.scala | Scala | apache-2.0 | 2,648 |
package processes.freeMonads.vanillaScala.multiple
import processes.freeMonads.vanillaScala.MultipleMachinery
import processes.freeMonads.multiple.CompleteProgramParts
import scala.language.higherKinds
import scala.concurrent.Future
/*
* Warning, this file contains stuff that might melt your brain. Type-level
* programming relies (just as some functional programming does) a lot on recursive
* constructs, which human brains seem not to be able to handle very well.
*/
/*
 * Merges a Branch instruction out of an instruction coproduct O: the branch's
 * early-exit value (Left) and the program's normal result (Right) are collapsed
 * into a single value, yielding a program over the smaller coproduct.
 */
trait ProgramMerger { _: MultipleMachinery with CompleteProgramParts =>
implicit class ProgramEnhancement[O[_], A](program: Free[O, A]) {
def mergeBranch(implicit merger: BranchMerger[O, A]) = merger merged program
}
// Evidence that a Branch can be merged out of O, producing programs over Out.
trait BranchMerger[O[_], A] {
type Out[_]
def merged(program: Free[O, A]): Free[Out, A]
}
object BranchMerger {
implicit def merger[O[_], T](
implicit containsMergableBranch: Branch[T]#Instance ~> O,
result: O - Branch[T]#Instance) = {
// Intermediary type that carries the branch value out-of-band: Left(t) is an
// early exit with the branch's value, Right(a) a normal result. T and A will be
// the same type; `containsMergableBranch` (see above) proves the branch is in O.
type FreeWithoutBranch[A] = Free[result.Out, Either[T, A]]
// To turn Free[O, A] into FreeWithoutBranch[A] we need FreeWithoutBranch
// to have a monad. Its flatMap short-circuits on Left, mirroring how taking
// a branch aborts the rest of the program.
implicit val freeWithoutBranchMonad = new Monad[FreeWithoutBranch] {
def create[A](a: A): Free[result.Out, Either[T, A]] = Free(Right(a))
def flatMap[A, B](fa: Free[result.Out, Either[T, A]])(f: A => Free[result.Out, Either[T, B]]) =
fa.flatMap {
case Left(t) => Free[result.Out, Either[T, B]](Left(t))
case Right(a) => f(a)
}
}
// Natural transformation that strips each instruction: a Branch becomes an
// immediate Left (early exit); any other instruction is re-lifted into the
// smaller coproduct and its result wrapped in Right.
val mapper = new (O ~> FreeWithoutBranch) {
def apply[A](o: O[A]): FreeWithoutBranch[A] =
result.removeFrom(o) match {
case Left(a) => Free(Left(a.value))
case Right(b) => Free.lift(b).map(Right.apply)
}
}
// Merging is now just running the program through `mapper` and collapsing the
// resulting Either[T, T] with `merge` (both sides have the same type here).
new BranchMerger[O, T] {
type Out[x] = result.Out[x]
def merged(program: Free[O, T]): Free[Out, T] =
program.run(mapper).map(_.merge)
}
}
}
// Type to represent a removal at the type level: Out is O with T removed.
trait Remove[O[_], T[_]] {
type Out[_]
// If the instruction is a T, return it as Left; otherwise return the
// instruction of the remaining coproduct as Right.
def removeFrom[A](o: O[A]): Either[T[A], Out[A]]
}
// Simple alias to allow the fancy O - T notation
type -[O[_], T[_]] = Remove[O, T]
object Remove {
// Base case: T sits at the head of the coproduct.
implicit def atHead[T[_], Tail[_]] =
new Remove[Co[T, Tail]#Product, T] {
type Out[x] = Tail[x]
def removeFrom[A](o: Co[T, Tail]#Product[A]): Either[T[A], Out[A]] =
o.value
}
// Base case: T is the tail of a two-element coproduct.
implicit def atTail[T[_], Head[_]] =
new Remove[Co[Head, T]#Product, T] {
type Out[x] = Head[x]
def removeFrom[A](o: Co[Head, T]#Product[A]): Either[T[A], Out[A]] =
o.value.swap
}
// Recursive case: T is somewhere in the tail; remove it there and rebuild
// the coproduct around the smaller tail.
implicit def inTail[T[_], Head[_], Tail[_]](
implicit resultType: Tail - T) =
new Remove[Co[Head, Tail]#Product, T] {
type Out[x] = Co[Head, resultType.Out]#Product[x]
def removeFrom[A](o: Co[Head, Tail]#Product[A]): Either[T[A], Out[A]] =
o.value match {
case Left(a) =>
Right(Co[Head, resultType.Out].Product(Left(a)))
case Right(b) =>
resultType.removeFrom(b) match {
case Left(a) => Left(a)
case Right(b) => Right(Co[Head, resultType.Out].Product(Right(b)))
}
}
}
}
} | EECOLOR/scala-clean-code-patterns | src/main/scala/processes/freeMonads/vanillaScala/multiple/ProgramMerger.scala | Scala | mit | 3,705 |
package io.udash
package rest.jetty
import com.avsystem.commons._
import com.avsystem.commons.annotation.explicitGenerics
import io.udash.rest.raw._
import io.udash.utils.URLEncoder
import monix.eval.Task
import org.eclipse.jetty.client.HttpClient
import org.eclipse.jetty.client.api.Result
import org.eclipse.jetty.client.util.{BufferingResponseListener, BytesContentProvider, StringContentProvider}
import org.eclipse.jetty.http.{HttpHeader, MimeTypes}
import java.net.HttpCookie
import java.nio.charset.Charset
import scala.concurrent.duration._
import scala.util.{Failure, Success}
object JettyRestClient {
// Default cap for the buffered response body (2 MiB) and default request timeout.
final val DefaultMaxResponseLength = 2 * 1024 * 1024
final val DefaultTimeout = 10.seconds
// Materializes a RestApi proxy whose method calls are executed over the given Jetty client.
@explicitGenerics def apply[RestApi: RawRest.AsRealRpc : RestMetadata](
client: HttpClient,
baseUri: String,
maxResponseLength: Int = DefaultMaxResponseLength,
timeout: Duration = DefaultTimeout
): RestApi =
RawRest.fromHandleRequest[RestApi](asHandleRequest(client, baseUri, maxResponseLength, timeout))
// Adapts a Jetty HttpClient into a RawRest.HandleRequest: builds the HTTP request
// from the raw REST request, sends it asynchronously and converts the buffered
// response back into a RestResponse. The Task is lazy - nothing is sent until run.
// NOTE(review): this parameter is named `baseUrl` while `apply` calls it `baseUri` -
// consider unifying the naming.
def asHandleRequest(
client: HttpClient,
baseUrl: String,
maxResponseLength: Int = DefaultMaxResponseLength,
timeout: Duration = DefaultTimeout
): RawRest.HandleRequest =
request => Task.async { callback =>
// NOTE(review): `path` is baseUrl + the encoded path, and the request is also
// created from baseUrl; confirm that Request.path with this absolute string does
// not duplicate any path component already present in baseUrl.
val path = baseUrl + PlainValue.encodePath(request.parameters.path)
val httpReq = client.newRequest(baseUrl).method(request.method.name)
httpReq.path(path)
// Copy query parameters, headers and cookies from the raw request.
request.parameters.query.entries.foreach {
case (name, PlainValue(value)) => httpReq.param(name, value)
}
request.parameters.headers.entries.foreach {
case (name, PlainValue(value)) => httpReq.header(name, value)
}
// Cookie names and values are URL-encoded, with spaces encoded as '+'.
request.parameters.cookies.entries.foreach {
case (name, PlainValue(value)) => httpReq.cookie(new HttpCookie(
URLEncoder.encode(name, spaceAsPlus = true), URLEncoder.encode(value, spaceAsPlus = true)))
}
// Attach the body: textual bodies carry an explicit charset, binary ones do not.
request.body match {
case HttpBody.Empty =>
case tb: HttpBody.Textual =>
httpReq.content(new StringContentProvider(tb.contentType, tb.content, Charset.forName(tb.charset)))
case bb: HttpBody.Binary =>
httpReq.content(new BytesContentProvider(bb.contentType, bb.bytes))
}
// Only finite timeouts can be passed to Jetty; an infinite Duration means no timeout.
timeout match {
case fd: FiniteDuration => httpReq.timeout(fd.length, fd.unit)
case _ =>
}
// BufferingResponseListener accumulates the whole body (up to maxResponseLength)
// before onComplete fires; the Task callback is invoked exactly once from there.
httpReq.send(new BufferingResponseListener(maxResponseLength) {
override def onComplete(result: Result): Unit =
if (result.isSucceeded) {
val httpResp = result.getResponse
val contentTypeOpt = httpResp.getHeaders.get(HttpHeader.CONTENT_TYPE).opt
// A charset in Content-Type selects a textual body; a content type without
// charset yields a binary body; no content type at all yields an empty body.
// NOTE(review): getCharsetFromContentType may return null when no charset is
// present - confirm Opt.map turns that into Opt.Empty rather than throwing.
val charsetOpt = contentTypeOpt.map(MimeTypes.getCharsetFromContentType)
val body = (contentTypeOpt, charsetOpt) match {
case (Opt(contentType), Opt(charset)) =>
HttpBody.textual(getContentAsString, MimeTypes.getContentTypeWithoutCharset(contentType), charset)
case (Opt(contentType), Opt.Empty) =>
HttpBody.binary(getContent, contentType)
case _ =>
HttpBody.Empty
}
val headers = httpResp.getHeaders.asScala.iterator.map(h => (h.getName, PlainValue(h.getValue))).toList
val response = RestResponse(httpResp.getStatus, IMapping(headers), body)
callback(Success(response))
} else {
callback(Failure(result.getFailure))
}
})
}
}
| UdashFramework/udash-core | rest/jetty/src/main/scala/io/udash/rest/jetty/JettyRestClient.scala | Scala | apache-2.0 | 3,509 |
package controllers
import javax.inject._
import play.api.Logger
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json._
import play.api.mvc._
import play.modules.reactivemongo._
import reactivemongo.api.ReadPreference
import reactivemongo.play.json._
import reactivemongo.play.json.collection.JSONCollection
import scala.concurrent.{ExecutionContext, Future}
/**
* A bit more complex controller using a Json Coast-to-coast approach. There is no model for Person and some data is created dynamically on creation
* Input is directly converted to JsObject to be stored in MongoDB
*/
@Singleton
class PersonController @Inject()(val reactiveMongoApi: ReactiveMongoApi)(implicit exec: ExecutionContext) extends Controller with MongoController with ReactiveMongoComponents {
// Validates incoming JSON down to the fields we persist (name: string, age: number)
// and stamps a "created" timestamp onto the document.
// NOTE(review): `__ \\ "x"` is the recursive-search JsPath operator; a direct child
// lookup would be `__ \ "x"` - confirm the recursive form is intended here.
// NOTE(review): Play's `Reads.jsPut` takes its value by-name, so the timestamp should
// be re-evaluated per transformed document even though `transformer` is a val - confirm
// for the Play version in use.
val transformer: Reads[JsObject] =
Reads.jsPickBranch[JsString](__ \\ "name") and
Reads.jsPickBranch[JsNumber](__ \\ "age") and
Reads.jsPut(__ \\ "created", JsNumber(new java.util.Date().getTime())) reduce
// The "persons" collection, resolved from the database on each call.
def personsFuture: Future[JSONCollection] = database.map(_.collection[JSONCollection]("persons"))
// Creates one person from query-string parameters and echoes Mongo's write result.
def create(name: String, age: Int) = Action.async {
val json = Json.obj(
"name" -> name,
"age" -> age,
"created" -> new java.util.Date().getTime())
for {
persons <- personsFuture
lastError <- persons.insert(json)
} yield Ok("Mongo LastError: %s".format(lastError))
}
// Creates one person from a JSON body; rejects bodies that fail the transformer.
def createFromJson = Action.async(parse.json) { request =>
request.body.transform(transformer) match {
case JsSuccess(person, _) =>
for {
persons <- personsFuture
lastError <- persons.insert(person)
}
yield {
Logger.debug(s"Successfully inserted with LastError: $lastError")
Created("Created 1 person")
}
case _ =>
Future.successful(BadRequest("invalid json"))
}
}
// Bulk insert from a JSON array body; elements failing the transformer are
// silently dropped (see comment below), only valid documents are inserted.
def createBulkFromJson = Action.async(parse.json) { request =>
//Transformation silent in case of failures.
val documents = for {
persons <- request.body.asOpt[JsArray].toStream
maybePerson <- persons.value
validPerson <- maybePerson.transform(transformer).asOpt.toList
} yield validPerson
for {
persons <- personsFuture
multiResult <- persons.bulkInsert(documents = documents, ordered = true)
} yield {
Logger.debug(s"Successfully inserted with multiResult: $multiResult")
Created(s"Created ${multiResult.n} person")
}
}
// Returns all persons with the given name, newest first, as a JSON array.
def findByName(name: String) = Action.async {
// let's do our query
val cursor: Future[List[JsObject]] = personsFuture.flatMap{ persons =>
// find all people with name `name`
persons.find(Json.obj("name" -> name)).
// sort them by creation date
sort(Json.obj("created" -> -1)).
// perform the query and get a cursor of JsObject
cursor[JsObject](ReadPreference.primary).collect[List]()
}
// everything's ok! Let's reply with a JsValue
cursor.map { persons =>
Ok(Json.toJson(persons))
}
}
// Returns every person, newest first, as a JSON array.
def findAll() = Action.async {
// let's do our query
val cursor: Future[List[JsObject]] = personsFuture.flatMap{ persons =>
// find all people with name `name`
persons.find(Json.obj()).
// sort them by creation date
sort(Json.obj("created" -> -1)).
// perform the query and get a cursor of JsObject
cursor[JsObject](ReadPreference.primary).collect[List]()
}
// everything's ok! Let's reply with a JsValue
cursor.map { persons =>
Ok(Json.toJson(persons))
}
}
}
| LiebelJ/YellowDuckyChat | backend/app/controllers/PersonController.scala | Scala | mit | 3,620 |
/*
* Copyright (c) 2014, PagerDuty
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.pagerduty.sonar
import akka.actor._
import com.pagerduty.sonar.Supervisor.Heartbeat
import scala.concurrent.duration._
import com.pagerduty.sonar.demo.CommLogger
/**
* @author Aleksey Nikiforov
*/
object Watcher {
// All watcher actor names share this prefix; the numeric suffix is the slot index.
private val ActorNamePrefix = "watcher"
def actorName(i: Int): String = ActorNamePrefix + i
// Regex extractor used to recognize watcher actor names (captures the suffix).
final val ActorName = s"$ActorNamePrefix(.*)".r
// Self-message scheduled to fire when the heartbeat countdown expires.
case object HeartbeatTimeout
// Request to double-check a suspected dead client (presumably sent by the
// AlertManager - confirm against its implementation).
case class VerifyAlert(address: Int)
// Reply to VerifyAlert when a heartbeat was actually received in time.
case class FalseAlarm(address: Int)
}
/**
* The hash bucket manager actor allocates one watcher per client. Each watcher maintains a
* countdown and alerts when it reaches zero. When a watcher receives a heartbeat, it resets
* the countdown.
*/
/**
 * Watches a single client address. A scheduled HeartbeatTimeout fires after
 * `alertAfterDuration` unless a matching Heartbeat arrives first, in which case the
 * timer is restarted. On timeout the AlertManager is notified with PossibleAlert;
 * it may then send VerifyAlert, and the watcher replies FalseAlarm if a heartbeat
 * was in fact seen within the window.
 */
class Watcher(val address: Int) extends CommLogger with Actor with ActorLogging {
import Watcher._
import context.dispatcher
// How long to wait for a heartbeat before raising a possible alert (configured in ms).
val alertAfterDuration = context.system.settings.config.getInt(
"com.pagerduty.sonar.alertAfterDurationMs").millis
// Mutable actor state: the pending timeout task and the timestamp of the last heartbeat.
var heartbeatTimeout = nextTimeout()
var lastHeartbeat = System.currentTimeMillis
override def postStop(): Unit = {
// Cancel the outstanding timer so it cannot fire after this actor has stopped.
heartbeatTimeout.cancel
super.postStop()
}
// Schedules a HeartbeatTimeout message to self after alertAfterDuration.
def nextTimeout(): Cancellable = {
context.system.scheduler.scheduleOnce(alertAfterDuration)(self.com ! HeartbeatTimeout)
}
def receive = {
// Heartbeat for our address: restart the countdown and remember its timestamp.
// NOTE(review): `timeStamp` comes from the heartbeat sender but is later compared
// against the local clock in VerifyAlert - assumes roughly synchronized clocks.
case heartbeat @ Heartbeat(address, timeStamp) if this.address == address =>
heartbeatTimeout.cancel
heartbeatTimeout = nextTimeout()
lastHeartbeat = timeStamp
case HeartbeatTimeout =>
AlertManager.getRef(context).com ! AlertManager.PossibleAlert(address)
// Verification request: reply FalseAlarm only if a heartbeat arrived within the
// window; staying silent means the alert stands.
case VerifyAlert(address) if this.address == address =>
val sinceLastHeartbeat = System.currentTimeMillis - lastHeartbeat
if (sinceLastHeartbeat < alertAfterDuration.toMillis) sender.com ! FalseAlarm(address)
}
}
| PagerDuty/rt-actor-demo | src/main/scala/com/pagerduty/sonar/Watcher.scala | Scala | bsd-3-clause | 3,350 |
package org.jetbrains.plugins.scala.debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger.{ScalaDebuggerTestCase, ScalaVersion_2_11, ScalaVersion_2_12_M2}
/**
* User: Alefas
* Date: 20.10.11
*/
// Concrete suites binding the shared test base to specific Scala compiler versions.
class ScalaThisAndSuperEvaluationTest extends ScalaThisAndSuperEvaluationTestBase with ScalaVersion_2_11
class ScalaThisAndSuperEvaluationTest_2_12_M2 extends ScalaThisAndSuperEvaluationTestBase with ScalaVersion_2_12_M2
abstract class ScalaThisAndSuperEvaluationTestBase extends ScalaDebuggerTestCase {
def testTraitThis() {
addFileToProject("Sample.scala",
"""
|object Sample {
| trait Z {
| def foo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.foo
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalStartsWith("this", "Sample$$anon")
}
}
def testSuperInvocation() {
addFileToProject("A.scala",
"""
|class A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample extends A {
| def main(args: Array[String]) {
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 2)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testInvocationFromInner() {
addFileToProject("A.scala",
"""
|class A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample extends A {
| trait Z {
| def goo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.goo
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testThisInvocationFromInner() {
addFileToProject("A.scala",
"""
|class A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample extends A {
| trait Z {
| def foo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.foo
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("Sample.this.foo", "1")
}
}
def testThisInvocationFromInnerClass() {
addFileToProject("A.scala",
"""
|class A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|class Simple extends A {
| trait Z {
| def foo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.foo
| }
|}
|object Sample {
| def main(args: Array[String]) {
| val sample = new Simple
| sample.main(args)
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("Simple.this.foo", "1")
}
}
def testSuperInvocationFromInner() {
addFileToProject("A.scala",
"""
|class A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample extends A {
| trait Z {
| def foo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.foo
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("Sample.super.foo", "1")
}
}
def testSuperTraitInvocationFromInner() {
addFileToProject("A.scala",
"""
|trait A {
| def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|class Simple extends A {
| trait Z {
| def foo {
| "stop here"
| }
| }
| def main(args: Array[String]) {
| new Z {}.foo
| }
|}
|object Sample {
| def main(args: Array[String]){
| new Simple().main(args)
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("Simple.super.foo", "1")
}
}
// Evaluates an unqualified inherited member (`foo`) while stopped directly in
// the object's own main method.
// NOTE(review): despite the method name, fixture A is declared as a *class*,
// not a trait — possibly an oversight in the fixture.
def testSuperTraitInvocation() {
addFileToProject("A.scala",
"""
|class A {
|  def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample extends A {
|  def main(args: Array[String]) {
|    "stop here"
|  }
|}
""".stripMargin.trim()
)
// Breakpoint on the "stop here" line (0-based index 2).
addBreakpoint("Sample.scala", 2)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
// Evaluates `E.super.ioi` while stopped inside trait F (nested in trait E):
// the outer-qualified super must resolve to IOI.ioi (= 2), not the shadowing
// FF.ioi (= 1) mixed into F itself.
def testOuterSuperInnerTraitInvocation() {
addFileToProject("A.scala",
"""
|class A {
|  def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|trait IOI {
|  def ioi = 2
|}
|trait E extends IOI {
|  trait FF {
|    def ioi = 1
|  }
|
|  trait F extends FF {
|    def foo = {
|      E.super.ioi
|      "stop here"
|    }
|  }
|  def moo {new F{}.foo}
|}
|object Sample extends A {
|  def main(args: Array[String]) {
|    new E {}.moo
|  }
|}
""".stripMargin.trim()
)
// Breakpoint on the "stop here" line inside F.foo (0-based index 11).
addBreakpoint("Sample.scala", 11)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("E.super.ioi", "2")
}
}
// Evaluates an unqualified `foo` while stopped inside trait Z, which is nested
// in inner class Outer; resolution must walk out to Outer and find the member
// inherited from class A.
def testInnerOuterEtc() {
addFileToProject("A.scala",
"""
|class A {
|  def foo = 1
|}
""".stripMargin.trim()
)
addFileToProject("Sample.scala",
"""
|object Sample {
|  class Outer extends A {
|    trait Z {
|      def goo {
|        "stop here"
|      }
|    }
|
|    def goo {
|      new Z {}.goo
|    }
|  }
|  def main(args: Array[String]) {
|    new Outer().goo
|  }
|}
""".stripMargin.trim()
)
// Breakpoint on the "stop here" line inside Z.goo (0-based index 4).
addBreakpoint("Sample.scala", 4)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
} | LPTK/intellij-scala | test/org/jetbrains/plugins/scala/debugger/evaluateExpression/ScalaThisAndSuperEvaluationTest.scala | Scala | apache-2.0 | 6,681 |
package scorex.transaction
import org.h2.mvstore.MVStore
import scorex.block.Block
import scorex.block.Block.BlockId
import scorex.crypto.encode.Base58
import scorex.utils.ScorexLogging
import scala.util.{Failure, Success, Try}
/**
* Storage interface combining both history(blockchain/blocktree) and state
*/
// Couples block history (blockchain or blocktree) with the ledger state,
// keeping the two consistent inside a single MVStore instance.
trait BlockStorage extends ScorexLogging {
val db: MVStore
// Maximum depth to which the state may be rolled back.
val MaxRollback: Int
val history: History
def state: LagonakiState
//Append block to current state
def appendBlock(block: Block): Try[Unit] = synchronized {
//TODO Rollback state for blocktree
// history.appendBlock may yield several blocks to apply to the state.
history.appendBlock(block).map { blocks =>
blocks foreach { b =>
state.processBlock(b) match {
case Failure(e) =>
log.error("Failed to apply block to state", e)
// Undo this block's uncommitted MVStore changes.
// NOTE(review): the loop still applies the remaining blocks after a
// rollback — confirm this is intentional.
db.rollback()
case Success(m) =>
// Persist history and state changes together, once per block.
db.commit()
}
}
}.recoverWith { case e =>
log.error("Failed to append block:", e)
Failure(e)
}
}
//Should be used for linear blockchain only
def removeAfter(signature: BlockId): Unit = synchronized {
history match {
case h: BlockChain => h.heightOf(signature) match {
case Some(height) =>
// Discard blocks from the tip until the requested block is the last one,
// then roll the state back to the matching height.
while (!h.lastBlock.uniqueId.sameElements(signature)) h.discardBlock()
state.rollbackTo(height)
case None =>
log.warn(s"RemoveAfter non-existing block ${Base58.encode(signature)}")
}
case _ =>
throw new RuntimeException("Not available for other option than linear blockchain")
}
}
}
object BlockStorage {
// Direction in which a batch of blocks is to be processed.
sealed trait Direction
case object Forward extends Direction
case object Reversed extends Direction
/*
* A batch of blocks to process
*/
type BlocksToProcess = Seq[Block]
}
| input-output-hk/Scorex | scorex-basics/src/main/scala/scorex/transaction/BlockStorage.scala | Scala | cc0-1.0 | 1,772 |
package com.karasiq.bootstrap.alert
import com.karasiq.bootstrap.context.RenderingContext
/** Bootstrap contextual styles for alert components. */
trait AlertStyles { self: RenderingContext ⇒
  import scalaTags.all._

  /** Wraps a Bootstrap alert style name as a CSS-class modifier. */
  final class AlertStyle private[alert](val styleName: String) extends ModifierFactory {
    // Adds the "alert-<style>" CSS class to the decorated element.
    val createModifier = ("alert-" + styleName).addClass
  }

  /** The four contextual alert styles defined by Bootstrap. */
  object AlertStyle {
    lazy val success = new AlertStyle("success")
    lazy val info = new AlertStyle("info")
    lazy val warning = new AlertStyle("warning")
    lazy val danger = new AlertStyle("danger")
  }
}
| Karasiq/scalajs-bootstrap | library/shared/src/main/scala/com/karasiq/bootstrap/alert/AlertStyles.scala | Scala | mit | 529 |
package com.twitter.util
import com.twitter.conversions.DurationOps._
import org.scalatest.funsuite.AnyFunSuite
// Behavioral tests for the leaky and bounded TokenBucket variants, driven with
// a frozen, manually-advanced clock.
class TokenBucketTest extends AnyFunSuite {
test("a leaky bucket is leaky") {
Time.withCurrentTimeFrozen { tc =>
val b = TokenBucket.newLeakyBucket(3.seconds, 0, Stopwatch.timeMillis)
b.put(100)
assert(b.tryGet(1))
// After the TTL elapses, previously deposited tokens have leaked away.
tc.advance(3.seconds)
assert(!b.tryGet(1))
}
}
test("tryGet fails when empty") {
Time.withCurrentTimeFrozen { tc =>
val b = TokenBucket.newLeakyBucket(3.seconds, 0, Stopwatch.timeMillis)
b.put(100)
// Drain exactly 100 tokens; further gets must fail until more are put.
assert(b.tryGet(50))
assert(b.tryGet(49))
assert(b.tryGet(1))
assert(!b.tryGet(1))
assert(!b.tryGet(50))
b.put(1)
// A failed tryGet must not consume any tokens.
assert(!b.tryGet(2))
assert(b.tryGet(1))
assert(!b.tryGet(1))
}
}
test("provisions reserves") {
Time.withCurrentTimeFrozen { tc =>
// A reserve of 100 lets the balance go up to 100 tokens "into debt".
val b = TokenBucket.newLeakyBucket(3.seconds, 100, Stopwatch.timeMillis)
// start at 0, though with 100 in reserve
assert(b.tryGet(50)) // -50 + 100 = 0
assert(b.tryGet(50)) // -100 + 100 = 0
assert(!b.tryGet(1)) // nope, at 0
b.put(1) // now at -99 + 100 = 1
assert(b.tryGet(1)) // back to 0
tc.advance(1.second)
// This is what you get for eating
// all of your candy right away.
assert(!b.tryGet(1)) // still at -100 + 100 = 0
tc.advance(1.second)
assert(!b.tryGet(1)) // still at -100 + 100 = 0
tc.advance(1.second)
assert(!b.tryGet(1)) // still at -100 + 100 = 0
tc.advance(1.second)
assert(b.tryGet(50)) // the -100 expired, so -50 + 100 = 50
tc.advance(3.seconds) // the -50 expired, so -100 + 100 = 0
assert(b.tryGet(100))
assert(!b.tryGet(1))
}
}
test("TokenBucket.newBoundedBucket can put and get") {
val bucket = TokenBucket.newBoundedBucket(10)
bucket.put(5)
assert(bucket.count == 5)
assert(bucket.tryGet(3))
assert(bucket.count == 2)
bucket.put(6)
assert(bucket.count == 8)
assert(bucket.tryGet(2))
assert(bucket.count == 6)
assert(bucket.tryGet(5))
assert(bucket.count == 1)
// Requests larger than the current count fail without consuming tokens.
assert(!bucket.tryGet(6))
assert(bucket.count == 1)
}
test("TokenBucket.newBoundedBucket is limited") {
val bucket = TokenBucket.newBoundedBucket(10)
// Deposits beyond the bound are silently capped at the limit.
bucket.put(15)
assert(bucket.count == 10)
assert(bucket.tryGet(10))
assert(bucket.count == 0)
assert(!bucket.tryGet(1))
assert(bucket.count == 0)
bucket.put(15)
assert(bucket.count == 10)
assert(!bucket.tryGet(11))
assert(bucket.count == 10)
}
}
| twitter/util | util-core/src/test/scala/com/twitter/util/TokenBucketTest.scala | Scala | apache-2.0 | 2,625 |
package uk.gov.gds.ier.validation
import uk.gov.gds.ier.model.Country
/** Country-related predicates used by the registration flow. */
object CountryValidator {

  /** True when the optional country is present and its name is "Scotland". */
  def isScotland(country: Option[Country]): Boolean =
    country.exists {
      case Country("Scotland", _) => true
      case _ => false
    }
}
| alphagov/ier-frontend | app/uk/gov/gds/ier/validation/CountryValidator.scala | Scala | mit | 256 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.copygrinder.pure.copybean.persistence
import org.copygrinder.pure.copybean.model.CopybeanImpl
import scala.collection.immutable.ListMap
/** Copybeans that ship with the system — currently just the "required" validator. */
class PredefinedCopybeans {

  /** The built-in "required" field validator bean. */
  val requiredValidator = new CopybeanImpl(
    "validator.required",
    Set("classBackedFieldValidator"),
    ListMap(
      "displayName" -> "Required",
      "class" -> "org.copygrinder.pure.copybean.validator.RequiredValidator",
      "isSingleton" -> true,
      "applicableFieldTypes" -> Seq("*"),
      "signature" -> ListMap()
    )
  )

  /** All predefined beans, keyed by bean id. */
  lazy val predefinedBeans = Map(requiredValidator.id -> requiredValidator)
}
package mr.merc.image
import org.imgscalr.Scalr
import org.imgscalr.Scalr.{Method, Mode}
import scalafx.embed.swing.SwingFXUtils
import scalafx.scene.image.{Image, PixelFormat, WritableImage}
import java.nio.IntBuffer
object ImageUtil {

  /**
   * Returns a copy of `image` mirrored around its vertical axis
   * (left and right swapped).
   */
  def mirrorVertically(image: Image): Image = {
    val w = image.width.value.toInt
    val h = image.height.value.toInt
    val writableImage = new WritableImage(w, h)
    val writer = writableImage.pixelWriter
    val reader = image.pixelReader.get
    val pf = PixelFormat.getIntArgbInstance
    // Bulk-read every pixel into an int buffer (scanline stride == image width).
    // Buffers are sized exactly w * h; the previous (w + 1) * (h + 1) sizing
    // over-allocated one extra row and column for no reason.
    val inputBuffer = IntBuffer.allocate(w * h)
    reader.getPixels(0, 0, w, h, pf, inputBuffer, w)
    val outputBuffer = IntBuffer.allocate(w * h)
    for (x <- 0 until w; y <- 0 until h) {
      val mirroredX = w - x - 1
      pf.setArgb(outputBuffer, mirroredX, y, w, pf.getArgb(inputBuffer, x, y, w))
    }
    writer.setPixels(0, 0, w, h, pf, outputBuffer, w)
    writableImage
  }

  /**
   * Returns a copy of `image` mirrored around its horizontal axis
   * (top and bottom swapped).
   */
  def mirrorHorizontally(image: Image): Image = {
    val w = image.width.value.toInt
    // Hoisted out of the pixel loop (was re-read on every iteration).
    val h = image.height.value.toInt
    val writableImage = new WritableImage(w, h)
    val writer = writableImage.pixelWriter
    val reader = image.pixelReader.get
    for (x <- 0 until w; y <- 0 until h) {
      val mirroredY = h - y - 1
      writer.setArgb(x, mirroredY, reader.getArgb(x, y))
    }
    writableImage
  }

  /** A 1x1 image whose single pixel is fully transparent (ARGB 0). */
  def emptyImage: Image = {
    val writableImage = new WritableImage(1, 1)
    writableImage.pixelWriter.setArgb(0, 0, 0)
    writableImage
  }

  /**
   * Scales `image` by `factor` using imgscalr's highest-quality resampling,
   * round-tripping through an AWT BufferedImage.
   */
  def scale(image: Image, factor: Double): Image = {
    val bi = SwingFXUtils.fromFXImage(image, null)
    // Explicit parentheses replace the deprecated postfix `toInt` notation.
    val targetWidth = (bi.getWidth * factor).toInt
    val targetHeight = (bi.getHeight * factor).toInt
    val result = Scalr.resize(bi, Method.ULTRA_QUALITY, Mode.AUTOMATIC, targetWidth, targetHeight)
    SwingFXUtils.toFXImage(result, null)
  }
}
package scaladoc.resources
// Scaladoc test fixture: the `$a` variable in T.foo's doc comment is deliberately
// undefined — the stray-dollar-sign case under test. Do not "fix" that comment.
trait T {
/** $a */
def foo: Int
}
// Concrete implementor so the inherited doc comment is attached to a definition.
object O extends T {
val foo = 42
}
| scala/scala | test/scaladoc/resources/stray-dollar-sign-res.scala | Scala | apache-2.0 | 110 |
package com.github.vooolll.client.feed
import com.github.vooolll.base.FacebookClientSupport
import cats.implicits._
// Integration-style checks for the feed endpoints of the Facebook Graph API client.
class FeedSpec extends FacebookClientSupport {
import com.github.vooolll.base.TestConfiguration._
"Facebook Graph Api" should {
// Picture URLs carry volatile query parameters, so they are compared stripped.
"return feed" in { c =>
c.feed(userId) map (_.pictureWithoutQueryParams shouldBe feed)
}
// Same expectation through the Either-returning variant of the call.
"return feed result" in { c =>
c.feedResult(userId) map (_.map(_.pictureWithoutQueryParams) shouldBe feed.asRight)
}
}
}
| vooolll/facebook4s | src/test/scala/com/github/vooolll/client/feed/FeedSpec.scala | Scala | apache-2.0 | 493 |
package parser
import model.Movable
/**
* Created by salim on 12/08/2016.
*/
/** Parses textual commands and applies them to the given movable subject. */
class Parser(subject: Movable) {

  /**
   * Interprets a single instruction. The only recognised form is
   * "go <direction>" (exactly two whitespace-separated tokens), which moves
   * the subject; anything else raises a ParserException.
   */
  def parse(instruction: String): Unit =
    instruction.split("\\s+").toList match {
      case "go" :: direction :: Nil => subject.moveDirection(direction)
      case _ => throw new ParserException(s"Cannot parse: ${instruction}")
    }
}
| salimfadhley/scalamoo | src/main/scala/parser/Parser.scala | Scala | mit | 419 |
package japgolly.scalajs.react.test
import scala.scalajs.js.{Function1 => JFn1, Object, Array, UndefOr, undefined, Dynamic, native}
import scala.scalajs.js.annotation.JSName
import japgolly.scalajs.react._
/** https://facebook.github.io/react/docs/test-utils.html */
// scala.js facade over React's TestUtils addon; every `native` body is
// implemented by the JavaScript library at runtime.
@JSName("React.addons.TestUtils")
object ReactTestUtils extends Object {
// Entry point for dispatching synthetic DOM events; see the Simulate trait.
def Simulate: Simulate = native
/** Render a component into a detached DOM node in the document. This function requires a DOM. */
def renderIntoDocument(c: ReactElement): ComponentM = native
// Typed overload preserving the component's prop/state/backend types.
def renderIntoDocument[P,S,B,N <: TopNode](c: ReactComponentU[P,S,B,N]): ReactComponentM[P,S,B,N] = native
/**
* Pass a mocked component module to this method to augment it with useful methods that allow it to be used as a dummy
* React component. Instead of rendering as usual, the component will become a simple <div> (or other tag if
* mockTagName is provided) containing any provided children.
*/
def mockComponent(c: ComponentClass, tagName: String = native): Object = native
/** Returns true if instance is an instance of a React componentClass. */
def isComponentOfType(instance: ReactElement, c: ComponentClass): Boolean = native
/** Returns true if instance is a DOM component (such as a <div> or <span>). */
def isDOMComponent(instance: ReactElement): Boolean = native
/** Returns true if instance is a composite component (created with React.createClass()) */
def isCompositeComponent(instance: ReactElement): Boolean = native
/** The combination of isComponentOfType() and isCompositeComponent(). */
def isCompositeComponentWithType(instance: ReactElement, c: ComponentClass): Boolean = native
/** Returns true if instance is a plain text component. */
def isTextComponent(instance: ReactElement): Boolean = native
/**
* Traverse all components in tree and accumulate all components where test(component) is true.
* This is not that useful on its own, but it's used as a primitive for other test utils.
*/
def findAllInRenderedTree(tree: ComponentM, test: JFn1[ComponentM, Boolean]): Array[ComponentM] = native
/**
* Finds all instance of components in the rendered tree that are DOM components with the class name
* matching className.
*/
def scryRenderedDOMComponentsWithClass(tree: ComponentM, className: String): Array[ComponentM] = native
/**
* Like scryRenderedDOMComponentsWithClass() but expects there to be one result, and returns that one result, or
* throws exception if there is any other number of matches besides one.
*/
def findRenderedDOMComponentWithClass(tree: ComponentM, className: String): ComponentM = native
/**
* Finds all instance of components in the rendered tree that are DOM components with the tag name
* matching tagName.
*/
def scryRenderedDOMComponentsWithTag(tree: ComponentM, tagName: String): Array[ComponentM] = native
/**
* Like scryRenderedDOMComponentsWithTag() but expects there to be one result, and returns that one result, or
* throws exception if there is any other number of matches besides one.
*/
def findRenderedDOMComponentWithTag(tree: ComponentM, tagName: String): ComponentM = native
/** Finds all instances of components with type equal to componentClass. */
def scryRenderedComponentsWithType(tree: ComponentM, c: ComponentClass): Array[ComponentM] = native
/**
* Same as scryRenderedComponentsWithType() but expects there to be one result and returns that one result, or throws
* exception if there is any other number of matches besides one.
*/
def findRenderedComponentWithType(tree: ComponentM, c: ComponentClass): ComponentM = native
}
/**
 * Facade for `ReactTestUtils.Simulate`: each method dispatches a synthetic DOM
 * event of the corresponding type to the given component or DOM node, with an
 * optional event-data object. All bodies are implemented natively in JavaScript.
 */
trait Simulate extends Object {
def beforeInput      (t: ReactOrDomNode, eventData: Object = native): Unit = native
def blur             (t: ReactOrDomNode, eventData: Object = native): Unit = native
def change           (t: ReactOrDomNode, eventData: Object = native): Unit = native
def click            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def compositionEnd   (t: ReactOrDomNode, eventData: Object = native): Unit = native
def compositionStart (t: ReactOrDomNode, eventData: Object = native): Unit = native
def compositionUpdate(t: ReactOrDomNode, eventData: Object = native): Unit = native
def contextMenu      (t: ReactOrDomNode, eventData: Object = native): Unit = native
def copy             (t: ReactOrDomNode, eventData: Object = native): Unit = native
def cut              (t: ReactOrDomNode, eventData: Object = native): Unit = native
def doubleClick      (t: ReactOrDomNode, eventData: Object = native): Unit = native
def drag             (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragEnd          (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragEnter        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragExit         (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragLeave        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragOver         (t: ReactOrDomNode, eventData: Object = native): Unit = native
def dragStart        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def drop             (t: ReactOrDomNode, eventData: Object = native): Unit = native
def error            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def focus            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def input            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def keyDown          (t: ReactOrDomNode, eventData: Object = native): Unit = native
def keyPress         (t: ReactOrDomNode, eventData: Object = native): Unit = native
def keyUp            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def load             (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseDown        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseEnter       (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseLeave       (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseMove        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseOut         (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseOver        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def mouseUp          (t: ReactOrDomNode, eventData: Object = native): Unit = native
def paste            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def reset            (t: ReactOrDomNode, eventData: Object = native): Unit = native
def scroll           (t: ReactOrDomNode, eventData: Object = native): Unit = native
def select           (t: ReactOrDomNode, eventData: Object = native): Unit = native
def submit           (t: ReactOrDomNode, eventData: Object = native): Unit = native
def touchCancel      (t: ReactOrDomNode, eventData: Object = native): Unit = native
def touchEnd         (t: ReactOrDomNode, eventData: Object = native): Unit = native
def touchMove        (t: ReactOrDomNode, eventData: Object = native): Unit = native
def touchStart       (t: ReactOrDomNode, eventData: Object = native): Unit = native
def wheel            (t: ReactOrDomNode, eventData: Object = native): Unit = native
}
// Builder for the payload of a synthetic "change" event; an undefined value is
// omitted from the resulting JS object entirely.
case class ChangeEventData(value: UndefOr[String] = undefined) {
// Builds {target: {value: ...}} as expected by React's Simulate.change.
def toJs: Object = {
val t = Dynamic.literal()
value.foreach(v => t.updateDynamic("value")(v))
val o = Dynamic.literal("target" -> t)
o
}
// Dispatches a change event carrying this data to the given component/node.
def simulate(t: ReactOrDomNode) = ReactTestUtils.Simulate.change(t, this)
def simulation = Simulation.change(this)
}
// Builder for the payload of synthetic keyboard events; only fields that are
// defined are copied into the JS object handed to Simulate.
case class KeyboardEventData(key: UndefOr[String] = undefined,
location: UndefOr[Double] = undefined,
altKey: UndefOr[Boolean] = undefined,
ctrlKey: UndefOr[Boolean] = undefined,
metaKey: UndefOr[Boolean] = undefined,
shiftKey: UndefOr[Boolean] = undefined,
repeat: UndefOr[Boolean] = undefined,
locale: UndefOr[String] = undefined,
keyCode: UndefOr[Int] = undefined) {
// Converts to a plain JS object, omitting undefined fields entirely.
def toJs: Object = {
val o = Dynamic.literal()
key     .foreach(v => o.updateDynamic("key"     )(v))
location.foreach(v => o.updateDynamic("location")(v))
altKey  .foreach(v => o.updateDynamic("altKey"  )(v))
ctrlKey .foreach(v => o.updateDynamic("ctrlKey" )(v))
metaKey .foreach(v => o.updateDynamic("metaKey" )(v))
shiftKey.foreach(v => o.updateDynamic("shiftKey")(v))
repeat  .foreach(v => o.updateDynamic("repeat"  )(v))
locale  .foreach(v => o.updateDynamic("locale"  )(v))
keyCode .foreach(v => o.updateDynamic("keyCode" )(v))
o
}
// Immediate dispatch helpers for single and combined key events.
def simulateKeyDown       (t: ReactOrDomNode): Unit = ReactTestUtils.Simulate.keyDown (t, this)
def simulateKeyPress      (t: ReactOrDomNode): Unit = ReactTestUtils.Simulate.keyPress(t, this)
def simulateKeyUp         (t: ReactOrDomNode): Unit = ReactTestUtils.Simulate.keyUp   (t, this)
def simulateKeyDownUp     (t: ReactOrDomNode): Unit = {simulateKeyDown(t); simulateKeyUp(t)}
def simulateKeyDownPressUp(t: ReactOrDomNode): Unit = {simulateKeyDown(t); simulateKeyPress(t); simulateKeyUp(t)}
// Deferred (composable) variants of the same dispatches.
def simulationKeyDown        = Simulation.keyDown(this)
def simulationKeyPress       = Simulation.keyPress(this)
def simulationKeyUp          = Simulation.keyUp(this)
def simulationKeyDownUp      = simulationKeyDown >> simulationKeyUp
def simulationKeyDownPressUp = simulationKeyDown >> simulationKeyPress >> simulationKeyUp
}
// Builder for the payload of synthetic mouse/drag/wheel events; only fields
// that are defined are copied into the JS object handed to Simulate.
case class MouseEventData(screenX: UndefOr[Double] = undefined,
screenY: UndefOr[Double] = undefined,
clientX: UndefOr[Double] = undefined,
clientY: UndefOr[Double] = undefined,
altKey: UndefOr[Boolean] = undefined,
ctrlKey: UndefOr[Boolean] = undefined,
metaKey: UndefOr[Boolean] = undefined,
shiftKey: UndefOr[Boolean] = undefined,
buttons: UndefOr[Int] = undefined) {
// Converts to a plain JS object, omitting undefined fields entirely.
def toJs: Object = {
val o = Dynamic.literal()
screenX .foreach(v => o.updateDynamic("screenX" )(v))
screenY .foreach(v => o.updateDynamic("screenY" )(v))
clientX .foreach(v => o.updateDynamic("clientX" )(v))
clientY .foreach(v => o.updateDynamic("clientY" )(v))
altKey  .foreach(v => o.updateDynamic("altKey"  )(v))
ctrlKey .foreach(v => o.updateDynamic("ctrlKey" )(v))
metaKey .foreach(v => o.updateDynamic("metaKey" )(v))
shiftKey.foreach(v => o.updateDynamic("shiftKey")(v))
buttons .foreach(v => o.updateDynamic("buttons" )(v))
o
}
// Immediate dispatch helpers, one per mouse-family event type.
def simulateDrag      (t: ReactOrDomNode) = ReactTestUtils.Simulate.drag      (t, this)
def simulateDragEnd   (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragEnd   (t, this)
def simulateDragEnter (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragEnter (t, this)
def simulateDragExit  (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragExit  (t, this)
def simulateDragLeave (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragLeave (t, this)
def simulateDragOver  (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragOver  (t, this)
def simulateDragStart (t: ReactOrDomNode) = ReactTestUtils.Simulate.dragStart (t, this)
def simulateMouseDown (t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseDown (t, this)
def simulateMouseEnter(t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseEnter(t, this)
def simulateMouseLeave(t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseLeave(t, this)
def simulateMouseMove (t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseMove (t, this)
def simulateMouseOut  (t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseOut  (t, this)
def simulateMouseOver (t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseOver (t, this)
def simulateMouseUp   (t: ReactOrDomNode) = ReactTestUtils.Simulate.mouseUp   (t, this)
def simulateWheel     (t: ReactOrDomNode) = ReactTestUtils.Simulate.wheel     (t, this)
// Deferred (composable) variants of the same dispatches.
def simulationDrag       = Simulation.drag      (this)
def simulationDragEnd    = Simulation.dragEnd   (this)
def simulationDragEnter  = Simulation.dragEnter (this)
def simulationDragExit   = Simulation.dragExit  (this)
def simulationDragLeave  = Simulation.dragLeave (this)
def simulationDragOver   = Simulation.dragOver  (this)
def simulationDragStart  = Simulation.dragStart (this)
def simulationMouseDown  = Simulation.mouseDown (this)
def simulationMouseEnter = Simulation.mouseEnter(this)
def simulationMouseLeave = Simulation.mouseLeave(this)
def simulationMouseMove  = Simulation.mouseMove (this)
def simulationMouseOut   = Simulation.mouseOut  (this)
def simulationMouseOver  = Simulation.mouseOver (this)
def simulationMouseUp    = Simulation.mouseUp   (this)
def simulationWheel      = Simulation.wheel     (this)
}
| elacin/scalajs-react | test/src/main/scala/japgolly/scalajs/react/test/ReactTestUtils.scala | Scala | apache-2.0 | 13,218 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.data
package kinds
import com.google.xml.combinators.{Picklers, ~}
/**
* An organization, typically associated with a contact.
*
* @author Iulian Dragos
* @see http://code.google.com/apis/gdata/elements.html#gdOrganization
*/
case class Organization(
/** A string value used to identify this organization. */
var label: Option[String],
/** A programmatic value that identifies the type of organization. */
var rel: Option[String],
/** At most one organization is the primary one. */
var primary: Boolean,
/** The organization name. */
var name: Option[String],
/** The title of a person within the organization. */
var title: Option[String])
object Organization {
import Picklers._
// XML (un)pickler for the <gd:organization> element: label/rel/primary are
// attributes, orgName/orgTitle are child elements; primary defaults to false.
def pickler = {
implicit val ns = Uris.gdNs
(wrap (elem("organization", opt(attr("label", text))
~ opt(attr("rel", text)) ~ default(attr("primary", boolVal), false)
~ opt(elem("orgName", text)) ~ opt(elem("orgTitle", text))))
(Organization.apply) (fromOrganization))
}
// Decomposes an Organization into the ~-chain shape expected by the pickler.
private def fromOrganization(org: Organization) =
new ~(org.label, org.rel) ~ org.primary ~ org.name ~ org.title
}
| jeppenejsum/gdata-scala-client | src/com/google/gdata/data/kinds/Organization.scala | Scala | apache-2.0 | 1,812 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5
object HeaderBuilder {
/**
* Creates a new Header instance with the provided id and type.
* @param msgType The type of the message
* @param msgId (Optional) The unique identifier of the message, generates a
* random UUID if none is provided
* @return The new Header instance
*/
def create(
msgType: String,
msgId: UUID = java.util.UUID.randomUUID.toString
) = Header(
msgId,
// Username, session and protocol version come from the kernel-wide info.
SparkKernelInfo.username,
SparkKernelInfo.session,
msgType,
SparkKernelInfo.protocolVersion
)
/**
* Represents an "empty" header where the message type and id are blank.
*/
val empty = create("", "")
}
| bpburns/spark-kernel | protocol/src/main/scala/com/ibm/spark/kernel/protocol/v5/HeaderBuilder.scala | Scala | apache-2.0 | 1,303 |
package dotty.tools
package dotc
package reporting
import core.Contexts.Context
import collection.mutable
import Reporter._
/**
 * A Reporter that throws error diagnostics as exceptions and forwards all
 * other diagnostics to the wrapped `reportInfo` reporter.
 */
class ThrowingReporter(reportInfo: Reporter) extends Reporter {
  // Errors escape as exceptions; everything else goes to the delegate reporter.
  protected def doReport(d: Diagnostic)(implicit ctx: Context): Unit =
    d match {
      case error: Error => throw error
      case other => reportInfo.report(other)
    }
}
| AlexSikia/dotty | src/dotty/tools/dotc/reporting/ThrowingReporter.scala | Scala | bsd-3-clause | 410 |
/*
* Copyright (c) 2017 sadikovi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.github.sadikovi.riff.column
import java.nio.ByteBuffer
import org.apache.spark.sql.catalyst.InternalRow
import com.github.sadikovi.riff.io.OutputBuffer
import com.github.sadikovi.testutil.UnitTestSuite
// Unit tests for TimestampColumnFilter: null tracking, bloom-filter membership,
// equality/toString, and external serialization round-trips.
class TimestampColumnFilterSuite extends UnitTestSuite {
test("update null values") {
val filter = new TimestampColumnFilter()
filter.hasNulls should be (false)
// A null cell flips the null flag without touching the bloom filter.
filter.update(InternalRow(null), 0)
filter.hasNulls should be (true)
}
test("update non-null values") {
val filter = new TimestampColumnFilter()
filter.update(InternalRow(1L), 0)
filter.update(InternalRow(2L), 0)
filter.update(InternalRow(10L), 0)
// Inserted values must be reported as possibly present...
filter.mightContain(1L) should be (true)
filter.mightContain(2L) should be (true)
filter.mightContain(10L) should be (true)
// ...and these non-inserted values happen not to collide in this filter.
filter.mightContain(-1L) should be (false)
filter.mightContain(3L) should be (false)
filter.mightContain(11L) should be (false)
}
test("equals") {
val filter = new TimestampColumnFilter()
assert(filter.equals(filter) === true)
// Equality ignores the configured capacity.
assert(filter.equals(new TimestampColumnFilter(1024)) === true)
assert(filter.equals(null) === false)
}
test("toString") {
val filter = new TimestampColumnFilter(1024)
filter.update(InternalRow(null), 0)
filter.update(InternalRow(1L), 0)
assert(filter.toString.contains(
"TimestampColumnFilter[hasNulls=true, org.apache.spark.util.sketch.BloomFilterImpl"))
}
test("read/write empty filter") {
val buf = new OutputBuffer()
val filter1 = new TimestampColumnFilter(32)
filter1.writeExternal(buf)
// Round-trip through the generic ColumnFilter deserializer.
val in = ByteBuffer.wrap(buf.array())
val filter2 = ColumnFilter.readExternal(in)
filter2 should be (filter1)
filter2.hasNulls should be (false)
}
test("read/write filter") {
val buf = new OutputBuffer()
val filter1 = new TimestampColumnFilter(32)
filter1.update(InternalRow(null), 0)
filter1.update(InternalRow(1L), 0)
filter1.update(InternalRow(2L), 0)
filter1.writeExternal(buf)
// Null flag and bloom-filter contents must survive serialization.
val in = ByteBuffer.wrap(buf.array())
val filter2 = ColumnFilter.readExternal(in)
filter2 should be (filter1)
filter2.hasNulls should be (true)
filter2.mightContain(1L) should be (true)
filter2.mightContain(2L) should be (true)
}
}
| sadikovi/riff | format/src/test/scala/com/github/sadikovi/riff/column/TimestampColumnFilterSuite.scala | Scala | mit | 3,399 |
package actors
import akka.actor._
import javax.inject.Inject
import play.api.db._
import anorm._
// Companion: Props factory and message protocol for the actor below.
object CreateWorkflowActor {
def props = Props[CreateWorkflowActor]
// Request to create a workflow with the given number of steps; the actor
// replies to the sender with the generated key as an Option[Long].
case class CreateWorkflow(numberOfSteps: Int)
}
// Actor that persists new workflow rows through the injected database.
class CreateWorkflowActor @Inject()(db: Database) extends Actor {
import CreateWorkflowActor._
// Inserts a workflow row (parameterized SQL via anorm) and returns the
// database-generated key, or None if the insert produced no key.
def createWorkflow(numberOfSteps: Int): Option[Long] = {
db.withConnection { implicit c =>
SQL("insert into workflow(number_of_steps) values ({numberOfSteps})")
.on('numberOfSteps -> numberOfSteps).executeInsert()
}
}
def receive = {
case CreateWorkflow(numberOfSteps: Int) =>
// Reply to the asker with the new workflow's generated id.
sender() ! createWorkflow(numberOfSteps)
}
}
| rmscardoso/workflows | app/actors/CreateWorkflowActor.scala | Scala | mit | 685 |
package de.kasoki.trierbustimetracker2.utils
import android.app.Activity
import android.graphics.Color
import android.graphics.drawable.ColorDrawable
/** Helpers for styling the legacy Android ActionBar. */
object ActionBarHelper {

  /** Background colour applied to the action bar. */
  val actionBarColor = "#0356b9"

  /** Tints the action bar background; only applied below API level 21. */
  def colorActionBar(activity: Activity): Unit = {
    if (AndroidHelper.currentApiLevel < 21) {
      val background = new ColorDrawable(Color.parseColor(actionBarColor))
      activity.getActionBar().setBackgroundDrawable(background)
    }
  }

  /** Shows the "up" navigation affordance in the action bar. */
  def enableHomeAsUp(activity: Activity): Unit = {
    activity.getActionBar().setDisplayHomeAsUpEnabled(true)
  }
}
| kasoki/TrierBusTimeTracker | src/main/scala/de/kasoki/trierbustimetracker2/utils/ActionBarHelper.scala | Scala | mit | 597 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io._
import org.apache.log4j.Logger
import kafka.utils._
import scala.actors.Actor
import scala.collection._
import java.util.concurrent.CountDownLatch
import kafka.server.{KafkaConfig, KafkaZooKeeper}
import kafka.common.{InvalidTopicException, InvalidPartitionException}
/**
* The guy who creates and hands out logs
*/
@threadsafe
private[kafka] class LogManager(val config: KafkaConfig,
                                private val scheduler: KafkaScheduler,
                                private val time: Time,
                                val logCleanupIntervalMs: Long,
                                val logCleanupDefaultAgeMs: Long,
                                needRecovery: Boolean) {
  // Root directory under which each topic-partition keeps its own sub-directory.
  val logDir: File = new File(config.logDir)
  private val numPartitions = config.numPartitions
  private val maxSize: Long = config.logFileSize
  private val flushInterval = config.flushInterval
  private val topicPartitionsMap = config.topicPartitionsMap
  private val logger = Logger.getLogger(classOf[LogManager])
  // Guards createLog so two threads cannot race to create the same topic-partition dir.
  private val logCreationLock = new Object
  private val random = new java.util.Random
  private var kafkaZookeeper: KafkaZooKeeper = null
  private var zkActor: Actor = null
  // Counted down once startup() finished ZK registration; getOrCreateLog blocks on it.
  // Deliberately null when ZK is disabled (awaitStartup never touches it then).
  private val startupLatch: CountDownLatch = if (config.enableZookeeper) new CountDownLatch(1) else null
  private val logFlusherScheduler = new KafkaScheduler(1, "kafka-logflusher-", false)
  private val logFlushIntervalMap = config.flushIntervalMap
  // Per-topic retention, converted from hours (config) to milliseconds.
  private val logRetentionMSMap = getLogRetentionMSMap(config.logRetentionHoursMap)
  /* Initialize a log for each subdirectory of the main log directory */
  private val logs = new Pool[String, Pool[Int, Log]]()
  if(!logDir.exists()) {
    logger.info("No log directory found, creating '" + logDir.getAbsolutePath() + "'")
    logDir.mkdirs()
  }
  if(!logDir.isDirectory() || !logDir.canRead())
    throw new IllegalArgumentException(logDir.getAbsolutePath() + " is not a readable log directory.")
  val subDirs = logDir.listFiles()
  if(subDirs != null) {
    for(dir <- subDirs) {
      if(!dir.isDirectory()) {
        logger.warn("Skipping unexplainable file '" + dir.getAbsolutePath() + "'--should it be there?")
      } else {
        logger.info("Loading log '" + dir.getName() + "'")
        // needRecovery is only honored for logs found on disk at startup;
        // logs created later via createLog always pass false.
        val log = new Log(dir, maxSize, flushInterval, needRecovery)
        val topicPartion = Utils.getTopicPartition(dir.getName)
        logs.putIfNotExists(topicPartion._1, new Pool[Int, Log]())
        val parts = logs.get(topicPartion._1)
        parts.put(topicPartion._2, log)
      }
    }
  }
  /* Schedule the cleanup task to delete old logs */
  if(scheduler != null) {
    logger.info("starting log cleaner every " + logCleanupIntervalMs + " ms")
    scheduler.scheduleWithRate(cleanupLogs, 60 * 1000, logCleanupIntervalMs)
  }
  // When ZK is enabled, topic registration is funneled through a dedicated actor so
  // a slow/unavailable ZooKeeper never blocks the threads creating logs.
  if(config.enableZookeeper) {
    kafkaZookeeper = new KafkaZooKeeper(config, this)
    kafkaZookeeper.startup
    zkActor = new Actor {
      def act() {
        loop {
          receive {
            case topic: String =>
              try {
                kafkaZookeeper.registerTopicInZk(topic)
              }
              catch {
                case e => logger.error(e) // log it and let it go
              }
            case StopActor =>
              logger.info("zkActor stopped")
              exit
          }
        }
      }
    }
    zkActor.start
  }
  // Poison-pill message making zkActor leave its receive loop (sent from close()).
  case object StopActor
  // Converts the per-topic retention map from hours to milliseconds.
  private def getLogRetentionMSMap(logRetentionHourMap: Map[String, Int]) : Map[String, Long] = {
    var ret = new mutable.HashMap[String, Long]
    for ( (topic, hour) <- logRetentionHourMap )
      ret.put(topic, hour * 60 * 60 * 1000L)
    ret
  }
  /**
   * Register this broker in ZK for the first time, register all known topics,
   * release the startup latch, and start the periodic log flusher.
   */
  def startup() {
    if(config.enableZookeeper) {
      kafkaZookeeper.registerBrokerInZk()
      for (topic <- getAllTopics)
        kafkaZookeeper.registerTopicInZk(topic)
      startupLatch.countDown
    }
    logger.info("Starting log flusher every " + config.flushSchedulerThreadRate + " ms with the following overrides " + logFlushIntervalMap)
    logFlusherScheduler.scheduleWithRate(flushAllLogs, config.flushSchedulerThreadRate, config.flushSchedulerThreadRate)
  }
  // Blocks callers until startup() completed ZK registration (no-op without ZK).
  private def awaitStartup() {
    if (config.enableZookeeper)
      startupLatch.await
  }
  // Fire-and-forget topic registration through the dedicated actor (no-op without ZK).
  def registerNewTopicInZK(topic: String) {
    if (config.enableZookeeper)
      zkActor ! topic
  }
  /**
   * Create a log for the given topic and the given partition.
   * Serialized by logCreationLock; newly created logs never run recovery.
   */
  private def createLog(topic: String, partition: Int): Log = {
    logCreationLock synchronized {
      val d = new File(logDir, topic + "-" + partition)
      d.mkdirs()
      new Log(d, maxSize, flushInterval, false)
    }
  }
  // Picks a uniformly random partition within the topic's configured partition count.
  def chooseRandomPartition(topic: String): Int = {
    random.nextInt(topicPartitionsMap.getOrElse(topic, numPartitions))
  }
  /**
   * Create the log if it does not exist, if it exists just return it.
   * Validates topic name and partition range; losers of the create race
   * close their log and adopt the winner's instance.
   */
  def getOrCreateLog(topic: String, partition: Int): Log = {
    awaitStartup
    if (topic.length <= 0)
      throw new InvalidTopicException("topic name can't be empty")
    if (partition < 0 || partition >= topicPartitionsMap.getOrElse(topic, numPartitions)) {
      logger.warn("Wrong partition " + partition + " valid partitions (0," +
              (topicPartitionsMap.getOrElse(topic, numPartitions) - 1) + ")")
      throw new InvalidPartitionException("wrong partition " + partition)
    }
    var hasNewTopic = false
    var parts = logs.get(topic)
    if (parts == null) {
      val found = logs.putIfNotExists(topic, new Pool[Int, Log])
      if (found == null)
        hasNewTopic = true
      parts = logs.get(topic)
    }
    var log = parts.get(partition)
    if(log == null) {
      log = createLog(topic, partition)
      val found = parts.putIfNotExists(partition, log)
      if(found != null) {
        // there was already somebody there
        log.close()
        log = found
      }
      else
        logger.info("Created log for '" + topic + "'-" + partition)
    }
    // ZK registration happens only after the log is fully visible in the pool.
    if (hasNewTopic)
      registerNewTopicInZK(topic)
    log
  }
  /**
   * Delete any eligible logs. Return the number of segments deleted.
   * Eligibility = segment's last modification older than the topic's
   * retention threshold (per-topic override, else the default age).
   */
  def cleanupLogs() {
    logger.debug("Beginning log cleanup...")
    val iter = getLogIterator
    var total = 0
    val startMs = time.milliseconds
    while(iter.hasNext) {
      val log = iter.next
      logger.debug("Garbage collecting '" + log.name + "'")
      var logCleanupThresholdMS = this.logCleanupDefaultAgeMs
      val topic = Utils.getTopicPartition(log.dir.getName)._1
      if (logRetentionMSMap.contains(topic))
        logCleanupThresholdMS = logRetentionMSMap(topic)
      val toBeDeleted = log.markDeletedWhile(startMs - _.file.lastModified > logCleanupThresholdMS)
      for(segment <- toBeDeleted) {
        logger.info("Deleting log segment " + segment.file.getName() + " from " + log.name)
        // Close errors are swallowed (logged as warn) so one bad segment
        // doesn't abort the whole cleanup pass.
        Utils.swallow(logger.warn, segment.messageSet.close())
        if(!segment.file.delete())
          logger.warn("Delete failed.")
        else
          total += 1
      }
    }
    logger.debug("Log cleanup completed. " + total + " files deleted in " +
                 (time.milliseconds - startMs) / 1000 + " seconds")
  }
  /**
   * Close all the logs: stop the flusher, close every log, then shut down
   * the ZK actor and connection (order matters — no flushes after close).
   */
  def close() {
    logFlusherScheduler.shutdown
    val iter = getLogIterator
    while(iter.hasNext)
      iter.next.close()
    if (config.enableZookeeper) {
      zkActor ! StopActor
      kafkaZookeeper.close
    }
  }
  // Flattens the nested topic -> partition -> Log pools into a single iterator.
  private def getLogIterator(): Iterator[Log] = {
    new IteratorTemplate[Log] {
      val partsIter = logs.values.iterator
      var logIter: Iterator[Log] = null
      override def makeNext(): Log = {
        while (true) {
          if (logIter != null && logIter.hasNext)
            return logIter.next
          if (!partsIter.hasNext)
            return allDone
          logIter = partsIter.next.values.iterator
        }
        // should never reach here
        assert(false)
        return allDone
      }
    }
  }
  // Periodic task: flush every log whose per-topic flush interval has elapsed.
  // An IOException during flush is treated as fatal and halts the JVM.
  private def flushAllLogs() = {
    if (logger.isDebugEnabled)
      logger.debug("flushing the high watermark of all logs")
    for (log <- getLogIterator)
    {
      try{
        val timeSinceLastFlush = System.currentTimeMillis - log.getLastFlushedTime
        var logFlushInterval = config.defaultFlushIntervalMs
        if(logFlushIntervalMap.contains(log.getTopicName))
          logFlushInterval = logFlushIntervalMap(log.getTopicName)
        if (logger.isDebugEnabled)
          logger.debug(log.getTopicName + " flush interval " + logFlushInterval +
              " last flushed " + log.getLastFlushedTime + " timesincelastFlush: " + timeSinceLastFlush)
        if(timeSinceLastFlush >= logFlushInterval)
          log.flush
      }
      catch {
        case e =>
          logger.error("error flushing " + log.getTopicName, e)
          e match {
            case _: IOException =>
              logger.error("force shutdown due to error in flushAllLogs" + e)
              Runtime.getRuntime.halt(1)
            case _ =>
          }
      }
    }
  }
  def getAllTopics(): Iterator[String] = logs.keys.iterator
  def getTopicPartitionsMap() = topicPartitionsMap
}
| tcrayford/hafka | kafka/core/src/main/scala/kafka/log/LogManager.scala | Scala | bsd-3-clause | 9,815 |
package org.infinispan.spark.test
import java.util.Properties
import org.apache.spark.{SparkConf, SparkContext}
import org.infinispan.spark.rdd.InfinispanRDD
import org.scalatest.{BeforeAndAfterAll, Suite}
/**
* Trait to be mixed-in by tests that require a org.apache.spark.SparkContext
*
* @author gustavonalle
*/
trait Spark extends BeforeAndAfterAll {
  this: Suite with RemoteTest =>
  // Local Spark context with 8 worker threads; app name taken from the suite class.
  private lazy val config: SparkConf = new SparkConf().setMaster("local[8]").setAppName(this.getClass.getName)
  // Shared context, created in beforeAll and stopped in afterAll.
  protected var sc: SparkContext = _
  // Builds an RDD backed by the remote Infinispan server configured by RemoteTest.
  def createInfinispanRDD[K, V] = {
    new InfinispanRDD[K, V](sc, configuration = getConfiguration)
  }
  override protected def beforeAll(): Unit = {
    // Create the context before delegating so other mixed-in traits can rely on sc.
    sc = new SparkContext(config)
    super.beforeAll()
  }
  override protected def afterAll(): Unit = {
    // Stop the context first, then let the rest of the stack tear down.
    sc.stop()
    super.afterAll()
  }
}
| rnowling/infinispan-spark | src/test/scala/org/infinispan/spark/test/Spark.scala | Scala | apache-2.0 | 867 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import cats.{Comonad, Eval}
import cats.effect.{ConcurrentEffect, Effect, IO, SyncIO}
import monix.execution.CancelablePromise
import scala.annotation.implicitNotFound
import scala.concurrent.Future
import scala.util.Try
/** A lawless type class that provides conversions to [[Task]].
*
* Sample:
* {{{
* // Conversion from cats.Eval
* import cats.Eval
*
* val source0 = Eval.always(1 + 1)
* val task0 = TaskLike[Eval].toTask(source0)
*
* // Conversion from Future
* import scala.concurrent.Future
*
* val source1 = Future.successful(1 + 1)
* val task1 = TaskLike[Future].toTask(source1)
*
* // Conversion from IO
* import cats.effect.IO
*
* val source2 = IO(1 + 1)
* val task2 = TaskLike[IO].toTask(source2)
* }}}
*
* This is an alternative to usage of `cats.effect.Effect`
* where the internals are specialized to `Task` anyway, like for
* example the implementation of `monix.reactive.Observable`.
*/
@implicitNotFound("""Cannot find implicit value for TaskLike[${F}].
Building this implicit value might depend on having an implicit
s.c.ExecutionContext in scope, a Scheduler or some equivalent type.""")
trait TaskLike[F[_]] {
  /**
    * Converts from `F[A]` to `Task[A]`, preserving referential
    * transparency if `F[_]` is a pure data type and preserving
    * interruptibility if the source is cancelable.
    *
    * @param fa the source value to convert into a [[Task]]
    */
  def toTask[A](fa: F[A]): Task[A]
}
object TaskLike extends TaskLikeImplicits0 {
  /**
    * Returns the available instance for `F`.
    */
  def apply[F[_]](implicit F: TaskLike[F]): TaskLike[F] = F
  /**
    * Instance for `Task`, returning same reference.
    */
  implicit val fromTask: TaskLike[Task] =
    new TaskLike[Task] {
      def toTask[A](fa: Task[A]): Task[A] = fa
    }
  /**
    * Converts to `Task` from [[scala.concurrent.Future]].
    */
  implicit val fromFuture: TaskLike[Future] =
    new TaskLike[Future] {
      def toTask[A](fa: Future[A]): Task[A] =
        Task.fromFuture(fa)
    }
  /**
    * Converts to `Task` from [[Coeval]].
    */
  implicit val fromCoeval: TaskLike[Coeval] =
    new TaskLike[Coeval] {
      def toTask[A](fa: Coeval[A]): Task[A] =
        Task.coeval(fa)
    }
  /**
    * Converts to `Task` from `cats.Eval`.
    */
  implicit val fromEval: TaskLike[Eval] =
    new TaskLike[Eval] {
      def toTask[A](fa: Eval[A]): Task[A] =
        Task.fromEval(fa)
    }
  /**
    * Converts to `Task` from
    * [[https://typelevel.org/cats-effect/datatypes/io.html cats.effect.IO]].
    */
  implicit val fromIO: TaskLike[IO] =
    new TaskLike[IO] {
      def toTask[A](fa: IO[A]): Task[A] =
        Task.fromIO(fa)
    }
  /**
    * Converts to `Task` from a `cats.effect.SyncIO`.
    */
  implicit val fromSyncIO: TaskLike[SyncIO] =
    new TaskLike[SyncIO] {
      def toTask[A](fa: SyncIO[A]): Task[A] =
        Task.fromIO(fa.toIO)
    }
  /**
    * Converts `scala.util.Try` to [[Task]].
    */
  implicit val fromTry: TaskLike[Try] =
    new TaskLike[Try] {
      def toTask[A](fa: Try[A]): Task[A] =
        Task.fromTry(fa)
    }
  /**
    * Converts [[monix.execution.CancelablePromise]] to [[Task]].
    */
  implicit val fromCancelablePromise: TaskLike[CancelablePromise] =
    new TaskLike[CancelablePromise] {
      def toTask[A](p: CancelablePromise[A]): Task[A] =
        Task.fromCancelablePromise(p)
    }
  /**
    * Converts `Function0` (parameter-less function, also called
    * thunks) to [[Task]]; evaluation is deferred until the task runs.
    */
  implicit val fromFunction0: TaskLike[Function0] =
    new TaskLike[Function0] {
      def toTask[A](thunk: () => A): Task[A] =
        Task.Eval(thunk)
    }
  /**
    * Converts a Scala `Either` (with a `Throwable` left) to a [[Task]].
    */
  implicit def fromEither[E <: Throwable]: TaskLike[Either[E, ?]] =
    new TaskLike[Either[E, ?]] {
      def toTask[A](fa: Either[E, A]): Task[A] =
        Task.fromEither(fa)
    }
}
// Lower-priority instances: picked only when no dedicated instance in
// `object TaskLike` matches (implicit resolution walks down the hierarchy).
private[eval] abstract class TaskLikeImplicits0 extends TaskLikeImplicits1 {
  /**
    * Converts to `Task` from
    * [[https://typelevel.org/cats-effect/typeclasses/concurrent-effect.html cats.effect.ConcurrentEffect]].
    */
  implicit def fromConcurrentEffect[F[_]](implicit F: ConcurrentEffect[F]): TaskLike[F] =
    new TaskLike[F] {
      def toTask[A](fa: F[A]): Task[A] =
        Task.fromConcurrentEffect(fa)
    }
}
private[eval] abstract class TaskLikeImplicits1 extends TaskLikeImplicits2 {
  /**
    * Converts to `Task` from
    * [[https://typelevel.org/cats-effect/typeclasses/effect.html cats.effect.Effect]]
    * (fallback for types with an `Effect` but no `ConcurrentEffect` instance).
    */
  implicit def fromEffect[F[_]](implicit F: Effect[F]): TaskLike[F] =
    new TaskLike[F] {
      def toTask[A](fa: F[A]): Task[A] =
        Task.fromEffect(fa)
    }
}
// Lowest-priority fallback: any Comonad can be converted by extracting eagerly
// inside a Task thunk.
private[eval] abstract class TaskLikeImplicits2 {
  /**
    * Converts to `Task` from [[cats.Comonad]] values.
    */
  implicit def fromComonad[F[_]](implicit F: Comonad[F]): TaskLike[F] =
    new TaskLike[F] {
      def toTask[A](fa: F[A]): Task[A] =
        Task(F.extract(fa))
    }
}
| ddworak/monix | monix-eval/shared/src/main/scala/monix/eval/TaskLike.scala | Scala | apache-2.0 | 5,704 |
package org.precompiler.scala101.ch03
/**
*
* @author Richard Li
*/
object FlexibleArgs extends App {

  /** Sums an arbitrary number of ints (0 for no arguments). */
  def sum(i: Int*): Int = i.sum

  /** Folds Math.max from 0, so the result is never negative (matches the original `/:`). */
  def max(i: Int*): Int = i.foldLeft(0)(Math.max)

  /** Prints the runtime class backing the varargs parameter. */
  def checkType(i: Int*): Unit = println(i.getClass)

  println(sum(1, 2, 3, 4))
  println(max(1, 2, 3, 4))

  val numbers = Array(1, 2, 3, 4)
  //Error
  //val s = sum(numbers)
  val s = sum(numbers: _*) // array explode notation
  println(s)
  checkType(numbers: _*)
}
| precompiler/scala-101 | learning-scala/src/main/scala/org/precompiler/scala101/ch03/FlexibleArgs.scala | Scala | apache-2.0 | 503 |
package dedep.bonobo._match.result
/** Match result where side A beats side B; A's score must be strictly higher. */
case class WinA(override val aGoals: Int, override val bGoals: Int) extends MatchResult {
  require(aGoals > bGoals)
} | dedep/bonobo-core | src/main/scala/dedep/bonobo/_match/result/WinA.scala | Scala | mit | 154 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.web.ld.cmw
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import cmwell.domain._
import cmwell.fts.{Equals, FieldFilter, PathFilter, Should}
import cmwell.util.string.Hash._
import cmwell.util.string._
import cmwell.ws.Settings
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
import com.typesafe.scalalogging.LazyLogging
import javax.inject._
import akka.util.Timeout
import cmwell.util.{BoxedFailure, EmptyBox, FullBox}
import ld.cmw.TimeBasedAccumulatedNsCache
import ld.exceptions.{ConflictingNsEntriesException, ServerComponentNotAvailableException}
import logic.CRUDServiceFS
import scala.concurrent._
import scala.concurrent.ExecutionContext.{global => globalExecutionContext}
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
object CMWellRDFHelper {
  // Marker mixed into RuntimeExceptions raised when a namespace prefix cannot be resolved.
  sealed trait PrefixResolvingError extends RuntimeException
  object PrefixResolvingError {
    def apply(prefix: String) = new RuntimeException(s"Failed to resolve prefix [$prefix]") with PrefixResolvingError
    def apply(prefix: String, cause: Throwable) =
      new RuntimeException(s"Failed to resolve prefix [$prefix]", cause) with PrefixResolvingError
  }
  // Outcome of nsUrlToHash: whether the /meta/ns entry must be created or already exists.
  sealed trait PrefixState //to perform
  case object Create extends PrefixState
  case object Exists extends PrefixState
  // case object Update extends PrefixState
  // Hashing algorithm selector for /meta/quad lookups (base64 preferred over legacy crc32).
  private sealed trait ByAlg
  private case object ByBase64 extends ByAlg
  private case object ByCrc32 extends ByAlg
}
@Singleton
class CMWellRDFHelper @Inject()(val crudServiceFS: CRUDServiceFS,
injectedExecutionContext: ExecutionContext,
actorSystem: ActorSystem)
extends LazyLogging {
import CMWellRDFHelper._
implicit val timeout = akka.util.Timeout(10.seconds) // TODO IS THIS OK?
implicit val ec = injectedExecutionContext // TODO IS THIS OK?
val newestGreatestMetaNsCacheImpl =
TimeBasedAccumulatedNsCache(Map.empty, 0L, 2.minutes, crudServiceFS)(injectedExecutionContext, actorSystem)
private def validateInfoton(infoton: Infoton): Try[(String, String)] = {
if (!infoton.systemFields.path.matches("/meta/ns/[^/]+"))
Failure(new IllegalStateException(s"weird looking path for /meta/ns infoton [${infoton.systemFields.path}/${infoton.uuid}]"))
else if (infoton.fields.isEmpty)
Failure(new IllegalStateException(s"no fields found for /meta/ns infoton [${infoton.systemFields.path}/${infoton.uuid}]"))
else {
val f = infoton.fields.get
metaNsFieldsValidator(infoton, f, "prefix").flatMap { p =>
metaNsFieldsValidator(infoton, f, "url").map(_ -> p)
}
}
}
private def metaNsFieldsValidator(i: Infoton, fields: Map[String, Set[FieldValue]], field: String): Try[String] = {
fields
.get(field)
.fold[Try[String]](
Failure(new IllegalStateException(s"$field field not found for /meta/ns infoton [${i.systemFields.path}/${i.uuid}]"))
) { values =>
if (values.isEmpty)
Failure(
new IllegalStateException(s"empty value set for $field field in /meta/ns infoton [${i.systemFields.path}/${i.uuid}]")
)
else if (values.size > 1)
Failure(
new IllegalStateException(
s"multiple values ${values.mkString("[,", ",", "]")} for $field field in /meta/ns infoton [${i.systemFields.path}/${i.uuid}]"
)
)
else
values.head.value match {
case s: String => Success(s)
case x =>
Failure(
new IllegalStateException(
s"found a weird /meta/ns infoton without a string value [${x.getClass.getSimpleName}] for prefix: [$i]"
)
)
}
}
}
private[this] val looksLikeHashedNsIDRegex = "[A-Za-z0-9\\\\-_]{5,7}"
private[this] val transformFuncURL2URL: Try[String] => Option[String] = {
case Success(url) => Some(url)
case Failure(_: NoSuchElementException) => None
case Failure(e) => throw e
}
private[this] val transformFuncURLAndPrefix2URLAndPrefix: Try[(String, String)] => Option[(String, String)] = {
case Success(urlAndPrefix) => Some(urlAndPrefix)
case Failure(_: NoSuchElementException) => None
case Failure(e) => throw e
}
@inline def invalidate(nsID: String)(implicit timeout: Timeout): Future[Unit] =
newestGreatestMetaNsCacheImpl.invalidate(nsID)
@inline def invalidateAll()(implicit timeout: Timeout): Future[Unit] = newestGreatestMetaNsCacheImpl.invalidateAll()
@deprecated("API may falsely return None on first calls for some value", "Quetzal")
def hashToUrl(nsID: String, timeContext: Option[Long]): Option[String] = {
val f = hashToUrlAsync(nsID, timeContext)
f.value.fold[Option[String]] {
if (nsID.matches(looksLikeHashedNsIDRegex))
// Await is OK here, as it is very rare (first fetch of an id which was not found in cache)
Await.ready(f, 9.seconds).value.flatMap(transformFuncURL2URL)
else
None
}(transformFuncURL2URL)
}
def hashToUrlAsync(hash: String, timeContext: Option[Long])(implicit ec: ExecutionContext): Future[String] =
newestGreatestMetaNsCacheImpl
.get(hash, timeContext)
.transform {
case f @ Failure(_: NoSuchElementException) => f.asInstanceOf[Try[String]]
case f @ Failure(_: ConflictingNsEntriesException) => f.asInstanceOf[Try[String]]
case Success((url, _)) => Success(url)
case Failure(e) => Failure(ServerComponentNotAvailableException(s"hashToUrlAsync failed for [$hash]", e))
}(ec)
@deprecated("API may falsely return None on first calls for some value", "Quetzal")
def urlToHash(url: String, timeContext: Option[Long]): Option[String] =
newestGreatestMetaNsCacheImpl.getByURL(url, timeContext).value match {
case None => throw ServerComponentNotAvailableException("Internal old API (urlToHash) used on id [" + url +
"], which was not yet in cache. subsequent requests should succeed eventually. Call should be migrated to new API")
case Some(Success(nsID)) => Some(nsID)
case Some(Failure(_: NoSuchElementException)) => None
case Some(Failure(e)) => throw e
}
def urlToHashAsync(url: String, timeContext: Option[Long])(implicit ec: ExecutionContext): Future[String] =
newestGreatestMetaNsCacheImpl
.getByURL(url, timeContext)
.transform {
case f @ Failure(_: NoSuchElementException) => f.asInstanceOf[Try[String]]
case f @ Failure(_: ConflictingNsEntriesException) => f.asInstanceOf[Try[String]]
case Failure(e) => Failure(ServerComponentNotAvailableException(s"urlToHashAsync failed for [$url]", e))
case success => success
}(ec)
def getIdentifierForPrefixAsync(prefix: String,
timeContext: Option[Long])(implicit ec: ExecutionContext): Future[String] =
newestGreatestMetaNsCacheImpl
.getByPrefix(prefix, timeContext)
.transform {
case f @ Failure(_: NoSuchElementException) => f.asInstanceOf[Try[String]]
case f @ Failure(_: ConflictingNsEntriesException) => f.asInstanceOf[Try[String]]
case Failure(e) =>
Failure(ServerComponentNotAvailableException(s"getIdentifierForPrefixAsync failed for [$prefix]", e))
case success => success
}(ec)
@deprecated("API may falsely return None on first calls for some value", "Quetzal")
def hashToUrlAndPrefix(nsID: String, timeContext: Option[Long]): Option[(String, String)] = {
val f = newestGreatestMetaNsCacheImpl.get(nsID, timeContext)
f.value.fold[Option[(String, String)]] {
if (nsID.matches(looksLikeHashedNsIDRegex))
// Await is OK here, as it is very rare (first fetch of an id which was not found in cache)
Await.ready(f, 9.seconds).value.flatMap(transformFuncURLAndPrefix2URLAndPrefix)
else
None
}(transformFuncURLAndPrefix2URLAndPrefix)
}
/**
* @param url as plain string
* @return corresponding hash, or if it's a new namespace, will return an available hash to register the meta infoton at,
* paired with a boolean indicating if this is new or not/
*/
def nsUrlToHash(url: String, timeContext: Option[Long]): (String, PrefixState) = {
def inner(hash: String): Future[(String, PrefixState)] =
hashToUrlAsync(hash, timeContext)(globalExecutionContext).transformWith {
case Success(`url`) => Future.successful(hash -> Exists)
case Failure(_: NoSuchElementException) => Future.successful(hash -> Create)
case Failure(err) =>
Future.failed(new Exception(s"nsUrlToHash.inner failed for url [$url] and hash [$hash]", err))
case Success(notSameUrl /* not same url */ ) => {
val doubleHash = crc32base64(hash)
logger.warn(s"double hashing url's [$url] hash [$hash] to [$doubleHash] because not same as [$notSameUrl]")
inner(doubleHash)
}
}(globalExecutionContext)
Await.result(
urlToHashAsync(url, timeContext).transformWith {
case Success(nsIdentifier) => Future.successful(nsIdentifier -> Exists)
case Failure(_: NoSuchElementException) => inner(crc32base64(url))
case Failure(somethingBad) =>
logger.error(s"nsUrlToHash failed for url [$url]", somethingBad)
Future.failed(somethingBad)
},
Duration.Inf
) //FIXME: Await...
}
@inline def hashIterator(url: String) =
Iterator.iterate(cmwell.util.string.Hash.crc32base64(url))(cmwell.util.string.Hash.crc32base64)
// in case of ambiguity between meta/ns infotons with same url, this will return the one that was not auto-generated
def getTheFirstGeneratedMetaNsInfoton(url: String,
infotons: Seq[Infoton],
timeContext: Option[Long]): Future[Infoton] = {
require(infotons.nonEmpty)
val hashSet = infotons.view.map(_.systemFields.name).to(Set)
val hashChain = hashIterator(url).take(infotons.length + 5).toStream
// find will return the first (shortest compute chain) hash
hashChain.find(hashSet) match {
case Some(h) =>
Future.successful(infotons.find(_.systemFields.name == h).get) //get is safe here because `hashSet` was built from infotons names
case None =>
/* if we were not able to find a suitable hash
* that back a /meta/ns infoton from the given
* Seq, it means one of two things:
* Either we have so many collisions in /meta/ns
* that all the hashes computed points to other
* namespaces,
* or that we have old style unhashed identifiers
* in /meta/ns.
* Giving precedence to hashed versions, since from
* now on (Jan 2018) old style isn't supported,
* and should have been migrated to hashed identifiers.
* Only if we fail to find such, we will arbitrarily
* choose the first in lexicographic order from the Seq
*/
logger.warn(s"hashChain ${hashChain.mkString("[", ", ", "]")} did not contain a valid identifier for ${hashSet
.mkString("[", ", ", "]")}")
getFirstHashForNsURL(url, infotons, timeContext).transform {
case Success(Right(i)) => Success(i)
case Success(Left(hash)) =>
Failure(
new IllegalStateException(
s"There's an unoccupied hash [$hash] that can fit [$url]. Manual data repair is required. please also consider ns ambiguities [$infotons]"
)
)
case Failure(err) =>
val first = infotons.minBy(_.systemFields.name)
logger.warn(
s"Was unable to validate any of the given infotons [$infotons], choosing the first in lexicographic order [${first.systemFields.path}]",
err
)
Success(first)
}(globalExecutionContext)
}
}
def hashToInfotonAsync[T](hash: String)(out: (Infoton, (String, String)) => T)(ec: ExecutionContext): Future[T] =
crudServiceFS
.getInfotonByPathAsync(s"/meta/ns/$hash")
.transform {
case Failure(err) => Failure(new IllegalStateException(s"could not load /meta/ns/$hash", err))
case Success(EmptyBox) => Failure(new NoSuchElementException(s"could not load /meta/ns/$hash"))
case Success(BoxedFailure(err)) =>
Failure(new IllegalStateException(s"could not load /meta/ns/$hash from IRW", err))
case Success(FullBox(infoton)) =>
validateInfoton(infoton).transform(
urlPrefix => Try(out(infoton, urlPrefix)),
e => Failure(new IllegalStateException(s"loaded invalid infoton for [$hash] / [$infoton]", e))
)
}(ec)
def getFirstHashForNsURL(url: String,
infotons: Seq[Infoton],
timeContext: Option[Long]): Future[Either[String, Infoton]] = {
val it = hashIterator(url)
def foldWhile(usedHashes: Set[String]): Future[Either[String, Infoton]] = {
val hash = it.next()
if (usedHashes(hash))
Future.failed(
new IllegalStateException(
s"found a hash cycle starting with [$hash] without getting a proper infoton for [$url]"
)
)
else
hashToUrlAsync(hash, timeContext)(globalExecutionContext).transformWith {
case Success(`url`) =>
infotons
.find(_.systemFields.name == hash)
.fold[Future[Either[String, Infoton]]] {
logger.error(s"hash [$hash] returned the right url [$url], but was not found in original seq?!?!?")
// getting the correct infoton anyway:
hashToInfotonAsync(hash)((infoton, _) => infoton)(globalExecutionContext)
.map(Right.apply)(globalExecutionContext)
}((Future.successful[Either[String, Infoton]] _).compose(Right.apply))
case Success(someOtherUrl) =>
// Yes. I am aware the log will be printed in every iteration of the recursion. That's the point.
logger.warn(
s"ns collision detected. Hash [$hash] points to [$someOtherUrl] but can be computed from [$url]. " +
s"This might be the result of abusing the namespace mechanism, which is not supposed to be used with too many namespaces. " +
s"Since current implementation uses a hash with 32 bits of entropy, it means that if you have more than 64K namespaces, " +
s"you'll have over 50% chance of collision. This is way above what should be necessary, and unless you are very unlucky, " +
s"which in this case you'll have a single namespace causing this log to be printed once in a while for the same namespace, " +
s"but can probably ignore it, it is likely that you are abusing CM-Well in ways this humble developer didn't thought reasonable. " +
s"In this case, either refactor using a hash function with more bits of entropy is needed (may I recommend `xxhash64`, " +
s"which you'll probably find at `cmwell.util.string.Hash.xxhash64`, assuming 64 bits of entropy will suffice), or, " +
s"reconsider your use-case as it is probably wrong, or buggy. Please inspect the ns bookkeeping infotons under /meta/ns."
)
foldWhile(usedHashes + hash)
case Failure(err) =>
logger.warn(s"could not load hash [$hash] for [$infotons]", err)
Future.successful(Left(hash))
}(globalExecutionContext)
}
foldWhile(Set.empty)
}
// ^
// /|\\
// |
// └-- NS caching section
//
// ┌-- QUADS caching section
// |
// \\|/
// v
private[this] val graphToAliasCache: LoadingCache[String, String] = CacheBuilder
.newBuilder()
.maximumSize(Settings.quadsCacheSize)
.expireAfterWrite(2, TimeUnit.MINUTES)
.build {
new CacheLoader[String, String] {
override def load(url: String): String = {
val f = getAliasForQuadUrlAsyncActual(url)(injectedExecutionContext)
f.onComplete {
case Success(Some(alias)) => aliasToGraphCache.put(alias, url)
case Success(None) => logger.trace(s"load for graph: $url is empty")
case Failure(e) => logger.error(s"load for $url failed", e)
}(scala.concurrent.ExecutionContext.Implicits.global)
Await.result(f, 10.seconds).get
}
}
}
private[this] val aliasToGraphCache: LoadingCache[String, String] = CacheBuilder
.newBuilder()
.maximumSize(Settings.quadsCacheSize)
.expireAfterWrite(2, TimeUnit.MINUTES)
.build {
new CacheLoader[String, String] {
override def load(alias: String): String = {
val f = getQuadUrlForAliasAsyncActual(alias)(injectedExecutionContext)
f.onComplete {
case Success(graph) => graphToAliasCache.put(graph, alias)
case Failure(e) => logger.error(s"load for $alias failed", e)
}(scala.concurrent.ExecutionContext.Implicits.global)
Await.result(f, 10.seconds)
}
}
}
def getAliasForQuadUrl(graphName: String): Option[String] = Try(graphToAliasCache.get(graphName)).toOption
def getAliasForQuadUrlAsync(graph: String)(implicit ec: ExecutionContext): Future[Option[String]] = {
val f = getAliasForQuadUrlAsyncActual(graph)
f.onComplete {
case Success(Some(alias)) => {
graphToAliasCache.put(graph, alias)
aliasToGraphCache.put(alias, graph)
}
case Success(None) => logger.info(s"graph: $graph could not be retrieved")
case Failure(e) => logger.error(s"getAliasForQuadUrlAsync for $graph failed", e)
}
f
}
def getQuadUrlForAlias(alias: String): Option[String] = Try(aliasToGraphCache.get(alias)).toOption
def getQuadUrlForAliasAsync(alias: String)(implicit ec: ExecutionContext): Future[String] = {
val f = getQuadUrlForAliasAsyncActual(alias)
f.onComplete {
case Success(graph) => {
aliasToGraphCache.put(alias, graph)
graphToAliasCache.put(graph, alias)
}
case Failure(e) => logger.error(s"getQuadUrlForAliasAsync for $alias failed", e)
}
f
}
  /**
   * Search `/meta/quad` for the infoton(s) whose "alias" field equals `alias`
   * and extract the single graph URL they point at.
   *
   * The future fails (via `require`) when the alias is unknown, when the hits
   * disagree on the url, or when the stored url value is not a string.
   */
  def getQuadUrlForAliasAsyncActual(alias: String)(implicit ec: ExecutionContext): Future[String] = {
    crudServiceFS
      .search(pathFilter = Some(PathFilter("/meta/quad", false)),
              fieldFilters = Some(FieldFilter(Should, Equals, "alias", alias)),
              datesFilter = None,
              withData = true)
      .map {
        case SearchResults(_, _, total, _, length, infotons, _) => {
          require(total != 0, s"the alias $alias is not associated to any graph")
          val url = {
            // group the hits by their "url" field: all hits must agree on one url
            val byUrl = infotons.groupBy(_.fields.flatMap(_.get("url")))
            require(byUrl.size == 1, s"group by url must be unambiguous: $byUrl")
            //TODO: eliminate intentional duplicates (same graph in `/meta/quad/crc32` & `/meta/quad/base64`), i.e. delete crc32 version as a side effect.
            //TODO: if only crc32 version exists, replace it with base64 version
            //i.e: byUrl.valuesIterator.foreach(...)
            val urlSet = byUrl.keys.headOption.flatMap(identity)
            require(urlSet.isDefined, s"url must have keys defined: $byUrl, $urlSet")
            val urls = urlSet.toSeq.flatMap(identity)
            require(urls.size == 1,
                    s"must have exactly 1 URI: ${urls.mkString("[", ",", "]")}, fix any of this by using")
            require(urls.head.value.isInstanceOf[String], "url value not a string")
            urls.head.value.asInstanceOf[String]
          }
          url
        }
      }
  }
private[this] def getAliasForQuadUrlAsyncActual(
graphName: String
)(implicit ec: ExecutionContext): Future[Option[String]] = {
getAliasForQuadUrlAsync(graphName, ByBase64).flatMap {
case some: Some[String] => Future.successful(some)
case None => getAliasForQuadUrlAsync(graphName, ByCrc32)
}
}
  /**
   * Fetch the alias stored at `/meta/quad/<hash>`, where the hash of
   * `graphName` is computed with the chosen algorithm (url-safe base64 by
   * default, crc32 otherwise). Returns None when the infoton is missing or
   * has no "alias" field.
   */
  private[this] def getAliasForQuadUrlAsync(graphName: String, byAlg: ByAlg = ByBase64)(
    implicit ec: ExecutionContext
  ): Future[Option[String]] = {
    val hashByAlg: String = byAlg match {
      case ByBase64 => Base64.encodeBase64URLSafeString(graphName)
      case ByCrc32 => Hash.crc32(graphName)
    }
    crudServiceFS.getInfoton("/meta/quad/" + hashByAlg, None, None).flatMap {
      case Some(Everything(i)) =>
        Future.successful[Option[String]] {
          i.fields.flatMap(_.get("alias").flatMap { set =>
            {
              // keep only the string values of the "alias" field
              val aliases = set.collect {
                case FString(value, _, _) => value
              }
              // multiple aliases for one graph is unexpected; keep the first but warn
              if (aliases.size > 1) {
                logger.warn(s"quads ambiguity alert: $aliases")
              }
              aliases.headOption
            }
          })
        }
      case _ => Future.successful(None)
    }
  }
}
| dudi3001/CM-Well | server/cmwell-ws/app/ld/cmw/CMWellRDFHelper.scala | Scala | apache-2.0 | 22,062 |
package org.jetbrains.sbt.project.template.techhub
import com.intellij.openapi.application.ModalityState
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode
import com.intellij.openapi.externalSystem.service.project.wizard.AbstractExternalModuleBuilder
import com.intellij.openapi.externalSystem.settings.{AbstractExternalSystemSettings, ExternalSystemSettingsListener}
import com.intellij.openapi.externalSystem.util.{ExternalSystemApiUtil, ExternalSystemUtil}
import com.intellij.openapi.project.Project
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.settings.SbtProjectSettings
/**
* User: Dmitry.Naydanov
* Date: 22.02.17.
*/
/**
 * Mixin for sbt module builders: links the freshly created module's sbt
 * settings to the project and schedules a background sbt import/refresh
 * through the IntelliJ external-system API.
 */
trait SbtRefreshCaller {
  this: AbstractExternalModuleBuilder[SbtProjectSettings] =>
  def callForRefresh(project: Project) {
    val runnable = new Runnable {
      override def run(): Unit = {
        // Fetch the sbt-specific external-system settings container for this project.
        val settings =
          ExternalSystemApiUtil.getSettings(project, SbtProjectSystem.Id).
            asInstanceOf[AbstractExternalSystemSettings[_ <: AbstractExternalSystemSettings[_, SbtProjectSettings, _],
            SbtProjectSettings, _ <: ExternalSystemSettingsListener[SbtProjectSettings]]]
        // Register this module's content root as the external project path...
        getExternalProjectSettings setExternalProjectPath getContentEntryPath
        settings linkProject getExternalProjectSettings
        // ...then kick off a non-blocking refresh of that project.
        ExternalSystemUtil.refreshProject(project, SbtProjectSystem.Id, getContentEntryPath,
          false, ProgressExecutionMode.IN_BACKGROUND_ASYNC)
      }
    }
    ExternalSystemApiUtil.invokeLater(project, ModalityState.NON_MODAL, runnable)
  }
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/project/template/techhub/SbtRefreshCaller.scala | Scala | apache-2.0 | 1,602 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @src https://openntf.org/XSnippets.nsf/snippet.xsp?id=dateutils
*/
package eleflow.uberdata.core.util
import java.nio.charset.StandardCharsets
import java.nio.file.{FileSystems, Files}
import java.text.ParseException
import eleflow.uberdata.core.conf.SparkNotebookConfig
import eleflow.uberdata.core.enums.PeriodOfDay
import org.apache.spark.SparkFiles
import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.DateTimeFormat
import scala.collection.JavaConversions._
import scala.util.{Success, Try}
/**
* Created by dirceu on 24/02/15.
*/
/** Factory for [[DateTimeParser]] instances. */
object DateTimeParser extends Serializable {
  // Millis offset of the JVM default time zone, captured once at class-load time.
  val offset = DateTimeZone.getDefault.getOffset(new DateTime())
  /** Parser with an explicit zone offset (in millis). */
  def apply(offset: Int): DateTimeParser = new DateTimeParser(offset)
  /** Parser using the default-zone offset captured above. */
  def apply(): DateTimeParser = new DateTimeParser(offset)
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Best-effort date/time parser. The Joda pattern is resolved either from a
 * property file distributed to the cluster (see `readDateFormat`) or guessed
 * from the shape of the input string (see `determineDateFormat`).
 *
 * NOTE(review): the `offset` constructor argument is currently unused by
 * parsing (the `minusMillis` adjustment is commented out) — confirm intent.
 */
final class DateTimeParser(offset: Int) extends Serializable {
  /**
   * Parse `dateString` with the configured or auto-detected format.
   * Returns None when no format is known or parsing fails.
   */
  def parse(dateString: String): Option[DateTime] = {
    val dateFormat: Option[String] =
      readDateFormat.orElse(determineDateFormat(dateString))
    dateFormat.flatMap { f =>
      // Use the resolved pattern `f` directly; the previous code re-threaded
      // the whole Option through the overload, leaving `f` unused.
      Try {
        parse(dateString, f)
      } match {
        case Success(s) => s
        case _ => None
      }
    }
  }
  /** Parse with an explicit Joda pattern; the input is interpreted as UTC. */
  def parse(dateString: String, dateFormat: String): Option[DateTime] = {
    val formatter = DateTimeFormat.forPattern(dateFormat).withZoneUTC()
    Some(formatter.parseDateTime(dateString)) //.minusMillis(offset))
  }
  /** Parse with an optional pattern, falling back to auto-detection when absent. */
  def parse(dateString: String, dateFormatOption: Option[String]): Option[DateTime] = {
    dateFormatOption match {
      case Some(dateFormat) =>
        parse(dateString, dateFormat)
      case None =>
        parse(dateString)
    }
  }
  /** True when `dateString` matches any configured or detectable format. */
  def isValidDate(dateString: String): Boolean = parse(dateString).isDefined
  /**
   * True when `dateString` parses under `dateFormat`.
   *
   * Joda's `DateTimeFormatter.parseDateTime` signals bad input with an
   * `IllegalArgumentException` (not `ParseException`), so both are handled
   * here; previously the IllegalArgumentException escaped to the caller
   * instead of this method returning false.
   */
  def isValidDate(dateString: String, dateFormat: String): Boolean = {
    try {
      parse(dateString, dateFormat)
      true
    } catch {
      case _: ParseException | _: IllegalArgumentException =>
        false
    }
  }
  /** Guess a Joda pattern for `dateString` from the regex table below. */
  def determineDateFormat(dateString: String): Option[String] =
    DATE_FORMAT_REGEXPS.keySet
      .find(regexp => dateString.toLowerCase.matches(regexp))
      .flatMap(f => DATE_FORMAT_REGEXPS.get(f))
  // Regex -> Joda pattern lookup table used by determineDateFormat.
  // NOTE(review): a few entries carry doubled backslashes (the "yyyyMMdd",
  // "MM/dd/yyyy HH:mm", "yyyy/MM/dd HH:mm" and "yyyyMMdd HHmmss" rows) which
  // look inconsistent with the rest and may never match — verify them.
  private val DATE_FORMAT_REGEXPS: Map[String, String] = Map(
    "^\\\\d{8}$" -> "yyyyMMdd",
    """^\\d{1,2}-\\d{1,2}-\\d{4}$""" -> "dd-MM-yyyy",
    """^\\d{4}-\\d{1,2}-\\d{1,2}$""" -> "yyyy-MM-dd",
    """^\\d{1,2}/\\d{1,2}/\\d{4}$""" -> "MM/dd/yyyy",
    """^\\d{4}/\\d{1,2}/\\d{1,2}$""" -> "yyyy/MM/dd",
    """^\\d{1,2}\\s[a-z]{3}\\s\\d{4}$""" -> "dd MMM yyyy",
    """^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}$""" -> "dd MMMM yyyy",
    """^\\d{12}$""" -> """yyyyMMddHHmm""",
    """^\\d{8}\\s\\d{4}$""" -> """yyyyMMdd HHmm""",
    """^\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}$""" -> "dd-MM-yyyy HH:mm",
    """^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}$""" -> "yyyy-MM-dd HH:mm",
    """^\\d{1,2}/\\d{1,2}/\\\\d{4}\\s\\d{1,2}:\\d{2}$""" -> "MM/dd/yyyy HH:mm",
    """^\\d{4}/\\d{1,2}/\\\\d{1,2}\\s\\d{1,2}:\\d{2}$""" -> "yyyy/MM/dd HH:mm",
    """^\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}$""" -> "dd MMM yyyy HH:mm",
    """^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}$""" -> "dd MMMM yyyy HH:mm",
    """^\\d{14}$""" -> """yyyyMMddHHmmss""",
    """^\\d{8}\\\\s\\d{6}$""" -> """yyyyMMdd HHmmss""",
    """^\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "dd-MM-yyyy HH:mm:ss",
    """^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "yyyy-MM-dd HH:mm:ss",
    """^\\d{1,2}/\\d{1,2}/\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "MM/dd/yyyy HH:mm:ss",
    """^\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "yyyy/MM/dd HH:mm:ss",
    """^\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "dd MMM yyyy HH:mm:ss",
    """^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$""" -> "dd MMMM yyyy HH:mm:ss"
  )
  /** Bucket an hour-of-day into dawn / morning / afternoon / evening. */
  def period(date: DateTime): PeriodOfDay.PeriodOfDay = {
    date.getHourOfDay match {
      case hour if hour < 6 => PeriodOfDay.Dawn
      case hour if hour < 12 => PeriodOfDay.Morning
      case hour if hour < 18 => PeriodOfDay.Afternoon
      case _ => PeriodOfDay.Evening
    }
  }
  // Local path where the chosen date format is persisted for the cluster.
  lazy val dateFormatFilePath = FileSystems.getDefault.getPath(
    SparkNotebookConfig.tempFolder,
    SparkNotebookConfig.propertyFolder,
    SparkNotebookConfig.dateFormatFileName
  )
  private lazy val propertyFolderPath = FileSystems.getDefault.getPath(
    SparkNotebookConfig.tempFolder,
    SparkNotebookConfig.propertyFolder
  )
  /** Persist `dateFormat` to the property file, replacing any previous value. */
  def applyDateFormat(dateFormat: String) = {
    if (Files.notExists(propertyFolderPath)) {
      Files.createDirectory(propertyFolderPath)
    }
    Files.deleteIfExists(dateFormatFilePath)
    Files.write(dateFormatFilePath, dateFormat.getBytes)
  }
  // Read the format distributed via SparkFiles, if present on this node.
  private def readDateFormat = {
    val clusterFilePath = FileSystems.getDefault.getPath(
      SparkFiles.get(SparkNotebookConfig.dateFormatFileName)
    )
    if (Files.exists(clusterFilePath))
      Files.readAllLines(clusterFilePath, StandardCharsets.UTF_8).headOption
    else None
  }
}
| eleflow/uberdata | iuberdata_core/src/main/scala/eleflow/uberdata/core/util/DateTimeParser.scala | Scala | apache-2.0 | 6,239 |
package k2b6s9j.Define
import org.scaloid.common._
import scala.language.postfixOps
/**
 * Main screen of the app, built with the Scaloid DSL: shows the
 * introduction/search/list labels and a button that opens the test
 * DefinitionActivity.
 */
class DefineActivity extends SActivity with SContext {
  /**
   * Called when the activity is first created.
   */
  onCreate {
    // Vertical layout: three static text views plus a navigation button.
    contentView = new SVerticalLayout {
      STextView(R.string.introduction)
      STextView(R.string.search)
      STextView(R.string.list)
      SButton("View Test Definition Activity").onClick(startActivity[DefinitionActivity])
    }.padding(20 dip)
  }
}
| kepler0/Define | android/src/k2b6s9j/Define/DefineActivity.scala | Scala | mit | 500 |
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
/**
* Implicit Function Types:
* - http://dotty.epfl.ch/docs/reference/implicit-function-types.html,
* - https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html
*/
/**
 * Demonstrates Dotty implicit function types: a `Contextual[T]` closes over an
 * implicit ExecutionContext, a `Parseable[T]` over an implicit StringParser.
 */
object ImplicitFunctions {
  object context {
    // type alias Contextual
    type Contextual[T] = implicit ExecutionContext => T
    // sum is expanded to sum(x, y)(ctx)
    def asyncSum(x: Int, y: Int): Contextual[Future[Int]] = Future(x + y)
    // Equivalent form, written as an explicit implicit-function literal.
    def asyncMult(x: Int, y: Int) = { implicit ctx: ExecutionContext =>
      Future(x * y)
    }
  }
  object parse {
    // Result type requiring a StringParser[T] in implicit scope at the call site.
    type Parseable[T] = implicit ImplicitParams.StringParser[T] => Try[T]
    // Parses both operands and adds them; the Try fails if either parse fails.
    def sumStrings(x: String, y: String): Parseable[Int] = {
      val parser = implicitly[ImplicitParams.StringParser[Int]]
      val tryA = parser.parse(x)
      val tryB = parser.parse(y)
      for {
        a <- tryA
        b <- tryB
      } yield a + b
    }
  }
  // Exercises both demos; the last call prints a Failure ("a" is not an Int).
  def test: Unit = {
    import ExecutionContext.Implicits.global
    context.asyncSum(3, 4).foreach(println)
    context.asyncMult(3, 4).foreach(println)
    println(parse.sumStrings("3", "4"))
    println(parse.sumStrings("3", "a"))
  }
}
| smarter/dotty-example-project | src/main/scala/ImplicitFunctions.scala | Scala | bsd-3-clause | 1,223 |
/*
* Copyright 2012-2014 Kieron Wilkinson.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package viper.ui
import viper.domain._
/**
* Manage state of a subscriber for visualisation. This class holds the unread count,
* as well as the highest severity (non-read) item in the subscription. This could have
* been more nicely done with a Calculation in GlazedLists, but updating the list with
* changes to a large number of elements (like 10,000) is pretty slow.
*/
/**
 * Tracks the visualisation state of one subscriber: per-severity unread counts
 * and the highest unread severity. Counters are updated incrementally because
 * recomputing over large record lists (e.g. 10,000 items) is slow.
 *
 * NOTE(review): this case class carries mutable internal state (the counts
 * array), so instances should not be used as map/set keys relying on equality.
 */
case class Subscribed(subscriber: Subscriber) {

  /** Unread counts indexed by severity ordinal; mutated in place, never reassigned. */
  private val unreadCounts = new Array[Int](Severities.count)

  /** Total unread records across all severities. */
  def unread = unreadCounts.sum

  /** Highest severity with at least one unread record; Info when all are read. */
  def severity: Severity =
    (Severities.max.ordinal to Severities.min.ordinal by -1)
      .find(unreadCounts(_) > 0)
      .flatMap(i => Severities.values.find(_.ordinal == i))
      .getOrElse(Info)

  def added(rs: Seq[Record]): Unit = rs.foreach(added(_))

  /** Count a newly added record, unless it is already marked read. */
  def added(r: Record): Unit = {
    if (!isRead(r)) {
      update(r, 1)
    }
  }

  def deleted(ds: Seq[Record]): Unit = ds.foreach(deleted(_))

  /** Discount a deleted record, unless it was already read. */
  def deleted(r: Record): Unit = {
    if (!isRead(r)) {
      update(r, -1)
    }
  }

  /** Record transitioned to read: decrement only if it was previously unread. */
  def read(r: Record, oldRead: Boolean): Unit = {
    if (!oldRead) {
      update(r, -1)
    }
  }

  /** Record transitioned to unread: increment only if it was previously read. */
  def unread(r: Record, oldRead: Boolean): Unit = {
    if (oldRead) {
      update(r, 1)
    }
  }

  // Adjust the counter bucket for the record's severity by +/- 1.
  private def update(record: Record, inc: Int): Unit = {
    unreadCounts(record.severity.ordinal) += inc
  }

  private def isRead(record: Record) = record match {
    case r: Readable => r.read
    case _ => false //todo maybe true, so we always show colour of non-Readable lists
  }
}
| vyadh/viper | ui/src/main/scala/viper/ui/Subscribed.scala | Scala | apache-2.0 | 2,148 |
import sbt._
import Keys._
import cn.gov.heshan.sbt.CustomSettings
import com.typesafe.sbt.SbtGit._
/** sbt build definition for the slickea library and its test sub-projects. */
object slickea extends Build {
  // Banner printed once when sbt loads this build definition.
  val welcomeString = """
 welcome to build enuma elish !
 _ _ _
| | (_) | |
 ___ | | _ ___ | | __ ___ __ _
/ __| | | | | / __| | |/ / / _ \\ / _` |
\\__ \\ | | | | | (__ | < | __/ | (_| |
|___/ |_| |_| \\___| |_|\\_\\ \\___| \\__,_|
 """
  println(welcomeString)
  // Root project: the slickea library itself (slick + scala-compiler + JPA deps).
  lazy val slickea = Project(
    id = "slickea",
    base = file("."),
    settings = Defaults.coreDefaultSettings
  )
    .enablePlugins(com.typesafe.sbt.GitBranchPrompt)
    .settings(
      name := "slickea",
      libraryDependencies ++= Seq(
        "com.typesafe.slick" %% "slick" % "3.0.0",
        "org.scala-lang" % "scala-compiler" % scalaVersion.value
      ),
      libraryDependencies ++= CustomSettings.jpaDependencies,
      dependencyOverrides ++= {
        Set(
        )
      }
    )
    .settings(CustomSettings.customSettings: _*)
  // Shared test model classes; Java sources compiled before Scala.
  lazy val model4Test = Project(
    id = "model4Test",
    base = file("./model4Test"),
    settings = Defaults.coreDefaultSettings
  )
    .settings(
      name := "model4Test",
      libraryDependencies ++= CustomSettings.jpaDependencies,
      compileOrder in Compile := CompileOrder.JavaThenScala
    )
    .settings(CustomSettings.customSettings: _*) dependsOn slickea
  // Lightweight test suite built on the shared model.
  lazy val slickeaSimpleTest = Project(
    id = "simpleTest",
    base = file("./simpleTest"),
    settings = Defaults.coreDefaultSettings
  )
    .settings(
      name := "simpleTest",
      libraryDependencies ++= CustomSettings.testDependencies
    )
    .settings(CustomSettings.customSettings: _*) dependsOn model4Test
  // Full test suite built on the shared model.
  lazy val slickeaFullTest = Project(
    id = "fullTest",
    base = file("./fullTest"),
    settings = Defaults.coreDefaultSettings
  )
    .settings(
      name := "fullTest",
      libraryDependencies ++= CustomSettings.testDependencies
    )
    .settings(CustomSettings.customSettings: _*) dependsOn model4Test
} | djx314/slickea | project/slickea.scala | Scala | mit | 2,000 |
import play.Project._
import sbt._
import sbt.Keys._
import org.scalastyle.sbt.ScalastylePlugin
import com.typesafe.sbt.SbtScalariform
/** Shared build constants, resolvers, common settings and dependency lists. */
object BuildSettings {
  val appOrganization = "com.itsdamiya"
  val appName = "Legendary"
  val appVersion = "0.0.6-SNAPSHOT"
  // Extra Maven repositories needed beyond the defaults.
  val commonResolvers = Seq(
    "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
    "Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/",
    "spray repo" at "http://repo.spray.io"
  )
  // Settings applied to every sub-project (compiler flags, scalastyle, scalariform).
  val commonSettings = Seq(
    organization := appOrganization,
    scalacOptions ++= Seq("-feature", "-deprecation", "-language:postfixOps", "-language:reflectiveCalls", "-language:implicitConversions", "-Xlint"),
    resolvers ++= commonResolvers
  ) ++ ScalastylePlugin.Settings ++ SbtScalariform.defaultScalariformSettings
  // Logging and test dependencies shared by all modules.
  val commonDeps = Seq(
    json,
    "com.typesafe" %% "scalalogging-slf4j" % "1.0.1",
    "ch.qos.logback" % "logback-classic" % "1.1.1",
    "org.scalatest" % "scalatest_2.10" % "2.0" % "test",
    "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
    "org.scalamock" %% "scalamock-scalatest-support" % "3.0.1" % "test"
  )
  // Dependencies for the Play core module.
  // NOTE(review): "2.3 " (joda-time) carries a trailing space — likely a typo
  // in the version string; verify it still resolves.
  val coreDeps = Seq(
    "com.typesafe.slick" %% "slick" % "2.0.0",
    "postgresql" % "postgresql" % "9.1-901.jdbc4",
    "joda-time" % "joda-time" % "2.3 ",
    "org.joda" % "joda-convert" % "1.5",
    "org.mindrot" % "jbcrypt" % "0.3m",
    "com.github.tototoshi" %% "slick-joda-mapper" % "1.0.0",
    "com.typesafe.play" %% "play-slick" % "0.6.0-SNAPSHOT",
    "net.sf.ehcache" % "ehcache-core" % "2.6.8",
    filters,
    jdbc,
    ws
  ) ++ commonDeps
  // Dependencies for the FateClasher spray/akka client library.
  val fateClasherDeps = Seq(
    "io.spray" % "spray-client" % "1.3-RC1",
    "com.typesafe.akka" %% "akka-actor" % "2.3.0-RC1"
  ) ++ commonDeps
}
/** Project graph: FateClasher library, Play core app, and an aggregating root. */
object Build extends Build {
  import BuildSettings._
  // Standalone spray/akka client library.
  lazy val FateClasherProject = Project("LibFateClasher", file("LibFateClasher"))
    .settings(commonSettings: _*)
    .settings(libraryDependencies := fateClasherDeps)
  // Play application module, depending on the FateClasher library.
  lazy val LegendaryCoreProject = play.Project("Legendary-Core", appVersion, coreDeps, path = file("Legendary-Core"))
    .settings(commonSettings: _*)
    .dependsOn(FateClasherProject)
  // Root project aggregates the two modules so sbt tasks run across both.
  lazy val root = Project("Legendary", file("."))
    .settings(commonSettings: _*)
    .aggregate(Seq[ProjectReference](FateClasherProject, LegendaryCoreProject): _*)
}
| Damiya/legendary | project/Build.scala | Scala | apache-2.0 | 2,364 |
/*
* Copyright (c) 2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
import sbt._
/** Central place for resolvers, pinned versions (V) and library coordinates (Libraries). */
object Dependencies {
  // Extra Maven repositories beyond the defaults.
  val resolutionRepos = Seq(
    // For Snowplow and amazon-kinesis-connectors
    "Snowplow Analytics Maven releases repo" at "http://maven.snplow.com/releases/",
    "Snowplow Analytics Maven snapshot repo" at "http://maven.snplow.com/snapshots/",
    // For Scalazon
    "BintrayJCenter" at "http://jcenter.bintray.com",
    // For user-agent-utils
    "user-agent-utils repo" at "https://raw.github.com/HaraldWalker/user-agent-utils/mvn-repo/"
  )
  // Pinned dependency versions, grouped by ecosystem and scope.
  object V {
    // Java
    val logging = "1.1.3"
    val slf4j = "1.7.5"
    val kinesisClient = "1.0.0"
    val kinesisConnector = "1.1.2"
    val elasticsearch = "1.4.4"
    // Scala
    val argot = "1.0.1"
    val config = "1.0.2"
    val scalaUtil = "0.1.0"
    val snowplowCommonEnrich = "0.15.0"
    val scalazon = "0.5"
    val scalaz7 = "7.0.0"
    val snowplowTracker = "0.1.0"
    // Scala (test only)
    val specs2 = "2.2"
    val scalazSpecs2 = "0.1.2"
    // Scala (compile only)
    val commonsLang3 = "3.1"
  }
  // Library coordinates, referencing the versions pinned above.
  object Libraries {
    // Java
    val logging = "commons-logging" % "commons-logging" % V.logging
    val slf4j = "org.slf4j" % "slf4j-simple" % V.slf4j
    val kinesisClient = "com.amazonaws" % "amazon-kinesis-client" % V.kinesisClient
    val kinesisConnector = "com.amazonaws" % "amazon-kinesis-connectors" % V.kinesisConnector
    val elasticsearch = "org.elasticsearch" % "elasticsearch" % V.elasticsearch
    // Scala
    val argot = "org.clapper" %% "argot" % V.argot
    val config = "com.typesafe" % "config" % V.config
    val scalaUtil = "com.snowplowanalytics" % "scala-util" % V.scalaUtil
    val scalazon = "io.github.cloudify" %% "scalazon" % V.scalazon
    val scalaz7 = "org.scalaz" %% "scalaz-core" % V.scalaz7
    val snowplowTracker = "com.snowplowanalytics" %% "snowplow-scala-tracker" % V.snowplowTracker
    // Scala (test only)
    val snowplowCommonEnrich = "com.snowplowanalytics" % "snowplow-common-enrich" % V.snowplowCommonEnrich % "test"
    val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
    val scalazSpecs2 = "org.typelevel" %% "scalaz-specs2" % V.scalazSpecs2 % "test"
    // Scala (compile only)
    val commonsLang3 = "org.apache.commons" % "commons-lang3" % V.commonsLang3 % "compile"
  }
}
| mdavid/lessig-bigdata | lib/snowplow/4-storage/kinesis-elasticsearch-sink/project/Dependencies.scala | Scala | mit | 3,657 |
package org.jetbrains.plugins.scala
package annotator
import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiFile
import org.jetbrains.plugins.scala.annotator.quickfix._
import org.jetbrains.plugins.scala.extensions.{&&, Parent}
import org.jetbrains.plugins.scala.lang.lexer.ScalaModifier
import org.jetbrains.plugins.scala.lang.psi.api.base.ScAnnotation
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.types.ScTypesExt
import org.jetbrains.plugins.scala.lang.psi.types.api._
import org.jetbrains.plugins.scala.lang.psi.types.result._
/**
* Pavel.Fatin, 18.05.2010
*/
/**
 * Annotator for function definitions: reports recursive functions lacking an
 * explicit result type, misused @tailrec annotations, and explicit `return`
 * statements that require a declared return type.
 */
trait FunctionAnnotator {
  self: ScalaAnnotator =>
  import FunctionAnnotator._
  def annotateFunction(function: ScFunctionDefinition, typeAware: Boolean = true)
                      (implicit holder: ScalaAnnotationHolder): Unit = {
    implicit val projectContext = function.projectContext
    // A recursive function without an explicit (or inferable) return type is an error.
    if (!function.hasExplicitType && function.definedReturnType.isLeft) {
      val message = ScalaBundle.message("function.recursive.need.result.type", function.name)
      function.recursiveReferences.foreach {
        holder.createErrorAnnotation(_, message)
      }
    }
    // Validate a @tailrec annotation, if present.
    for {
      annotation <- findTailRecursionAnnotation(function)
    } {
      val removeAnnotationFix = new RemoveAnnotationQuickFix(annotation)
      val functionNameId = function.nameId
      // @tailrec requires the method to be non-overridable (private/final/in object).
      if (!canBeTailRecursive(function)) {
        holder.createErrorAnnotation(
          functionNameId,
          ScalaBundle.message("method.annotated.with.tailrec.is.neither.private.nor.final"),
          Seq(
            new ModifierQuickFix.Add(function, functionNameId, ScalaModifier.Private),
            new ModifierQuickFix.Add(function, functionNameId, ScalaModifier.Final),
            removeAnnotationFix
          )
        )
      }
      if (typeAware) {
        function.recursiveReferencesGrouped match {
          // @tailrec on a method with no recursion at all.
          case references if references.noRecursion =>
            holder.createErrorAnnotation(
              functionNameId,
              ScalaBundle.message("method.annotated.with.tailrec.contains.no.recursive.calls"),
              Seq(removeAnnotationFix)
            )
          // Flag every recursive call that is not in tail position.
          case references =>
            for {
              reference <- references.ordinaryRecursive
              target = reference.getParent match {
                case methodCall: ScMethodCall => methodCall
                case _ => reference
              }
            } yield holder.createErrorAnnotation(
              target,
              ScalaBundle.message("recursive.call.not.in.tail.position"),
              Seq(removeAnnotationFix)
            )
        }
      }
    }
    // An explicit `return` in a `def f = ...` body requires a declared return type.
    val returnUsages = function.returnUsages
    for (usage <- returnUsages) {
      val explicitType = function.hasExplicitType
      val hasAssign = function.hasAssign
      val explicitReturn = usage.isInstanceOf[ScReturn]
      if (explicitReturn && hasAssign && !explicitType) needsTypeAnnotation()
      def needsTypeAnnotation(): Unit = {
        val message = ScalaBundle.message("function.must.define.type.explicitly", function.name)
        // Collect all observed return types so the quick fix can offer their lub.
        val returnTypes = returnUsages.collect {
          case retStmt: ScReturn => retStmt.expr.flatMap(_.`type`().toOption).getOrElse(Any)
          case expr: ScExpression => expr.`type`().getOrAny
        }
        holder.createErrorAnnotation(
          usage.asInstanceOf[ScReturn].keyword,
          message,
          Seq(new AddReturnTypeFix(function, returnTypes.toSeq.lub()))
        )
      }
    }
  }
}
object FunctionAnnotator {
  val TailrecAnnotationFQN = "scala.annotation.tailrec"
  // @tailrec requires a non-overridable method: defined in an object, in a
  // final class, or itself private/final. Non-template parents (e.g. local
  // defs) are always acceptable.
  def canBeTailRecursive(function: ScFunctionDefinition): Boolean = function.getParent match {
    case (_: ScTemplateBody) && Parent(Parent(owner: ScTypeDefinition)) =>
      owner.isInstanceOf[ScObject] ||
        owner.getModifierList.isFinal || {
        function.getModifierList match {
          case list => list.isPrivate || list.isFinal
        }
      }
    case _ => true
  }
  // Finds the @tailrec annotation (resolved against the root package) if present.
  def findTailRecursionAnnotation(function: ScFunctionDefinition): Option[ScAnnotation] =
    function.annotations.find {
      _.typeElement.`type`().exists(_.canonicalText == "_root_." + TailrecAnnotationFQN)
    }
  // Quick fix offered alongside @tailrec errors: simply deletes the annotation.
  private final class RemoveAnnotationQuickFix(annotation: ScAnnotation) extends IntentionAction {
    override def getFamilyName: String = ScalaBundle.message("family.name.remove.tailrec.annotation")
    override def getText: String = getFamilyName
    override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean =
      annotation.isValid
    override def invoke(project: Project, editor: Editor, file: PsiFile): Unit =
      annotation.delete()
    override def startInWriteAction = true
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/FunctionAnnotator.scala | Scala | apache-2.0 | 5,146 |
package org.openstack.api.restful.ceilometer.v2.requests
import org.openstack.api.restful.ceilometer.v2.elements.AlarmState
/**
* @author Antonio Murgia
* @version 21/10/14
*/
/** Payload of a PUT request that sets the state of the alarm identified by `alarm_id`. */
case class AlarmStatePUTRequest(alarm_id: String, state: AlarmState)
| tmnd1991/ceilometerAPI4s | src/main/scala/org/openstack/api/restful/ceilometer/v2/requests/AlarmStatePUTRequest.scala | Scala | apache-2.0 | 257 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import java.io._
import java.lang.reflect.{Field, Method}
import java.security.AccessController
import scala.annotation.tailrec
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.Logging
private[spark] object SerializationDebugger extends Logging {
/**
* Improve the given NotSerializableException with the serialization path leading from the given
* object to the problematic object. This is turned off automatically if
* `sun.io.serialization.extendedDebugInfo` flag is turned on for the JVM.
*/
  def improveException(obj: Any, e: NotSerializableException): NotSerializableException = {
    // Only run the (reflection-based) debugger when the JVM's own extended
    // serialization debug info is off and reflection setup succeeded.
    if (enableDebugging && reflect != null) {
      try {
        new NotSerializableException(
          e.getMessage + "\\nSerialization stack:\\n" + find(obj).map("\\t- " + _).mkString("\\n"))
      } catch {
        case NonFatal(t) =>
          // Fall back to old exception
          logWarning("Exception in serialization debugger", t)
          e
      }
    } else {
      e
    }
  }
/**
* Find the path leading to a not serializable object. This method is modeled after OpenJDK's
* serialization mechanism, and handles the following cases:
* - primitives
* - arrays of primitives
* - arrays of non-primitive objects
* - Serializable objects
* - Externalizable objects
* - writeReplace
*
* It does not yet handle writeObject override, but that shouldn't be too hard to do either.
*/
  private[serializer] def find(obj: Any): List[String] = {
    // A fresh debugger per call: its visited-set must not be shared across finds.
    new SerializationDebugger().visit(obj, List.empty)
  }
  // Disabled when the JVM flag sun.io.serialization.extendedDebugInfo is set,
  // because the JVM then produces its own serialization-path diagnostics.
  private[serializer] var enableDebugging: Boolean = {
    !AccessController.doPrivileged(new sun.security.action.GetBooleanAction(
      "sun.io.serialization.extendedDebugInfo")).booleanValue()
  }
private class SerializationDebugger {
/** A set to track the list of objects we have visited, to avoid cycles in the graph. */
private val visited = new mutable.HashSet[Any]
/**
* Visit the object and its fields and stop when we find an object that is not serializable.
* Return the path as a list. If everything can be serialized, return an empty list.
*/
def visit(o: Any, stack: List[String]): List[String] = {
if (o == null) {
List.empty
} else if (visited.contains(o)) {
List.empty
} else {
visited += o
o match {
// Primitive value, string, and primitive arrays are always serializable
case _ if o.getClass.isPrimitive => List.empty
case _: String => List.empty
case _ if o.getClass.isArray && o.getClass.getComponentType.isPrimitive => List.empty
// Traverse non primitive array.
case a: Array[_] if o.getClass.isArray && !o.getClass.getComponentType.isPrimitive =>
val elem = s"array (class ${a.getClass.getName}, size ${a.length})"
visitArray(o.asInstanceOf[Array[_]], elem :: stack)
case e: java.io.Externalizable =>
val elem = s"externalizable object (class ${e.getClass.getName}, $e)"
visitExternalizable(e, elem :: stack)
case s: Object with java.io.Serializable =>
val elem = s"object (class ${s.getClass.getName}, $s)"
visitSerializable(s, elem :: stack)
case _ =>
// Found an object that is not serializable!
s"object not serializable (class: ${o.getClass.getName}, value: $o)" :: stack
}
}
}
private def visitArray(o: Array[_], stack: List[String]): List[String] = {
var i = 0
while (i < o.length) {
val childStack = visit(o(i), s"element of array (index: $i)" :: stack)
if (childStack.nonEmpty) {
return childStack
}
i += 1
}
return List.empty
}
/**
* Visit an externalizable object.
* Since writeExternal() can choose to add arbitrary objects at the time of serialization,
* the only way to capture all the objects it will serialize is by using a
* dummy ObjectOutput that collects all the relevant objects for further testing.
*/
private def visitExternalizable(o: java.io.Externalizable, stack: List[String]): List[String] =
{
val fieldList = new ListObjectOutput
o.writeExternal(fieldList)
val childObjects = fieldList.outputArray
var i = 0
while (i < childObjects.length) {
val childStack = visit(childObjects(i), "writeExternal data" :: stack)
if (childStack.nonEmpty) {
return childStack
}
i += 1
}
return List.empty
}
private def visitSerializable(o: Object, stack: List[String]): List[String] = {
// An object contains multiple slots in serialization.
// Get the slots and visit fields in all of them.
val (finalObj, desc) = findObjectAndDescriptor(o)
// If the object has been replaced using writeReplace(),
// then call visit() on it again to test its type again.
if (!finalObj.eq(o)) {
return visit(finalObj, s"writeReplace data (class: ${finalObj.getClass.getName})" :: stack)
}
// Every class is associated with one or more "slots", each slot refers to the parent
// classes of this class. These slots are used by the ObjectOutputStream
// serialization code to recursively serialize the fields of an object and
// its parent classes. For example, if there are the following classes.
//
// class ParentClass(parentField: Int)
// class ChildClass(childField: Int) extends ParentClass(1)
//
// Then serializing the an object Obj of type ChildClass requires first serializing the fields
// of ParentClass (that is, parentField), and then serializing the fields of ChildClass
// (that is, childField). Correspondingly, there will be two slots related to this object:
//
// 1. ParentClass slot, which will be used to serialize parentField of Obj
// 2. ChildClass slot, which will be used to serialize childField fields of Obj
//
// The following code uses the description of each slot to find the fields in the
// corresponding object to visit.
//
val slotDescs = desc.getSlotDescs
var i = 0
while (i < slotDescs.length) {
val slotDesc = slotDescs(i)
if (slotDesc.hasWriteObjectMethod) {
// If the class type corresponding to current slot has writeObject() defined,
// then its not obvious which fields of the class will be serialized as the writeObject()
// can choose arbitrary fields for serialization. This case is handled separately.
val elem = s"writeObject data (class: ${slotDesc.getName})"
val childStack = visitSerializableWithWriteObjectMethod(finalObj, elem :: stack)
if (childStack.nonEmpty) {
return childStack
}
} else {
// Visit all the fields objects of the class corresponding to the current slot.
val fields: Array[ObjectStreamField] = slotDesc.getFields
val objFieldValues: Array[Object] = new Array[Object](slotDesc.getNumObjFields)
val numPrims = fields.length - objFieldValues.length
slotDesc.getObjFieldValues(finalObj, objFieldValues)
var j = 0
while (j < objFieldValues.length) {
val fieldDesc = fields(numPrims + j)
val elem = s"field (class: ${slotDesc.getName}" +
s", name: ${fieldDesc.getName}" +
s", type: ${fieldDesc.getType})"
val childStack = visit(objFieldValues(j), elem :: stack)
if (childStack.nonEmpty) {
return childStack
}
j += 1
}
}
i += 1
}
return List.empty
}
/**
* Visit a serializable object which has the writeObject() defined.
* Since writeObject() can choose to add arbitrary objects at the time of serialization,
* the only way to capture all the objects it will serialize is by using a
* dummy ObjectOutputStream that collects all the relevant fields for further testing.
* This is similar to how externalizable objects are visited.
*/
  private def visitSerializableWithWriteObjectMethod(
      o: Object, stack: List[String]): List[String] = {
    // Serialize into a dummy stream that records every object handed to it.
    val innerObjectsCatcher = new ListObjectOutputStream
    var notSerializableFound = false
    try {
      innerObjectsCatcher.writeObject(o)
    } catch {
      // NotSerializableException is an IOException, so any IOException here
      // means something inside the object graph failed to serialize.
      case io: IOException =>
        notSerializableFound = true
    }
    // If something was not serializable, then visit the captured objects.
    // Otherwise, all the captured objects are safely serializable, so no need to visit them.
    // As an optimization, just added them to the visited list.
    if (notSerializableFound) {
      val innerObjects = innerObjectsCatcher.outputArray
      var k = 0
      while (k < innerObjects.length) {
        // Recurse into each captured object; the first non-empty child stack
        // pinpoints the non-serializable culprit.
        val childStack = visit(innerObjects(k), stack)
        if (childStack.nonEmpty) {
          return childStack
        }
        k += 1
      }
    } else {
      visited ++= innerObjectsCatcher.outputArray
    }
    // Nothing non-serializable found beneath this object.
    return List.empty
  }
}
/**
* Find the object to serialize and the associated [[ObjectStreamClass]]. This method handles
* writeReplace in Serializable. It starts with the object itself, and keeps calling the
* writeReplace method until there is no more.
*/
@tailrec
private def findObjectAndDescriptor(o: Object): (Object, ObjectStreamClass) = {
val cl = o.getClass
val desc = ObjectStreamClass.lookupAny(cl)
if (!desc.hasWriteReplaceMethod) {
(o, desc)
} else {
// write place
findObjectAndDescriptor(desc.invokeWriteReplace(o))
}
}
/**
* A dummy [[ObjectOutput]] that simply saves the list of objects written by a writeExternal
* call, and returns them through `outputArray`.
*/
private class ListObjectOutput extends ObjectOutput {
private val output = new mutable.ArrayBuffer[Any]
def outputArray: Array[Any] = output.toArray
override def writeObject(o: Any): Unit = output += o
override def flush(): Unit = {}
override def write(i: Int): Unit = {}
override def write(bytes: Array[Byte]): Unit = {}
override def write(bytes: Array[Byte], i: Int, i1: Int): Unit = {}
override def close(): Unit = {}
override def writeFloat(v: Float): Unit = {}
override def writeChars(s: String): Unit = {}
override def writeDouble(v: Double): Unit = {}
override def writeUTF(s: String): Unit = {}
override def writeShort(i: Int): Unit = {}
override def writeInt(i: Int): Unit = {}
override def writeBoolean(b: Boolean): Unit = {}
override def writeBytes(s: String): Unit = {}
override def writeChar(i: Int): Unit = {}
override def writeLong(l: Long): Unit = {}
override def writeByte(i: Int): Unit = {}
}
/** An output stream that emulates /dev/null */
private class NullOutputStream extends OutputStream {
override def write(b: Int) { }
}
/**
* A dummy [[ObjectOutputStream]] that saves the list of objects written to it and returns
* them through `outputArray`. This works by using the [[ObjectOutputStream]]'s `replaceObject()`
* method which gets called on every object, only if replacing is enabled. So this subclass
* of [[ObjectOutputStream]] enabled replacing, and uses replaceObject to get the objects that
* are being serializabled. The serialized bytes are ignored by sending them to a
* [[NullOutputStream]], which acts like a /dev/null.
*/
private class ListObjectOutputStream extends ObjectOutputStream(new NullOutputStream) {
private val output = new mutable.ArrayBuffer[Any]
this.enableReplaceObject(true)
def outputArray: Array[Any] = output.toArray
override def replaceObject(obj: Object): Object = {
output += obj
obj
}
}
/** An implicit class that allows us to call private methods of ObjectStreamClass. */
  implicit class ObjectStreamClassMethods(val desc: ObjectStreamClass) extends AnyVal {
    // One ObjectStreamClass per slot, from the topmost serializable superclass
    // down to the class itself (see the slot discussion in visitSerializable).
    def getSlotDescs: Array[ObjectStreamClass] = {
      reflect.GetClassDataLayout.invoke(desc).asInstanceOf[Array[Object]].map {
        classDataSlot => reflect.DescField.get(classDataSlot).asInstanceOf[ObjectStreamClass]
      }
    }
    // Whether the described class defines its own writeObject method.
    def hasWriteObjectMethod: Boolean = {
      reflect.HasWriteObjectMethod.invoke(desc).asInstanceOf[Boolean]
    }
    // Whether the described class defines a writeReplace method.
    def hasWriteReplaceMethod: Boolean = {
      reflect.HasWriteReplaceMethod.invoke(desc).asInstanceOf[Boolean]
    }
    // Invokes obj's writeReplace() and returns the substituted object.
    def invokeWriteReplace(obj: Object): Object = {
      reflect.InvokeWriteReplace.invoke(desc, obj)
    }
    // Number of object-reference (non-primitive) fields in the described class.
    def getNumObjFields: Int = {
      reflect.GetNumObjFields.invoke(desc).asInstanceOf[Int]
    }
    // Copies obj's object-reference field values into `out`
    // (sized by getNumObjFields; see the caller in visitSerializable).
    def getObjFieldValues(obj: Object, out: Array[Object]): Unit = {
      reflect.GetObjFieldValues.invoke(desc, obj, out)
    }
  }
/**
* Object to hold all the reflection objects. If we run on a JVM that we cannot understand,
* this field will be null and this the debug helper should be disabled.
*/
  private val reflect: ObjectStreamClassReflection = try {
    new ObjectStreamClassReflection
  } catch {
    case e: Exception =>
      // Reflection lookup failed (unexpected JVM internals): leave null so the
      // debug helper is treated as disabled, per the comment above.
      logWarning("Cannot find private methods using reflection", e)
      null
  }
  private class ObjectStreamClassReflection {
    // NOTE(review): setAccessible(true) on java.io internals can throw
    // InaccessibleObjectException on JDK 9+ unless java.base is opened;
    // presumably such failures are absorbed by the try/catch around the
    // `reflect` field -- confirm on target JVMs.
    /** ObjectStreamClass.getClassDataLayout */
    val GetClassDataLayout: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod("getClassDataLayout")
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass.hasWriteObjectMethod */
    val HasWriteObjectMethod: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod("hasWriteObjectMethod")
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass.hasWriteReplaceMethod */
    val HasWriteReplaceMethod: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod("hasWriteReplaceMethod")
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass.invokeWriteReplace */
    val InvokeWriteReplace: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod("invokeWriteReplace", classOf[Object])
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass.getNumObjFields */
    val GetNumObjFields: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod("getNumObjFields")
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass.getObjFieldValues */
    val GetObjFieldValues: Method = {
      val f = classOf[ObjectStreamClass].getDeclaredMethod(
        "getObjFieldValues", classOf[Object], classOf[Array[Object]])
      f.setAccessible(true)
      f
    }
    /** ObjectStreamClass$ClassDataSlot.desc field */
    val DescField: Field = {
      val f = Class.forName("java.io.ObjectStreamClass$ClassDataSlot").getDeclaredField("desc")
      f.setAccessible(true)
      f
    }
  }
}
| andrewor14/iolap | core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala | Scala | apache-2.0 | 15,863 |
package com.es.scala.chapter06
import org.specs2.mutable.Specification
/** Specs2 unit tests for the chapter-6 integer-set hierarchy (EmptySet and friends). */
class SetTest extends Specification {
  "com.es.scala.chapter06.IntSet" should {
    "verify that no element belongs to EmptySet" in {
      new EmptySet().contains(1) must beFalse
    }
    "verify that correct elements belong to NonEmptySet" in {
      val set = new EmptySet
      // incl(3) yields a set containing 3; only 3 should be reported as a member.
      val set1 = set.incl(3)
      set1.contains(1) must beFalse
      set1.contains(3) must beTrue
    }
  }
}
| elephantscale/learning-scala | ScalaByExample/src/test/scala/com/es/scala/chapter06/SetTest.scala | Scala | apache-2.0 | 463 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sat Mar 22 14:39:30 EDT 2014
* @see LICENSE (MIT style license file).
*/
package scalation.random
import scala.math.floor
import scalation.util.{Error, time}
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RNG` abstract class is the base class for all ScalaTion Random Number
* Generators. The subclasses must implement a 'gen' method that generates
* random real numbers in the range (0, 1). They must also implement an 'igen'
* methods to return stream values.
* @param stream the random number stream index
*/
abstract class RNG (stream: Int)
         extends Error
{
    // Fail fast on a bad stream index; valid values are 0 until RandomSeeds.seeds.length.
    if (stream < 0 || stream >= RandomSeeds.seeds.length) {
        flaw ("constructor", "the stream must be in the range 0 to " + (RandomSeeds.seeds.length - 1))
    } // if
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the theoretical mean for the random number generator's 'gen' method,
     *  i.e., the mean of Uniform (0, 1).
     */
    val mean = 0.5
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the probability function (pf), i.e., the probability density
     * function (pdf): 1 on the unit interval, 0 elsewhere.
     * @param z the mass point whose probability density is sought
     */
    def pf (z: Double): Double = if (0.0 <= z && z <= 1.0) 1.0 else 0.0
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the next random number as a real `Double` in the interval (0, 1).
     */
    def gen: Double
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the next stream value as an integer `Int`.
     */
    def igen: Int
} // RNG class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RNGStream` object allows for random selection of streams for applications
* where reproducibility of random numbers is not desired.
*/
object RNGStream
{
    /** Use Java's random number generator to randomly select one of ScalaTion's
     * random number streams: 0 until `RandomSeeds`.seeds.length
     * "If you use the nullary constructor, new Random(), then 'System.currentTimeMillis'
     * will be used for the seed, which is good enough for almost all cases."
     * @see stackoverflow.com/questions/22530702/what-is-seed-in-util-random
     * @see docs.oracle.com/javase/8/docs/api/index.html
     */
    private val javaRNG = new java.util.Random ()
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return a randomly selected random number stream.
     *  Selection is intentionally NOT reproducible across runs (time-based seed).
     */
    def ranStream: Int = javaRNG.nextInt (RandomSeeds.seeds.length)
} // RNGStream
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RNGTest` object conducts three simple tests of the Random Number
* Generators: (1) Speed Test, (2) Means Test and (3) Chi-square Goodness of Fit Test.
* FIX: need to add (3) Variance Test and (4) K-S Goodness of Fit Test.
*/
object RNGTest extends App with Error
{
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform a Means Test: the average of the generated random numbers should
     * be close to the generator's theoretical mean (0.5 for Uniform (0, 1)).
     * @param rn the random number generator to test
     */
    def meansTest (rn: RNG)
    {
        println ("\\nTest the `" + rn.getClass.getSimpleName () + "` random number generator")
        val tries = 5                           // number of independent estimates
        val reps = 10000000                     // samples per estimate
        var sum = 0.0
        for (i <- 0 until tries) {
            time { for (i <- 0 until reps) sum += rn.gen }
            println ("gen: sum = " + sum)
            println ("rn.mean = " + rn.mean + " estimate = " + sum / reps.toDouble)
            sum = 0.0                           // reset accumulator for the next estimate
        } // for
    } // meansTest
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform a Chi-square Goodness of Fit Test. Compare the random number's
     * histogram (generated by repeatedly calling 'gen') to the probability
     * function pf (pdf).
     * @param rn the random number generator to test
     */
    def distrTest (rn: RNG)
    {
        println ("\\nTest the " + rn.getClass.getSimpleName () + " random number generator")
        val nints = 50                          // number of intervals
        val reps = 1000000                      // number of replications
        val e = reps / nints                    // expected count per interval: pf (x)
        val sum = Array.ofDim [Int] (nints)
        for (i <- 0 until reps) {
            val j = floor (rn.gen * nints).toInt         // interval number
            if (0 <= j && j < nints) sum (j) += 1
        } // for
        var chi2 = 0.0                          // sum up for Chi-square statistic
        for (i <- sum.indices) {
            val o = sum(i)                      // observed value: height of histogram
            // FIX: divide in Double; '(o - e)*(o - e) / e' performed integer
            // division (o and e are Int), truncating each term of the statistic.
            chi2 += (o - e) * (o - e) / e.toDouble
            print ("\\tsum (" + i + ") = " + o + " : " + e + " ")
            if (i % 5 == 4) println ()
        } // for
        var n = nints - 1                       // degrees of freedom
        if (n < 2) flaw ("distrTest", "use more intervals to increase the degrees of freedom")
        if (n > 49) n = 49                      // NOTE(review): presumably a Quantile table limit -- confirm
        println ("\\nchi2 = " + chi2 + " : chi2(0.95, " + n + ") = " + Quantile.chiSquareInv (0.95, n))
    } // distrTest
    // Exercise each available uniform generator with both tests.
    val generators = Array (Random (), Random2 (), Random3 ())
    for (g <- generators) {
        meansTest (g)
        distrTest (g)
    } // for
} // RNGTest
| NBKlepp/fda | scalation_1.3/scalation_mathstat/src/main/scala/scalation/random/RNG.scala | Scala | mit | 5,752 |
package com.mesosphere.cosmos.converter
import com.mesosphere.cosmos.label
import com.mesosphere.cosmos.rpc
import com.mesosphere.universe
import com.twitter.bijection.Bijection
object Label {
implicit val labelV1PackageMetadataToRpcV1InstalledPackageInformation:
Bijection[label.v1.model.PackageMetadata, rpc.v1.model.InstalledPackageInformation] = {
Bijection.build(fwd)(rev)
}
private[this] def fwd(x: label.v1.model.PackageMetadata) = {
rpc.v1.model.InstalledPackageInformation(
rpc.v1.model.InstalledPackageInformationPackageDetails(
packagingVersion = x.packagingVersion,
name = x.name,
version = x.version,
maintainer = x.maintainer,
description = x.description,
tags = x.tags,
selected = x.selected,
scm = x.scm,
website = x.website,
framework = x.framework,
preInstallNotes = x.preInstallNotes,
postInstallNotes = x.postInstallNotes,
postUninstallNotes = x.postUninstallNotes,
licenses = x.licenses
),
resourceDefinition = x.images.map(i => universe.v2.model.Resource(images = Some(i)))
)
}
private[this] def rev(x: rpc.v1.model.InstalledPackageInformation) = {
label.v1.model.PackageMetadata(
packagingVersion = x.packageDefinition.packagingVersion,
name = x.packageDefinition.name,
version = x.packageDefinition.version,
maintainer = x.packageDefinition.maintainer,
description = x.packageDefinition.description,
tags = x.packageDefinition.tags,
selected = x.packageDefinition.selected,
scm = x.packageDefinition.scm,
website = x.packageDefinition.website,
framework = x.packageDefinition.framework,
preInstallNotes = x.packageDefinition.preInstallNotes,
postInstallNotes = x.packageDefinition.postInstallNotes,
postUninstallNotes = x.packageDefinition.postUninstallNotes,
licenses = x.packageDefinition.licenses,
images = x.resourceDefinition.flatMap(_.images)
)
}
}
| takirala/cosmos | cosmos-server/src/main/scala/com/mesosphere/cosmos/converter/Label.scala | Scala | apache-2.0 | 2,041 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon Nov 14 2:34:38 EST 2011
* @see LICENSE (MIT style license file).
* @see http://www.scala-lang.org/node/724
*
* The `Int_Exp`, `Long_Exp` and `Double_Exp` classes provide extension methods:
* exponentiation, approximate equals and Unicode comparison operators.
* For efficiency, they are value classes that enrich `Int`, `Long` and 'Double`,
* respectively. The corresponding implicit conversions are in the `math` package object.
* @see stackoverflow.com/questions/14861862/how-do-you-enrich-value-classes-without-overhead
*/
package scalation.math
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Int_Exp` value class adds an exponentiation operator 'x ~^ y' and a
* 'near_eq' operator 'x =~ y' to `Int`.
* The '~^' has higher precedence than '*' or '/'.
* @param self the underlying object to be accessed via the self accessor
*/
class Int_Exp (val self: Int) extends AnyVal
{
def ~^ (y: Int) = scala.math.pow (self, y).toInt
def =~ (y: Double) = near_eq (self, y)
def !=~ (y: Double) = ! near_eq (self, y)
def ≠ (y: Int) = self != y
def ≤ (y: Int) = self <= y
def ≥ (y: Int) = self >= y
} // Int_Exp class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Long_Exp` value class adds an exponentiation operator 'x ~^ y' and a
* 'near_eq' operator 'x =~ y' to `Long`.
* The '~^' has higher precedence than '*' or '/'.
* @param self the underlying object to be accessed via the self accessor
*/
class Long_Exp (val self: Long) extends AnyVal
{
def ~^ (y: Long) = pow (self, y)
def =~ (y: Double) = near_eq (self, y)
def !=~ (y: Double) = ! near_eq (self, y)
def ≠ (y: Long) = self != y
def ≤ (y: Long) = self <= y
def ≥ (y: Long) = self >= y
} // Long_Exp class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Double_Exp` value class adds an exponentiation operator 'x ~^ y' and
* a 'near_eq' operator 'x =~ y' to `Double`.
* The '~^' has higher precedence than '*' or '/'.
* @param self the underlying object to be accessed via the self accessor
*/
class Double_Exp (val self: Double) extends AnyVal
{
def ~^ (y: Double) = scala.math.pow (self, y)
def =~ (y: Double) = near_eq (self, y)
def !=~ (y: Double) = ! near_eq (self, y)
def ≠ (y: Double) = self != y
def ≤ (y: Double) = self <= y
def ≥ (y: Double) = self >= y
} // Double_Exp class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExtensionTest` object is used to test the `Int_Exp`, `Long_Exp` and `Double_Exp`
* classes.
* > run-main scalation.math.ExtensionTest
*/
object ExtensionTest extends App
{
println (2 ~^ 3)
println (2l ~^ 3l)
println (2.0 ~^ 3.0)
} // ExtensionTest object
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/math/Extension.scala | Scala | mit | 3,067 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package porter.model
import porter.util.Hash
object DigestHa1 {
  /**
   * Creates a secret that holds the HA1 value used for authentication
   * with the http digest method.
   *
   * @param name the secret name
   * @param user the account
   * @param realm the authentication realm the HA1 value is bound to
   * @param plainPassword the account's clear-text password
   * @return a Secret whose data is md5(user ":" realm ":" plainPassword), the digest HA1
   */
  def create(name: Ident)(user: Ident, realm: String, plainPassword: String): Secret =
    Secret(name, Hash.md5String(user.name +":"+ realm +":"+ plainPassword))

  // Convenience factory using the default secret name "digestmd5.0".
  def apply(user: Ident, realm: String, plainPassword: String) = create("digestmd5.0")(user, realm, plainPassword)
}
| eikek/porter | api/src/main/scala/porter/model/DigestHa1.scala | Scala | apache-2.0 | 1,215 |
package org.hammerlab.guacamole.readsets.args
import org.kohsuke.args4j.spi.StringArrayOptionHandler
import org.kohsuke.args4j.{Argument, Option => Args4JOption}
/**
* Common command-line arguments for loading in one or more sets of reads, and associating a sample-name with each.
*/
trait Arguments extends Base {
  // Required positional arguments: one or more input file paths.
  @Argument(required = true, multiValued = true, usage = "FILE1 FILE2 FILE3")
  var paths: Array[String] = Array()
  // Optional sample names; presumably one name per path in `paths` -- confirm with callers.
  @Args4JOption(name = "--sample-names", handler = classOf[StringArrayOptionHandler], usage = "name1 ... nameN")
  var sampleNames: Array[String] = Array()
}
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/readsets/args/Arguments.scala | Scala | apache-2.0 | 594 |
package org.dhira.core.containers
import java.text.NumberFormat
import java.util
import java.util.{Random}
import org.aja.dhira.nn.containers.MapFactory.{HashMapFactory, IdentityHashMapFactory}
import org.dhira.core.containers.PriorityQueue
import scala.collection.generic.MapFactory
import scala.collection.mutable
/**
* @author Mageswaran Dhandapani
* @since 0.1 09-Sep-2016
* A map from objects to doubles. Includes convenience methods for getting,
* setting, and incrementing element counts. Objects not in the counter will
* return a count of zero. The counter is backed by a HashMap (unless specified
* otherwise with the MapFactory constructor).
*
* TODO: check getEntry related APIs in Scala env
*
* Quick reference for Scala <-> Java Interoperatibility
* scala.collection.Iterable <=> java.lang.Iterable
* scala.collection.Iterable <=> java.util.Collection
* scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
* scala.collection.mutable.Buffer <=> java.util.List
* scala.collection.mutable.Set <=> java.util.Set
* scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
* scala.collection.mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
* scala.collection.mutable.Vector <=> java.util.ArrayList
* scala.collection.Seq => java.util.List
* scala.collection.mutable.Seq => java.util.List
* scala.collection.Set => java.util.Set
* scala.collection.Map => java.util.Map
* java.util.Properties => scala.collection.mutable.Map[String, String]
* java.util.ArrayList => scala.collection.mutable.ArrayBuffer
*/
@SerialVersionUID(5L)
class Counter[T] extends Serializable {
var entries: scala.collection.mutable.Map[T, Double] = _
var mapFactory: MapFactory[T, Double] = new HashMapFactory[T, Double] //Default type
var defaultValue:Double = 0
var dirty: Boolean = false
var cacheTotal: Double = 0
def getDeflt(): Double = {
return defaultValue
}
def setDeflt(defaultValue: Double) {
this.defaultValue = defaultValue
}
/**
* The elements in the counter.
*
* @return applyTransformToDestination of keys
*/
def keySet(): Set[T] = {
entries.keySet
}
def entrySet(): Set[(T, Double)] = {
entries.toSet
}
/**
* The number of entries in the counter (not the total count -- use
* totalCount() instead).
*/
def size(): Int = {
entries.size
}
/**
* True if there are no entries in the counter (false does not mean
* totalCount > 0)
*/
def isEmpty(): Boolean = {
size == 0
}
/**
* Returns whether the counter contains the given key. Note that this is the
* way to distinguish keys which are in the counter with count zero, and
* those which are not in the counter (and will therefore return count zero
* from getCount().
*
* @param key
* @return whether the counter contains the key
*/
def containsKey(key: T): Boolean = {
return entries.contains(key)
}
/**
* Get the count of the element, or zero if the element is not in the
* counter.
*
* @param key
* @return
*/
def getCount(key: T): Double = {
entries.get(key).getOrElse(defaultValue)
}
/**
* I know, I know, this should be wrapped in a Distribution class, but it's
* such a common use...why not. Returns the MLE prob. Assumes all the counts
* are >= 0.0 and totalCount > 0.0. If the latter is false, return 0.0 (i.e.
* 0/0 == 0)
*
* @author Aria
* @param key
* @return MLE prob of the key
*/
def getProbability(key: T): Double = {
val count: Double = getCount(key)
val total: Double = totalCount
if (total < 0.0) {
throw new RuntimeException("Can't call getProbability() with totalCount < 0.0")
}
if (total > 0.0) count / total else 0.0
}
/**
* Destructively normalize this Counter in place.
*/
def normalize() {
val totalCount: Double = totalCount
import scala.collection.JavaConversions._
for (key <- keySet) {
setCount(key, getCount(key) / totalCount)
}
dirty = true
}
/**
* Set the count for the given key, clobbering any previous count.
*
* @param key
* @param count
*/
def setCount(key: T, count: Double) {
entries.put(key,count)
dirty = true
}
/**
* Set the count for the given key if it is larger than the previous one;
*
* @param key
* @param count
*/
def put(key: T, count: Double, keepHigher: Boolean) {
if (keepHigher && entries.contains(key)) {
val oldCount: Double = entries(key)
if (count > oldCount) {
entries.put(key, count)
}
}
else {
entries.put(key, count)
}
dirty = true
}
/**
* Will return a sample from the counter, will throw exception if any of the
* counts are < 0.0 or if the totalCount() <= 0.0
*/
def sample(rand: Random): T = {
val total: Double = totalCount
if (total <= 0.0) {
throw new RuntimeException(String.format("Attempting to sample() with totalCount() %.3f%n", total))
}
var sum: Double = 0.0
val r: Double = rand.nextDouble
import scala.collection.JavaConversions._
for (entry <- entries.entrySet) {
val count: Double = entry.getValue
val frac: Double = count / total
sum += frac
if (r < sum) {
return entry.getKey
}
}
throw new IllegalStateException("Shoudl've have returned a sample by now....")
}
/**
* Will return a sample from the counter, will throw exception if any of the
* counts are < 0.0 or if the totalCount() <= 0.0
*
* @return
*
* @author aria42
*/
def sample(): T = {
sample(new Random)
}
def removeKey(key: T) {
setCount(key, 0.0)
dirty = true
removeKeyFromEntries(key)
}
/**
* @param key
*/
protected def removeKeyFromEntries(key: T) {
entries.remove(key)
}
/**
* Set's the key's count to the maximum of the current count and val. Always
* sets to val if key is not yet present.
*
* @param key
* @param value
*/
def setMaxCount(key: T, value: Double) {
val mapValue: Double = entries.get(key).getOrElse(defaultValue)
if (value > mapValue) {
setCount(key, value)
dirty = true
}
}
/**
* Set's the key's count to the minimum of the current count and val. Always
* sets to val if key is not yet present.
*
* @param key
* @param value
*/
def setMinCount(key: T, value: Double) {
val value: Double = entries.get(key).getOrElse(defaultValue)
if (value < value) {
setCount(key, value)
dirty = true
}
}
/**
* Increment a key's count by the given amount.
*
* @param key
* @param increment
*/
def incrementCount(key: T, increment: Double): Double = {
val newVal: Double = getCount(key) + increment
setCount(key, newVal)
dirty = true
newVal
}
/**
* Increment each element in a given collection by a given amount.
*/
def incrementAll(collection: Iterable[_ <: T], count: Double) {
for (key <- collection) {
incrementCount(key, count)
}
dirty = true
}
def incrementAll[U <: T](counter: Counter[U]) {
for (key <- counter.keySet()) {
val count: Double = counter.getCount(key)
incrementCount(key, count)
}
dirty = true
}
/**
* Finds the total of all counts in the counter. This implementation
* iterates through the entire counter every time this method is called.
*
* @return the counter's total
*/
def totalCount(): Double = {
if (!dirty) {
return cacheTotal
}
var total: Double = 0.0
import scala.collection.JavaConversions._
for (entry <- entries.entrySet) {
total += entry.getValue
}
cacheTotal = total
dirty = false
return total
}
def getSortedKeys(): List[T] = {
entries.keys.toList
}
/**
* Finds the key with maximum count. This is a linear operation, and ties
* are broken arbitrarily.
*
* @return a key with minumum count
*/
def argMax(): T = {
var maxCount: Double = Double.NegativeInfinity
var maxKey: T = null.asInstanceOf[T] //???
import scala.collection.JavaConversions._
for (entry <- entries.entrySet) {
if (entry.getValue > maxCount || maxKey == null) {
maxKey = entry.getKey
maxCount = entry.getValue
}
}
maxKey
}
def min: Double = {
return maxMinHelp(false)
}
def max: Double = {
return maxMinHelp(true)
}
private def maxMinHelp(max: Boolean): Double = {
var maxCount: Double = if (max) Double.NegativeInfinity else Double.PositiveInfinity
import scala.collection.JavaConversions._
for (entry <- entries.entrySet) {
if ((max && entry.getValue > maxCount) || (!max && entry.getValue < maxCount)) {
maxCount = entry.getValue
}
}
return maxCount
}
/**
* Returns a string representation with the keys ordered by decreasing
* counts.
*
* @return string representation
*/
override def toString: String = {
toString(keySet.size)
}
def toStringSortedByKeys: String = {
val sb: StringBuilder = new StringBuilder("[")
val f: NumberFormat = NumberFormat.getInstance
f.setMaximumFractionDigits(5)
var numKeysPrinted: Int = 0
import scala.collection.JavaConversions._
for (element <- new java.util.TreeSet[T](keySet)) {
sb.append(element.toString)
sb.append(" : ")
sb.append(f.format(getCount(element)))
if (numKeysPrinted < size - 1) sb.append(", ")
numKeysPrinted += 1
}
if (numKeysPrinted < size) sb.append("...")
sb.append("]")
return sb.toString
}
/**
* Returns a string representation which includes no more than the
* maxKeysToPrint elements with largest counts.
*
* @param maxKeysToPrint
* @return partial string representation
*/
def toString(maxKeysToPrint: Int): String = {
//return asPriorityQueue.toString(maxKeysToPrint, false) TODO implement
new String("TODO")
}
/**
* Returns a string representation which includes no more than the
* maxKeysToPrint elements with largest counts and optionally prints
* one element per line.
*
* @param maxKeysToPrint
* @return partial string representation
*/
def toString(maxKeysToPrint: Int, multiline: Boolean): String = {
//asPriorityQueue.toString(maxKeysToPrint, multiline) TODO implement
new String("TODO")
}
def this(mf: MapFactory[T, Double]) {
this()
this.mapFactory = mf
this.entries = mapFactory.buildMap
}
def this(identityHashMap: Boolean) {
this(if (identityHashMap) new MapFactory.IdentityHashMapFactory[T,Double]
else new HashMapFactory[T, Double])
}
def this(mapCounts: Map[_ <: T, Double]) {
this()
this.entries = new mutable.HashMap[T, Double]
import scala.collection.JavaConversions._
for (entry <- mapCounts.entrySet) {
incrementCount(entry.getKey, entry.getValue)
}
}
def this(counter: Counter[T]) {
this()
incrementAll(counter)
}
def this(collection: Iterable[_ <: T]) {
this()
incrementAll(collection, 1.0)
}
def pruneKeysBelowThreshold(cutoff: Double) {
entries = entries.filter(kv => kv._2 < cutoff)
dirty = true
}
def getEntrySet(): Set[(T, Double)] = {
return entries.toSet
}
def isEqualTo(counter: Counter): Boolean = {
var tmp: Boolean = true
val bigger: Counter = if (counter.size > size) counter else this
import scala.collection.JavaConversions._
for (e <- bigger.keySet) {
tmp &= counter.getCount(e) eq getCount(e)
}
return tmp
}
def clear {
entries = mapFactory.buildMap
dirty = true
}
/**
* Builds a priority queue whose elements are the counter's elements, and
* whose priorities are those elements' counts in the counter.
*/
// def asPriorityQueue: PriorityQueue[T] = {
// val pq: PriorityQueue[T] = new PriorityQueue[T](entries.size)
// import scala.collection.JavaConversions._
// for (entry <- entries.entrySet) {
// pq.add(entry.getKey, entry.getValue)
// }
// return pq
// }
def asPriorityQueue() = {
import scala.collection.immutable.ListMap
// low to high
ListMap(entries.toSeq.sortWith(_._2 < _._2):_*).toMap
}
/**
* Warning: all priorities are the negative of their counts in the counter
* here
*
* @return
*/
// def asMinPriorityQueue: PriorityQueue[T] = {
// val pq: PriorityQueue[T] = new PriorityQueue[T](entries.size)
// import scala.collection.JavaConversions._
// for (entry <- entries.entrySet) {
// pq.add(entry.getKey, -entry.getValue)
// }
// return pq
// }
def asMinPriorityQueue() = {
import scala.collection.immutable.ListMap
// high to low
ListMap(entries.toSeq.sortWith(_._2 > _._2):_*).toMap
}
def keepTopNKeys(keepN: Int) {
keepKeysHelper(keepN, true)
}
def keepBottomNKeys(keepN: Int) {
keepKeysHelper(keepN, false)
}
private def keepKeysHelper(keepN: Int, top: Boolean) {
val tmp: Counter[T] = new Counter[T]()
var n: Int = 0
val iter = mutable.Iterable(if (top) asPriorityQueue else asMinPriorityQueue)
iter.take(keepN) //TODO check this!
// for (e <- mutable.Iterable(if (top) asPriorityQueue else asMinPriorityQueue)) {
// if (n <= keepN) tmp.setCount(e, getCount(e))
// n += 1
// }
clear
incrementAll(tmp)
dirty = true
}
/**
* Sets all counts to the given value, but does not remove any keys
*/
def setAllCounts(value: Double) {
import scala.collection.JavaConversions._
for (e <- keySet) {
setCount(e, value)
}
}
def dotProduct(other: Counter[T]): Double = {
val thizCounts: List[Double] = getEntrySet().map(_._2).filter(_ != 0.0).toList
val otherCounts: List[Double] = other.getEntrySet().map(_._2).filter(_ != 0.0).toList
thizCounts.zip(otherCounts).map(xy => xy._1 * xy._2).sum //TODO test
}
def scale(c: Double) {
entries.map(_._2 * c)
}
  // Returns a new counter holding every entry of this one scaled by c.
  // NOTE(review): `new Counter[T]()` runs the primary constructor, which leaves
  // `entries` uninitialized (null per the field declaration), so setCount below
  // would NPE at runtime -- confirm, and initialize `entries` in the primary
  // constructor before relying on this method.
  def scaledClone(c: Double): Counter[T] = {
    val newCounter: Counter[T] = new Counter[T]()
    entrySet().map(kv => newCounter.setCount(kv._1, kv._2 * c))
    newCounter //TODO test
  }
def difference(counter: Counter[T]): Counter[T] = {
val clone: Counter[T] = new Counter[T](this)
for (key <- counter.keySet) {
val count: Double = counter.getCount(key)
clone.incrementCount(key, -1 * count)
}
clone
}
def toLogSpace: Counter[T] = {
val newCounter: Counter[T] = new Counter[T](this)
import scala.collection.JavaConversions._
for (key <- newCounter.keySet) {
newCounter.setCount(key, Math.log(getCount(key)))
}
return newCounter
}
def approxEquals(other: Counter, tol: Double): Boolean = {
import scala.collection.JavaConversions._
for (key <- keySet) {
if (Math.abs(getCount(key) - other.getCount(key)) > tol) return false
}
import scala.collection.JavaConversions._
for (key <- other.keySet) {
if (Math.abs(getCount(key) - other.getCount(key)) > tol) return false
}
return true
}
  /** Manually overrides the dirty flag (e.g. after external mutation of the entries). */
  def setDirty(dirty: Boolean) {
    this.dirty = dirty
  }
  /**
   * Renders the counter as one "key<TAB>count" line per key,
   * in the order produced by getSortedKeys.
   */
  def toStringTabSeparated: String = {
    val sb: StringBuilder = new StringBuilder
    import scala.collection.JavaConversions._
    for (key <- getSortedKeys) {
      sb.append(key.toString + "\\t" + getCount(key) + "\\n")
    }
    return sb.toString
  }
def canEqual(a: Any) = a.isInstanceOf[Counter[T]]
override def equals(other: Any): Boolean = {
other match {
case that: Counter[T] => getClass().ne(that.getClass) &&
dirty != that.dirty && that.cacheTotal != cacheTotal &&
that.defaultValue != defaultValue &&
!(if (entries != null) !(entries == that.entries) else that.entries != null)
case _ => false
} //TODO check this!
}
  /**
   * Hash derived from entries, the dirty flag, cacheTotal, the map factory
   * and defaultValue, combined with the conventional 31-multiplier scheme.
   */
  override def hashCode: Int = {
    var result: Int = 0
    var temp: Long = 0L
    result = if (entries != null) entries.hashCode else 0
    result = 31 * result + (if (dirty) 1 else 0)
    // Fold the double's raw bits into the hash, high word XOR low word
    // (the same scheme as java.lang.Double.hashCode).
    temp = java.lang.Double.doubleToLongBits(cacheTotal)
    result = 31 * result + (temp ^ (temp >>> 32)).toInt
    result = 31 * result + (if (mapFactory != null) mapFactory.hashCode else 0)
    temp = java.lang.Double.doubleToLongBits(defaultValue)
    result = 31 * result + (temp ^ (temp >>> 32)).toInt
    return result
  }
}
| Mageswaran1989/aja | src/main/scala/org/aja/dhira/src/main/scala/org/dhira/core/containers/Counter.scala | Scala | apache-2.0 | 16,488 |
package repository
import repository.hashmap.HashMapRepository
import model.Url
import repository.hashmap.HashMapUrlRepository
import org.apache.commons.lang.RandomStringUtils
import org.junit.Test
import junit.framework.Assert
/**
 * Exercises HashMapUrlRepository through the shared repository test base,
 * adding an update round-trip check on the Url.path field.
 */
class HashMapUrlRepositoryTest extends HashMapRepositoryTestBase[Url] {
  lazy val repository = new HashMapUrlRepository

  // Persist a Url with a random 128-char ASCII path.
  def createEntity: Url = target.save(new Url(path = RandomStringUtils.randomAscii(128)))

  def target: HashMapUrlRepository = repository

  @Test
  def testUpdate = {
    // Save an entity, overwrite its path, and verify the stored copy changed.
    val persisted = target.save(createEntity)
    val replacementPath = RandomStringUtils.randomAscii(128)
    target.save(persisted.copy(path = replacementPath))
    val all = target.findAll()
    Assert.assertEquals(1, all.size)
    Assert.assertEquals(replacementPath, all(0).path)
  }
}
} | tiborbotos/domino | domino-crawler/src/test/scala/repository/HashMapUrlRepositoryTest.scala | Scala | lgpl-3.0 | 818 |
/*
* Copyright 2014 Nick Edwards.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.njeuk.dbmapper.macros
/**
* ScalaToSqlNameConversion provides the mapping from the Scala identifier to the Database identifier
*
* @note Really need to make this more flexible for different naming schemes.
* Maybe use Twitter's Scala Eval code to make pass in the algorithm, not sure
* on the impact on compile speed. Don't want to invent a DSL for it. Don't think
* Scala's RegEx is able to do it either.
*
*/
object ScalaToSqlNameConversion {

  /**
   * Converts a Scala identifier to its lowercase snake_case database form.
   *
   * camelCase  -> camel_case
   * PascalCase -> pascal_case
   * ID         -> id
   *
   * @param scalaName the scala identifier
   * @return the identifier in the database
   */
  def convert(scalaName: String): String =
    scalaName.length match {
      case 0 => ""
      case 1 => scalaName.toLowerCase
      case _ =>
        // Insert '_' before each uppercase letter, except within the first
        // two characters (so short all-caps names like "ID" stay one word).
        val sb = new StringBuilder
        for (ch <- scalaName) {
          if (ch.isUpper && sb.length > 1) sb.append('_')
          sb.append(ch)
        }
        sb.toString.toLowerCase
    }
}
| njeuk/dbmapper-macros | src/main/scala/com/github/njeuk/dbmapper/macros/ScalaToSqlNameConversion.scala | Scala | apache-2.0 | 1,711 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eleflow.sparknotebook
import java.io.{FileNotFoundException, InputStream, OutputStream}
import java.net.URI
import com.amazonaws.services.s3.model.{GetObjectRequest, ObjectMetadata, PutObjectRequest, S3Object}
import com.amazonaws.services.s3.{AmazonS3, AmazonS3Client}
import eleflow.sparknotebook.data.Dataset
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileUtil, FileSystem, Path}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2
import org.apache.spark.{Logging, SparkConf, SparkContext}
import scala.annotation.tailrec
import scala.sys.process._
import scala.util.Try
import scala.util.matching.Regex
/**
 * Mutable, process-wide configuration for EC2 cluster provisioning and
 * Spark tuning. Fields are read by SparkNotebookContext when launching or
 * connecting to a cluster.
 */
object ClusterSettings {
  var kryoBufferMaxSize: Option[String] = None
  var maxResultSize = "2g"
  var masterInstanceType = "r3.large"
  var coreInstanceType = "r3.large"
  var coreInstanceCount = 3
  var spotPriceFactor: Option[String] = Some("1.3")
  var ec2KeyName: Option[String] = None
  var hadoopVersion = "2"
  var clusterName = "SparkNotebookCluster"
  var region: Option[String] = None
  var profile: Option[String] = None
  var resume = false
  var executorMemory: Option[String] = None
  var defaultParallelism: Option[Int] = None
  var master: Option[String] = None

  /**
   * Number of cores per slave, inferred from the EC2 instance-type suffix.
   *
   * Cases are ordered most-specific first: "…2xlarge", "…4xlarge" and
   * "…8xlarge" also end with "xlarge", so the generic "xlarge" case must
   * come last. (The previous ordering listed "xlarge" first, making every
   * *xlarge instance report 4 cores.)
   */
  def slavesCores = ClusterSettings.coreInstanceType match {
    case s: String if s.endsWith("8xlarge") => 32
    case s: String if s.endsWith("4xlarge") => 16
    case s: String if s.endsWith("2xlarge") => 8
    case s: String if s.endsWith("xlarge") => 4
    case _ => 2
  }

  /** Total worker cores across the cluster. */
  def getNumberOfCores = ClusterSettings.coreInstanceCount * slavesCores
}
/**
* User: paulomagalhaes
* Date: 8/15/14 12:24 PM
*/
class SparkNotebookContext(@transient sparkConf: SparkConf) extends Serializable with Logging {
val version = SparkNotebookVersion.version
protected def this(sparkConf: SparkConf, data: String) = this(sparkConf)
@transient protected lazy val s3Client: AmazonS3 = new AmazonS3Client()
@transient protected var sc: Option[SparkContext] = None
@transient var _sqlContext: Option[HiveContext] = None
private var _masterHost: Option[String] = None
protected val basePath: String = "/"
  /**
   * Lazily creates (and caches) the SparkContext: connects to the
   * pre-provisioned master when ClusterSettings.master is set, otherwise
   * launches a new EC2 cluster first. The project jars are added to the
   * context before it is cached.
   */
  def sparkContext(): SparkContext = sc getOrElse {
    val context = if (ClusterSettings.master.isDefined) createSparkContextForProvisionedCluster(sparkConf)
    else createSparkContextForNewCluster(sparkConf)
    addClasspathToSparkContext(context)
    sc = Some(context)
    context
  }
def addClasspathToSparkContext(context: SparkContext) {
val jodaJar = "joda-time.joda-time-.*jar".r
val sparkNotebookContextJar = "eleflow.sparknotebook-.*jar".r
val guavaJar = "com.google.guava.*".r
val mySqlDriver = "mysql-connector-java.*".r
val urls = this.getClass().getClassLoader().asInstanceOf[java.net.URLClassLoader].getURLs
val jarUrls = urls.filter(url =>
jodaJar.findFirstIn(url.getFile) != None
|| sparkNotebookContextJar.findFirstIn(url.getFile) != None
|| guavaJar.findFirstIn(url.getFile) != None
|| mySqlDriver.findFirstIn(url.getFile) != None)
jarUrls.foreach { url =>
logInfo(s"adding ${url.getPath} to spark context jars")
context.addJar(url.getPath)
}
}
  /**
   * Host name of the cluster master; lazily provisions a cluster on first
   * access when no master is known yet.
   */
  def masterHost(): String = {
    return _masterHost match {
      case Some(host) => host
      case None => {
        initHostNames
        _masterHost.get
      }
    }
  }
  // Launches the cluster (spark_ec2.py) and records its master host.
  def initHostNames {
    _masterHost = createCluster();
  }
  /** Allows callers to point this context at an already-running master. */
  def masterHost_=(host: String): Unit = _masterHost = Some(host)
  /**
   * Lazily creates (and caches) the HiveContext; on first use also starts a
   * Thrift JDBC server bound to it.
   */
  def sqlContext(): HiveContext = {
    _sqlContext match {
      case None => {
        _sqlContext = Some(new HiveContext(sparkContext));
        HiveThriftServer2.startWithContext(_sqlContext.get)
        _sqlContext.get
      }
      case Some(ctx) => ctx
    }
  }
def createSparkContextForNewCluster(conf: SparkConf): SparkContext = {
log.info(s"connecting to $masterHost")
conf.setMaster(s"spark://$masterHost:7077")
confSetup(conf)
}
private def confSetup(conf: SparkConf): SparkContext = {
ClusterSettings.defaultParallelism.map(value => conf.set("spark.default.parallelism", value.toString))
ClusterSettings.kryoBufferMaxSize.map(value => conf.set("spark.kryoserializer.buffer.max.mb", value.toString))
//according to keo, in Making Sense of Spark Performance webcast, this codec is better than default
conf.set("spark.io.compression.codec", "lzf")
conf.set("spark.driver.maxResultSize", ClusterSettings.maxResultSize)
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
ClusterSettings.executorMemory.foreach(conf.set("spark.executor.memory", _))
val defaultConfStream = this.getClass.getClassLoader.getResourceAsStream("spark-defaults.conf")
if (defaultConfStream != null) {
import scala.collection.JavaConversions._
val defaultConf = IOUtils.readLines(defaultConfStream)
defaultConf.map { line =>
val keyValue = line.split("\\\\s+")
if (keyValue.size == 2)
conf.set(keyValue(0), keyValue(1))
}
}
//according to keo, in Making Sense of Spark Performance webcast, this codec is better than default
conf.set("spark.io.compression.codec", "lzf")
ClusterSettings.defaultParallelism.map(value => conf.set("spark.default.parallelism", value.toString))
ClusterSettings.kryoBufferMaxSize.map(value => conf.set("spark.kryoserializer.buffer.max.mb", value.toString))
conf.set("spark.driver.maxResultSize", ClusterSettings.maxResultSize)
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
ClusterSettings.executorMemory.foreach(conf.set("spark.executor.memory", _))
println("sparkcontext")
new SparkContext(conf)
}
def createSparkContextForProvisionedCluster(conf: SparkConf): SparkContext = {
log.info("connecting to localhost")
conf.setMaster(ClusterSettings.master.get)
confSetup(conf)
}
def shellRun(command: Seq[String]) = {
val out = new StringBuilder
val logger = ProcessLogger(
(o: String) => {
out.append(o);
logInfo(o)
},
(e: String) => {
println(e);
logInfo(e)
})
command ! logger
out.toString()
}
def createCluster(): Option[String] = {
val path = getClass.getResource(s"${basePath}spark_ec2.py").getPath
import ClusterSettings._
val mandatory = Seq(path,
"--hadoop-major-version", hadoopVersion,
"--master-instance-type", masterInstanceType,
"--slaves", coreInstanceCount.toString,
"--instance-type", coreInstanceType)
val command = mandatory ++ (ec2KeyName match {
case None => Seq[String]()
case Some(ec2KeyName) => Seq("--key-pair", ec2KeyName)
}) ++ (spotPriceFactor match {
case None => Seq[String]()
case Some(spotPrice) => Seq("--spot-price", spotPrice.toString)
}) ++ (region match {
case None => Seq[String]()
case Some(region) => Seq("--region", region.toString)
}) ++ (profile match {
case None => Seq[String]()
case Some(profile) => Seq("--profile", profile.toString)
}) ++ (if (resume) Seq("--resume") else Seq())
val output = shellRun((command ++ Seq("launch", clusterName)))
val pattern = new Regex("Spark standalone cluster started at http://([^:]+):8080")
val host = pattern.findAllIn(output).matchData.map(_.group(1)).next
return Some(host)
}
def terminate() {
clearContext
val path = getClass.getResource(s"${basePath}spark_ec2.py").getPath
import ClusterSettings._
val output = shellRun(Seq(path, "destroy", clusterName))
_masterHost = None
ClusterSettings.resume = false
}
def clusterInfo() {
val path = getClass.getResource(s"${basePath}spark_ec2.py").getPath
import ClusterSettings._
val output = shellRun(Seq(path, "get-master", clusterName))
}
def clearContext {
ClusterSettings.resume = true
sc.map {
f =>
f.cancelAllJobs()
f.stop()
}
_sqlContext = None
sc = None
}
def reconnect(): Unit = {
sc.map(_.stop())
sc = None
_sqlContext = None
}
  /**
   * Recursively lists every file (not directory) under `path`, returned as
   * fully-qualified path strings. Traversal is iterative (tail-recursive)
   * to cope with deep trees.
   */
  def getAllFilesRecursively(path: Path): Seq[String] = {
    val fs = path.getFileSystem(new Configuration)
    @tailrec
    def iter(fs: FileSystem, paths: Seq[Path], result: Seq[String]): Seq[String] = paths match {
      case path :: tail =>
        val children: Seq[FileStatus] = try {
          fs.listStatus(path)
        } catch {
          case e: FileNotFoundException =>
            // listStatus throws FNFE if the dir is empty
            Seq.empty[FileStatus]
        }
        // Accumulate files; queue directories for later expansion.
        val (files, directories) = children.partition(_.isFile)
        iter(fs, tail ++ directories.map(_.getPath), files.map(_.getPath.toString) ++ result)
      case _ =>
        result
    }
    iter(fs, Seq(path), Seq())
  }
def copyDir(input: String, output: String): Unit = {
val from = createPathInstance(input)
val files = getAllFilesRecursively(from)
val to = output.replaceAll(new URI(input).getPath, "")
copyDir(files, to)
}
def copyDir(inputFiles: Seq[String], output: String): Unit = {
sparkContext.parallelize(inputFiles).foreach { inputFile =>
val from = new URI(inputFile)
copy(inputFile, s"$output/${from.getPath}")
}
}
def copy(input: String, output: String): Unit = {
val from = new URI(input)
val to = new URI(output)
val fromScheme = from.getScheme
val toScheme = to.getScheme
val conf = new Configuration()
(fromScheme, toScheme) match {
case ("s3n" | "s3", "s3n" | "s3") => ???
case (fromAddr, _) if (fromAddr.startsWith("s3")) => {
val outputPath = createPathInstance(output)
val fs = createPathInstance(output).getFileSystem(conf)
copyFromS3(from, outputPath, fs)
}
case _ => {
val srcPath = createPathInstance(input)
val srcFs = srcPath.getFileSystem(conf)
val dstPath = createPathInstance(output)
val dstFs = dstPath.getFileSystem(conf)
FileUtil.copy(srcFs, srcPath, dstFs, dstPath, false, conf)
}
}
}
def fs(pathStr: String): FileSystem = {
val path = createPathInstance(pathStr)
path.getFileSystem(new Configuration)
}
def sql(sql: String) = {
sqlContext().sql(sql)
}
protected def copyFromS3(input: URI, path: Path, fs: FileSystem): Unit = {
val rangeObjectRequest: GetObjectRequest = new GetObjectRequest(input.getHost, input.getPath.substring(1))
val inputStream: Try[InputStream] = Try {
val objectPortion: S3Object = s3Client.getObject(rangeObjectRequest)
objectPortion.getObjectContent()
}
inputStream.map {
in =>
val copyResult = Try(fs.create(path)).flatMap {
out =>
val copyResult = copyStreams(in, out)
out.close
copyResult
}
in.close
copyResult
}.recover {
case e: Exception => throw e
}
}
protected def createPathInstance(input: String) = new Path(input)
protected def copyStreams(in: InputStream, out: OutputStream) = Try(IOUtils.copy(in, out))
protected def copyToS3(input: Path, bucket: String, fileName: String): Unit = {
val objRequest = new PutObjectRequest(bucket, fileName, readFromHDFS(input), new ObjectMetadata())
s3Client.putObject(objRequest)
}
private def readFromHDFS(input: Path) = {
val fs = input.getFileSystem(new Configuration)
fs.open(input)
}
def load(file: String, separator: String = ",") = {
Dataset(this, file, separator)
}
}
| eleflow/sparknotebook | src/main/scala/eleflow/sparknotebook/SparkNotebookContext.scala | Scala | apache-2.0 | 12,249 |
package adt.bson.mongo.bulk
import adt.bson.BsonObject
import scala.language.implicitConversions
/**
* An abstract base class for a write request (substitutes [[com.mongodb.bulk.WriteRequest]]).
*/
sealed trait WriteRequest {
  /** Converts this ADT request into the corresponding Java driver request. */
  def toJavaWriteRequest: JavaWriteRequest
}
object WriteRequest {
  /**
   * Implicitly converts a Java driver request into its ADT counterpart.
   * NOTE(review): a JavaUpdateRequest whose getType is neither UPDATE nor
   * REPLACE falls through this match and throws a MatchError — confirm the
   * driver can only produce those two types here.
   */
  implicit def from(request: JavaWriteRequest): WriteRequest = request match {
    case delete: JavaDeleteRequest => DeleteRequest(delete.getFilter.toBsonObject, delete.isMulti)
    case insert: JavaInsertRequest => InsertRequest(insert.getDocument.toBsonObject)
    case update: JavaUpdateRequest if update.getType == JavaWriteRequest.Type.UPDATE =>
      UpdateRequest(update.getFilter.toBsonObject, update.getUpdate.toBsonObject, update.isMulti, update.isUpsert)
    case replace: JavaUpdateRequest if replace.getType == JavaWriteRequest.Type.REPLACE =>
      ReplaceRequest(replace.getFilter.toBsonObject, replace.getUpdate.toBsonObject, replace.isUpsert)
  }
}
/**
 * A representation of a delete (substitutes [[com.mongodb.bulk.DeleteRequest]]).
 *
 * @param filter the query filter
 * @param multi whether to delete multiple documents matching this filter
 */
case class DeleteRequest(filter: BsonObject, multi: Boolean = true) extends WriteRequest {
  final override def toJavaWriteRequest: JavaDeleteRequest =
    new JavaDeleteRequest(filter.toJavaBsonDocument).multi(multi)
}
/**
 * A representation of a document to insert (substitutes [[com.mongodb.bulk.InsertRequest]]).
 *
 * @param document the document to insert
 */
case class InsertRequest(document: BsonObject) extends WriteRequest {
  final override def toJavaWriteRequest: JavaInsertRequest = new JavaInsertRequest(document.toJavaBsonDocument)
}
/**
 * An update to one or more documents (substitutes [[com.mongodb.bulk.UpdateRequest]] with an update command).
 *
 * @param filter the query filter
 * @param update the update operations
 * @param multi whether this update will update all documents matching the filter
 * @param upsert whether this update will insert a new document if no documents match the filter
 */
case class UpdateRequest(
  filter: BsonObject,
  update: BsonObject,
  multi: Boolean = true,
  upsert: Boolean = false) extends WriteRequest {
  final override def toJavaWriteRequest: JavaUpdateRequest =
    new JavaUpdateRequest(
      filter.toJavaBsonDocument,
      update.toJavaBsonDocument,
      JavaWriteRequest.Type.UPDATE)
      .multi(multi)
      .upsert(upsert)
  /** Reinterprets this update as a whole-document replace (multi is dropped). */
  final def toReplaceRequest: ReplaceRequest = ReplaceRequest(filter, update, upsert)
}
/**
 * An update to one or more documents (substitutes [[com.mongodb.bulk.UpdateRequest]] with a replace command).
 *
 * @param filter the query filter
 * @param update the update operations
 * @param upsert whether this update will insert a new document if no documents match the filter
 */
case class ReplaceRequest(
  filter: BsonObject,
  update: BsonObject,
  upsert: Boolean = false) extends WriteRequest {
  final override def toJavaWriteRequest: JavaUpdateRequest =
    new JavaUpdateRequest(
      filter.toJavaBsonDocument,
      update.toJavaBsonDocument,
      JavaWriteRequest.Type.REPLACE)
      .upsert(upsert)
  /** Reinterprets this replace as an update command (multi defaults to true). */
  def toUpdateRequest: UpdateRequest = UpdateRequest(filter, update, upsert = upsert)
}
| jeffmay/bson-adt | bson-adt-mongo3-async/src/main/scala/adt/bson/mongo/bulk/WriteRequest.scala | Scala | apache-2.0 | 3,276 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.aliyun.dts
import java.{util => ju}
import java.util.{Locale, Optional, Properties}
import scala.collection.JavaConverters._
import org.apache.commons.cli.MissingArgumentException
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.config.SaslConfigs
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.execution.streaming.Source
import org.apache.spark.sql.sources.{BaseRelation, DataSourceRegister, RelationProvider, StreamSourceProvider}
import org.apache.spark.sql.sources.v2.{DataSourceOptions, MicroBatchReadSupport}
import org.apache.spark.sql.sources.v2.reader.streaming.MicroBatchReader
import org.apache.spark.sql.types._
class DTSSourceProvider extends DataSourceRegister
  with RelationProvider
  with StreamSourceProvider
  with MicroBatchReadSupport {

  // Name used with spark.read/readStream .format("dts").
  override def shortName(): String = "dts"

  /** Declares the fixed source schema; a user-supplied schema is ignored. */
  override def sourceSchema(
      sqlContext: SQLContext,
      schema: Option[StructType],
      providerName: String,
      parameters: Map[String, String]): (String, StructType) = {
    (shortName(), DTSSourceProvider.getSchema)
  }

  /** Data source v1 streaming path is intentionally unsupported; use the v2 micro-batch reader. */
  override def createSource(
      sqlContext: SQLContext,
      metadataPath: String,
      schema: Option[StructType],
      providerName: String,
      parameters: Map[String, String]): Source = {
    throw new UnsupportedOperationException("Do not support DTS data source v1")
  }

  /**
   * Builds the v2 streaming reader. Options are validated first; the
   * starting offset defaults to "latest" when not specified.
   */
  override def createMicroBatchReader(
      schema: Optional[StructType],
      checkpointLocation: String,
      options: DataSourceOptions): MicroBatchReader = {
    DTSSourceProvider.checkOptions(options)
    val dtsOffsetReader = new DTSOffsetReader(options)
    val parameters = options.asMap().asScala.toMap
    val caseInsensitiveParams = parameters.map { case (k, v) => (k.toLowerCase(Locale.ROOT), v) }
    val startingStreamOffsets = DTSSourceProvider.getDTSOffsetRangeLimit(caseInsensitiveParams,
      DTSSourceProvider.STARTING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit)
    new DTSMicroBatchReader(dtsOffsetReader, checkpointLocation, startingStreamOffsets, options)
  }

  /**
   * Builds the batch relation. Batch reads default to the full range
   * (earliest..latest); "latest" start and "earliest" end are rejected.
   */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    val caseInsensitiveParameters = CaseInsensitiveMap(parameters)
    val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveParameters)
    val startingRelationOffsets = DTSSourceProvider.getDTSOffsetRangeLimit(
      parameters, DTSSourceProvider.STARTING_OFFSETS_OPTION_KEY, EarliestOffsetRangeLimit)
    assert(startingRelationOffsets != LatestOffsetRangeLimit)
    val endingRelationOffsets = DTSSourceProvider.getDTSOffsetRangeLimit(
      parameters, DTSSourceProvider.ENDING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit)
    assert(endingRelationOffsets != EarliestOffsetRangeLimit)
    new DTSRelation(
      sqlContext,
      caseInsensitiveParameters,
      specifiedKafkaParams,
      startingRelationOffsets,
      endingRelationOffsets)
  }

  // Collects "kafka.*"-prefixed options with the prefix stripped, for pass-through to the consumer.
  private def convertToSpecifiedParams(parameters: Map[String, String]): Map[String, String] = {
    parameters
      .keySet
      .filter(_.toLowerCase(Locale.ROOT).startsWith("kafka."))
      .map { k => k.drop(6).toString -> parameters(k) }
      .toMap
  }
}
object DTSSourceProvider {
  // Option keys accepted by the source.
  val SID_NAME = "sid"
  val USER_NAME = "user"
  val PASSWORD_NAME = "password"
  val KAFKA_BROKER_URL_NAME = "broker"
  val KAFKA_TOPIC = "kafkatopic"
  val STARTING_OFFSETS_OPTION_KEY = "startingoffsets"
  val ENDING_OFFSETS_OPTION_KEY = "endingoffsets"

  /** Fixed record schema: Kafka-style key/value/topic/partition/offset/timestamp columns. */
  def getSchema: StructType = {
    new StructType(Array(
      StructField("key", BinaryType),
      StructField("value", BinaryType),
      StructField("topic", StringType),
      StructField("partition", IntegerType),
      StructField("offset", LongType),
      StructField("timestamp", TimestampType),
      StructField("timestampType", IntegerType)
    ))
  }

  /**
   * Parses an offset option: "latest"/"earliest" (case-insensitive) map to
   * the corresponding range limit, any other string is treated as a JSON
   * offset spec, and a missing option yields `defaultOffsets`.
   */
  def getDTSOffsetRangeLimit(
      params: Map[String, String],
      offsetOptionKey: String,
      defaultOffsets: DTSOffsetRangeLimit): DTSOffsetRangeLimit = {
    params.get(offsetOptionKey).map(_.trim) match {
      case Some(offset) if offset.toLowerCase(Locale.ROOT) == "latest" =>
        LatestOffsetRangeLimit
      case Some(offset) if offset.toLowerCase(Locale.ROOT) == "earliest" =>
        EarliestOffsetRangeLimit
      case Some(json) => SpecificOffsetRangeLimit(json)
      case None => defaultOffsets
    }
  }

  /**
   * Assembles the Kafka consumer Properties for a DTS subscription:
   * SASL/PLAIN auth with "user-sid" as the login, the sid as consumer
   * group, byte-array deserializers, and auto-commit disabled.
   * "kafka."-prefixed options are passed through with the prefix stripped.
   * NOTE(review): `originProperties.get(...)` throws an NPE when a required
   * key is missing — callers appear to run checkOptions first; confirm.
   */
  def sourceKafkaProperties(originProperties: ju.Map[String, String]): Properties = {
    val sid = originProperties.get(SID_NAME).toString
    val user = originProperties.get(USER_NAME).toString
    val password = originProperties.get(PASSWORD_NAME).toString
    val kafkaBootstrapServers = originProperties.get(KAFKA_BROKER_URL_NAME).toString
    val consumerConfig = new Properties()
    originProperties.asScala.foreach { case (k, v) =>
      if (k.startsWith("kafka.")) {
        consumerConfig.put(k.substring(6), v)
      }
    }
    // scalastyle:off
    val jaasTemplate = s"""org.apache.kafka.common.security.plain.PlainLoginModule required username="$user-$sid" password="$password";"""
    consumerConfig.setProperty(SaslConfigs.SASL_JAAS_CONFIG, jaasTemplate)
    consumerConfig.setProperty(SaslConfigs.SASL_MECHANISM, "PLAIN")
    consumerConfig.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT")
    consumerConfig.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers)
    consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, sid)
    consumerConfig.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
    consumerConfig.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
    consumerConfig.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
    // scalastyle:on
    consumerConfig
  }

  /** Validates that all required connection options are present; throws MissingArgumentException otherwise. */
  def checkOptions(options: DataSourceOptions): Unit = {
    if (!options.asMap().containsKey(SID_NAME)) {
      throw new MissingArgumentException(s"Missing required argument '$SID_NAME'.")
    }
    if (!options.asMap().containsKey(USER_NAME)) {
      throw new MissingArgumentException(s"Missing required argument '$USER_NAME'.")
    }
    if (!options.asMap().containsKey(PASSWORD_NAME)) {
      throw new MissingArgumentException(s"Missing required argument '$PASSWORD_NAME'.")
    }
    if (!options.asMap().containsKey(KAFKA_BROKER_URL_NAME)) {
      throw new MissingArgumentException(s"Missing required argument '$KAFKA_BROKER_URL_NAME'.")
    }
    if (!options.asMap().containsKey(KAFKA_TOPIC)) {
      throw new MissingArgumentException(s"Missing required argument '$KAFKA_TOPIC'.")
    }
  }
}
| aliyun/aliyun-emapreduce-sdk | emr-dts/src/main/scala/org/apache/spark/sql/aliyun/dts/DTSSourceProvider.scala | Scala | artistic-2.0 | 7,677 |
package fringe
import chisel3._
/**
* Depulser: 1-cycle pulse to a steady high signal
*/
class Depulser() extends Module {
  // in: single-cycle pulse to capture; rst: synchronous clear; out: latched level.
  val io = IO(new Bundle {
    val in = Input(Bool())
    val rst = Input(Bool())
    val out = Output(Bool())
  })
  // FF is a project flip-flop; presumably it captures io.in when enable is high — confirm.
  val r = Module(new FF(Bool()))
  // rst wins over in: reset drives 0 into the register.
  r.io.in := Mux(io.rst, 0.U, io.in)
  r.io.init := 0.U
  // Update only on a pulse or reset; otherwise the register holds its value,
  // keeping the output high after the pulse has passed.
  r.io.enable := io.in | io.rst
  io.out := r.io.out
}
| stanford-ppl/spatial-lang | spatial/core/resources/chiselgen/template-level/fringeHW/Depulser.scala | Scala | mit | 388 |
package spgui.widgets
import java.time._
import java.time.OffsetDateTime
import spgui.circuit.SPGUICircuit
// import java.time.temporal._
import java.time.format.DateTimeFormatter
import java.text.SimpleDateFormat
import java.util.UUID
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import japgolly.scalajs.react.vdom.all.svg
import japgolly.scalajs.react.ReactDOM
import spgui.SPWidget
import spgui.widgets.css.{WidgetStyles => Styles}
import spgui.communication._
import sp.domain._
import scala.concurrent.duration._
import scala.scalajs.js
import scala.util.{ Try, Success }
import scala.util.Random.nextInt
import scala.collection.mutable.ListBuffer
import org.scalajs.dom
import org.scalajs.dom.raw
import org.scalajs.dom.{svg => *}
import org.singlespaced.d3js.d3
import org.singlespaced.d3js.Ops._
import scalacss.ScalaCssReact._
import scalacss.DevDefaults._
import sp.erica.{API_PatientEvent => api}
import sp.erica.{API_Patient => apiPatient}
object DebuggingWidget {
private class Backend($: BackendScope[String, Map[String, apiPatient.Patient]]) {
var patientObs = Option.empty[rx.Obs]
    // Subscribes to the shared patient model; every change re-renders this widget.
    def setPatientObs(): Unit = {
      patientObs = Some(spgui.widgets.akuten.PatientModel.getPatientObserver(
        patients => $.setState(patients).runNow()
      ))
    }
    // When the websocket comes up, request the full current state from the backend.
    val wsObs = BackendCommunication.getWebSocketStatusObserver( mess => {
      if (mess) send(api.GetState)
    }, "patient-cards-widget-topic")
    /** Wraps the event in an SPHeader and publishes it on the "widget-event" topic. */
    def send(mess: api.Event) {
      val json = ToAndFrom.make(SPHeader(from = "DebuggingWidget", to = "FelsökningsService"), mess)
      BackendCommunication.publish(json, "widget-event")
    }
/**
* Checks if the patient belongs to this team.
*/
    /** True when no team filter is set, or the patient's team name contains the filter string. */
    def belongsToThisTeam(patient: apiPatient.Patient, filter: String): Boolean = {
      filter.isEmpty || patient.team.team.contains(filter)
    }
/**
* Returns the correct hex color for each priority.
*/
    /**
     * Maps a triage priority to its display hex color.
     * Unknown colors are logged and fall back to the NotTriaged grey.
     */
    def decodeTriageColor(p: apiPatient.Priority): String = {
      p.color match {
        case "NotTriaged" => "#afafaf"//"#D5D5D5"
        case "Blue" => "#538AF4"
        case "Green" => "#009550" //"prio4"
        case "Yellow" => "#EAC706" //"prio3"
        case "Orange" => "#F08100" //"prio2"
        case "Red" => "#950000" //"prio1"
        case _ => {
          println("TriageColor: "+ p.color +" not expected in DebuggingWidget")
          return "#D5D5D5" //"prioNA"
        }
      }
    }
/*
Takes an AttendedEvent and returns a tuple containing a bool declaring wether
icon should be filled or not as we as a string containing text to be shown
**/
    /**
     * Returns (icon filled?, label): the doctor id when the patient has been
     * attended, otherwise "Ej Påtittad" (Swedish: "not looked at").
     */
    def decodeAttended(a: apiPatient.Attended): (Boolean, String) = {
      if (a.attended)
        (true, a.doctorId)
      else
        (false, "Ej Påtittad")
    }
/*
Converts the Team.team-field of a given Team into a color value and letter to display
**/
    /** Maps a team name to its display letter on the card ("" when unknown or empty). */
    def decodeTeam(t: apiPatient.Team): (String) = {
      t.team match {
        case "no-match" => ("")
        case "process" => ("P")
        case "stream" => ("S")
        case "kirurgi" => ("K")
        case "medicin gul" => ("M")
        case "medicin blå" => ("M")
        case "medicin" => ("M")
        case "NAKM" => ("NAKM")
        case "ortopedi" => ("O")
        case "jour" => ("J")
        case _ => ("")
      }
    }
/**
Decodes the initial background and icon colors of the progress bar. Tuple values as follows:
(initial background, initial symbol)
*/
    /**
     * Initial (uncompleted) progress-bar colors per triage priority.
     * Returns (background color, symbol color); unknown colors are logged
     * and fall back to the NotTriaged greys.
     */
    def progressBarInitialColoring(p: apiPatient.Priority): (String, String) = {
      p.color match {
        case "NotTriaged" => ("#E0E0E0", "#AFAFAF")
        case "Blue" => ("#DDE8FF", "#538AF4")
        case "Green" => ("#F5FAF8", "#DCE2DF")
        case "Yellow" => ("#FFED8D", "#EAC706")
        case "Orange" => ("#FCC381", "#F08100")
        case "Red" => ("#D99898", "#950000")
        case _ => {
          println("TriageColor: "+ p.color +" not expected in DebuggingWidget")
          return ("#E0E0E0", "#AFAFAF") //"NotTriaged"
        }
      }
    }
/**
Decodes the background and icon colors of the progress bar when stages are completed.
Returns List as follows:
List((attended background color, attended symbol color, patient is attended),
(plan background color, plan symbol color, plan does exist),
(finished background color, finished symbol color, patient is finished))
*/
    /**
     * Colors for the three progress-bar stages (attended, plan, finished).
     * Each element is (background, symbol, stage completed); incomplete
     * stages use the priority's initial coloring.
     */
    def progressBarColoring(p: apiPatient.Patient): List[(String, String, Boolean)] = {
      val coloring = ListBuffer[(String, String, Boolean)]()
      val initColoring = progressBarInitialColoring(p.priority)
      if (decodeAttended(p.attended)._1) coloring += Tuple3("#8D47AA", "#FFFFFF", true)
      else coloring += Tuple3(initColoring._1, initColoring._2, false)
      if (p.plan.hasPlan) coloring += Tuple3("#E9B7FF", "#FFFFFF", true) // To be implemented: Plan exists
      else coloring += Tuple3(initColoring._1, initColoring._2, false)
      if (p.finished.finishedStillPresent) coloring += Tuple3("#47AA62", "#FFEDFF", true)
      else coloring += Tuple3(initColoring._1, initColoring._2, false)
      coloring.toList
    }
/**
* Converts milliseconds to hours and minutes, visualized in string.
*/
def getTimeDiffReadable(milliseconds: Long): (String, String) = {
val minutes = ((milliseconds / (1000*60)) % 60)
val hours = ((milliseconds / (1000*60*60)) )//% 24)
val timeString = (hours, minutes) match {
case (0,_) => {if (minutes == 0) "" else (minutes + " min").toString}
case (_,0) => (hours + " h").toString
case _ => (hours + " h " + minutes + " m").toString
}
val days = (milliseconds / (1000*60*60*24))
val dayString = days match {
case 0 => ""
case (n: Long) => "(> "+ n + " dygn)"
}
(timeString, dayString)
}
/*
* Returns true if a patient has waited longer than is recommended according
* to triage priority.
* Prio red: immediately, Prio orange: 20 min, Prio yellow or green: 60 min.
**/
def hasWaitedTooLong(p: apiPatient.Patient) = {
p.priority.color match {
case "Green" | "Yellow" => if (p.latestEvent.timeDiff > 3600000) true else false
case "Orange" => if (p.latestEvent.timeDiff > 1200000) true else false
case "Red" => true
case _ => false
}
}
/**
From ElvisDataHandler.scala
Code used for data to CoordinatorDiagramWidget
*/
def decodeTeam(reasonForVisit: String, location: String, clinic: String): String = {
reasonForVisit match {
case "AKP" => "stream"
case "ALL" | "TRAU" => "process"
case "B" | "MEP" => {
clinic match {
case "NAKME" => {
if (location != "") {
location.charAt(0) match {
case 'B' => "medicin blå"
case 'G' => "medicin gul"
case 'P' => "process"
case _ => "medicin övriga"
}
} else {
"medicin övriga"
}
}
case "NAKKI" => "kirurgi"
case "NAKOR" => "ortopedi"
case "NAKBA" | "NAKGY" | "NAKÖN" => "jour"
case "NAKM" => {
if (location != "") {
location.charAt(0) match {
case 'B' => "medicin blå"
case 'G' => "medicin gul"
case 'P' => "process"
case _ => "NAKM"
}
} else {
"NAKM"
}
}
case _ => "no-match"
}
}
case _ => "no-match"
}
}
/*
Specifies a patientCard in SVG for scalajs-react based on a Patient.
**/
  def patientCard(p: apiPatient.Patient) = {
    // Layout constants: the artwork is drawn on a fixed 176x100 grid (the viewBox);
    // cardScaler only scales the rendered <svg> element, not the coordinates.
    val cardScaler = 1.2
    val cardHeight = 100 // change only with new graphics
    val cardWidth = 176 // change only with new graphics
    val fontSizeSmall = 7.6
    val fontSizeMedium = 15.2
    val fontSizeLarge = 35
    // Shared color palette for the card contents.
    val cardBackgroundColor = "#ffffff"
    val contentColorDark = "#000000"
    val contentColorLight = "#ffffff"
    val contentColorAttention = "#E60000"
    val delimiterColor = "#95989a"
    val shadowColor = "lightgrey"
    svg.svg( //ARTO: Creates an <svg> tag to be filled with objects
      ^.key := p.careContactId,
      ^.`class` := "patient-card-canvas",
      svg.width := (cardScaler * cardWidth * 1.04).toString,
      svg.height := (cardScaler * cardHeight * 1.04).toString,
      svg.viewBox := "0 0 "+ (cardWidth + 4).toString +" "+ (cardHeight + 4).toString,
      // svg.transform := "scale(" + cardScaler + ")",
      svg.id := p.careContactId,
      // Graphics layer: shadow, background, triage field, symbols, progress bar and status boxes.
      svg.g(
        ^.`class` := "patient-card-graphics",
        //svg.transform := "translate(0,0)",
        svg.rect(
          ^.`class` := "shadow",
          svg.y := "2",
          svg.x := "2",
          svg.height := cardHeight.toString,
          svg.width := cardWidth.toString,
          svg.fill := shadowColor
        ),
        svg.rect(
          ^.`class` := "bg-field",
          svg.y := 0,
          svg.x := 0,
          svg.height := cardHeight,
          svg.width := cardWidth,
          svg.fill := cardBackgroundColor
        ),
        svg.path(
          ^.`class` := "triage-field",
          svg.d := "m 0.13909574,0.13190582 62.53049726,0 0,99.99740418 -62.53049726,0 z",
          svg.fill := decodeTriageColor(p.priority)
        ),
        svg.path(
          ^.`class` := "delimiter",
          svg.d := "m 67.626393,80.531612 0,1.07813 103.136807,0 0,-1.07813 -103.136807,0 z",
          svg.fill := delimiterColor
        ),
        if (p.latestEvent.latestEvent != "") { // Only draw this if latestEvent exists.
          svg.path(
            ^.`class` := "timer-symbol",
            svg.d := "m 72.589477,69.337097 -2.03739,0 0,0.679124 2.03739,0 0,-0.679124 z m -1.358295,4.414306 0.679131,0 0,-2.037372 -0.679131,0 0,2.037372 z m 2.72676,-2.244506 0.482133,-0.482177 C 74.29407,70.851542 74.134459,70.688554 73.961309,70.545937 l -0.482201,0.482178 c -0.526322,-0.421054 -1.188477,-0.672333 -1.90836,-0.672333 -1.687619,0 -3.056016,1.368437 -3.056016,3.05606 0,1.687621 1.36503,3.056057 3.056016,3.056057 1.691055,0 3.056084,-1.368436 3.056084,-3.056057 0,-0.719873 -0.251256,-1.382018 -0.66889,-1.904945 z m -2.387194,4.281878 c -1.314105,0 -2.37692,-1.062829 -2.37692,-2.376933 0,-1.314105 1.062815,-2.376934 2.37692,-2.376934 1.314104,0 2.376954,1.062829 2.376954,2.376934 0,1.314104 -1.06285,2.376933 -2.376954,2.376933 z",
            svg.fill := contentColorDark
          )
        } else {
          svg.path(
            ^.`class` := "no-timer-symbol"
          )
        },
        svg.path(
          ^.`class` := "clock-symbol",
          // svg.transform := "translate(0,1)",
          svg.d := "m 72.223193,87.546362 -0.6325,0 0,2.53288 2.2147,1.32892 0.3181,-0.52057 -1.8987,-1.12633 z m -0.2155,-2.10927 a 4.218563,4.218563 0 1 0 4.2229,4.21856 4.2164074,4.2164074 0 0 0 -4.2229,-4.21856 z m 0,7.59373 a 3.3746349,3.3746349 0 1 1 3.3746,-3.37464 3.374096,3.374096 0 0 1 -3.3746,3.37464 z",
          svg.fill := contentColorDark
        ),
        svg.path(
          ^.`class` := "doctor-symbol",
          //svg.transform := "translate(0,2)",
          svg.d := "m 127.90317,90.593352 c -1.1749,0 -3.519,0.58956 -3.519,1.75954 l 0,0.88002 7.0385,0 0,-0.88002 c 0,-1.16998 -2.3446,-1.75954 -3.5195,-1.75954 z m 0,-0.88004 a 1.759531,1.759531 0 1 0 -1.7596,-1.75951 1.7589921,1.7589921 0 0 0 1.7596,1.75951 z",
          svg.fill := contentColorDark
        ),
        // Progress bar, stage 1/3: "attended" chevron; colors come from progressBarColoring.
        svg.path(
          ^.`class` := "timeline-attended-bg",
          svg.d := "m 17.109896,81.261292 6.5568,9.76783 -6.5535,9.096778 0,-15.203718 z m -16.97080026,0.007 16.97560026,0 0,18.861728 -16.97560026,0 z",
          svg.fill := progressBarColoring(p).head._1
        ),
        svg.path(
          ^.`class` := "timeline-attended-symbol",
          svg.d := "m 10.541796,86.403152 c -2.6944999,0 -4.9955003,1.67602 -5.9280003,4.0418 0.9325,2.36581 3.2335004,4.04181 5.9280003,4.04181 2.6946,0 4.9958,-1.676 5.9281,-4.04181 -0.9323,-2.36578 -3.2335,-4.0418 -5.9281,-4.0418 l 0,0 z m 0,6.73634 c -1.4872999,0 -2.6944999,-1.20716 -2.6944999,-2.69454 0,-1.48737 1.2072,-2.69452 2.6944999,-2.69452 1.4876,0 2.6946,1.20715 2.6946,2.69452 0,1.48738 -1.207,2.69454 -2.6946,2.69454 l 0,0 z m 0,-4.31126 c -0.8944999,0 -1.6166999,0.72212 -1.6166999,1.61672 0,0.89458 0.7222,1.61672 1.6166999,1.61672 0.8946,0 1.6169,-0.72214 1.6169,-1.61672 0,-0.8946 -0.7223,-1.61672 -1.6169,-1.61672 l 0,0 z",
          svg.fill := progressBarColoring(p).head._2
        ),
        // Progress bar, stage 2/3: "plan" chevron.
        svg.path(
          ^.`class` := "timeline-plan-bg",
          svg.d := "m 40.222496,100.12931 -22.57,0 0,-0.077 6.4593,-8.969548 -6.4593,-9.65344 0,-0.16168 22.57,0 6.5137,9.77577 -6.5137,9.045548 0,0 z",
          svg.fill := progressBarColoring(p).tail.head._1
        ),
        if (!progressBarColoring(p).tail.head._3) { // Choose correct version of symbol
          svg.path(
            ^.`class` := "timeline-no-plan-symbol",
            svg.d := "m 33.116996,86.655372 c 0.2401,0 0.4366,0.19645 0.4366,0.43654 0,0.2401 -0.1965,0.43653 -0.4366,0.43653 -0.24,0 -0.4365,-0.19643 -0.4365,-0.43653 0,-0.24009 0.1965,-0.43654 0.4365,-0.43654 l 0,0 z m 3.056,0 -1.8249,0 c -0.1835,-0.50637 -0.6635,-0.87306 -1.2311,-0.87306 -0.5674,0 -1.0477,0.36669 -1.2311,0.87306 l -1.8246,0 c -0.4803,0 -0.8731,0.39289 -0.8731,0.87307 l 0,6.11154 c 0,0.4802 0.3928,0.87309 0.8731,0.87309 l 6.1117,0 c 0.4801,0 0.8731,-0.39289 0.8731,-0.87309 l 0,-6.11154 c 0,-0.48018 -0.393,-0.87307 -0.8731,-0.87307 l 0,0 z",
            svg.fill := progressBarColoring(p).tail.head._2
          )
        } else {
          svg.path(
            ^.`class` := "timeline-plan-symbol",
            svg.d := "m 36.172996,86.655372 -1.8249,0 c -0.1835,-0.50637 -0.6635,-0.87306 -1.2311,-0.87306 -0.5674,0 -1.0477,0.36669 -1.2311,0.87306 l -1.8246,0 c -0.4803,0 -0.8731,0.39289 -0.8731,0.87307 l 0,6.11154 c 0,0.4802 0.3928,0.87309 0.8731,0.87309 l 6.1117,0 c 0.4801,0 0.8731,-0.39289 0.8731,-0.87309 l 0,-6.11154 c 0,-0.48018 -0.393,-0.87307 -0.8731,-0.87307 l 0,0 z m -3.056,0 c 0.2401,0 0.4366,0.19645 0.4366,0.43654 0,0.2401 -0.1965,0.43653 -0.4366,0.43653 -0.24,0 -0.4365,-0.19643 -0.4365,-0.43653 0,-0.24009 0.1965,-0.43654 0.4365,-0.43654 l 0,0 z m -0.8731,6.11153 -1.7461,-1.74615 0.6156,-0.6155 1.1305,1.12625 2.877,-2.87679 0.6155,0.61989 -3.4925,3.4923 0,0 z",
            svg.fill := progressBarColoring(p).tail.head._2
          )
        },
        // Progress bar, stage 3/3: "finished" chevron.
        svg.path(
          ^.`class` := "timeline-finished-bg",
          svg.d := "m 62.669093,100.12931 -21.873197,0 0,-0.0889 6.4792,-8.997048 -6.4787,-9.68038 0,-0.0954 21.872697,0 z",
          svg.fill := progressBarColoring(p).tail.tail.head._1
        ),
        svg.path(
          ^.`class` := "timeline-finished-symbol",
          svg.d := "m 52.551893,92.530302 -2.4802,-2.54684 -0.8266,0.84894 3.3068,3.39576 7.0862,-7.27664 -0.8268,-0.84894 -6.2594,6.42772 z",
          svg.fill := progressBarColoring(p).tail.tail.head._2
        ),
        svg.rect(
          ^.`class` := "STATUSFÄRG",
          svg.y := "28.914265",
          svg.x := "154.93719",
          svg.height := "11.111678",
          svg.width := "11.111678",
          svg.fill := { if (p.finished.finishedStillPresent) { // code from statuswidget
            "#ffedff"
          } else {
            if (p.plan.hasPlan) {
              "#e9b7ff"
            } else if (p.attended.attended) {
              "#8d47aa"
            } else {
              "#1c0526"
            }
          }
          }
        ),
        svg.rect(
          ^.`class` := "PLATSFÄRG",
          svg.y := "46.844475",
          svg.x := "155.18974",
          svg.height := "11.111678",
          svg.width := "11.111678",
          svg.fill := { // code from placewidget: method getPlace
            if (p.examination.isOnExam) "#9df2e9"
            else if (p.location.roomNr == "ivr") "#1b998b"
            else if (p.location.roomNr != "") "#4d5256"
            else "#f5fffe"
          }
        )
      ),
      // Text layer: headers and the patient's live values.
      svg.g(
        ^.`class` := "patient-card-text",
        svg.text(
          ^.`class` := "room-nr",
          svg.y := "47.546173",
          svg.x := "22",
          svg.textAnchor := "middle",
          svg.fontSize := fontSizeLarge + "px",
          svg.fill := {if (p.priority.color == "NotTriaged") contentColorDark else contentColorLight},
          p.debugging.location
        ),
        svg.text(
          ^.`class` := "team-letter",
          svg.y := "10.549294",
          svg.x := "170.73709",
          svg.textAnchor := "end",
          svg.fontSize := fontSizeSmall + "px",
          svg.fill := contentColorDark,
          decodeTeam(p.team)
        ),
        svg.text(
          ^.`class` := "header-latest-event",
          svg.y := "52.782063",
          svg.x := "68.144127",
          svg.textAnchor := "start",
          svg.fontSize := fontSizeSmall/2 + "px",
          svg.fill := contentColorDark,
          if (p.latestEvent.latestEvent != "") "Senaste händelse"
          else "Ingen senaste händelse"
        ),
        svg.text(
          ^.`class` := "latest-event",
          svg.y := "67.524231",
          svg.x := "66.502892",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          Styles.freeSansBold,
          svg.fill := contentColorDark,
          p.latestEvent.latestEvent.toUpperCase
        ),
        svg.text(
          ^.`class` := "time-since-latest-event",
          svg.y := "77.683937",
          svg.x := "77.683937",
          svg.textAnchor := "start",
          svg.fontSize := "6px",
          // Highlight the waiting time when the triage recommendation is exceeded.
          svg.fill := { if (hasWaitedTooLong(p)) contentColorAttention else contentColorDark },
          svg.tspan(svg.x := "93")(getTimeDiffReadable(p.latestEvent.timeDiff)._1)
          //svg.tspan(svg.x := "93", svg.dy := "15 px")(getTimeDiffReadable(p.latestEvent.timeDiff)._2)
        ),
        svg.text(
          ^.`class` := "ccid",
          svg.y := "75.8",
          svg.x := "1.9",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          p.careContactId
        ),
        svg.text(
          ^.`class` := "header-team",
          svg.y := "5.7542624",
          svg.x := "7.9752121",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "Team"
        ),
        svg.text(
          ^.`class` := "header-klinik",
          svg.y := "5.7542624",
          svg.x := "67.975212",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "Klinik"
        ),
        svg.text(
          ^.`class` := "header-reasonforvisit",
          svg.y := "6.0068011",
          svg.x := "115.65415",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "ReasonForVisit"
        ),
        svg.text(
          ^.`class` := "header-koordinatordiagramcategory",
          svg.y := "19.364769",
          svg.x := "68.062164",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "I koordinatordiagrammet"
        ),
        svg.text(
          ^.`class` := "header-triagediagramcategory",
          svg.y := "33.38578",
          svg.x := "67.924202",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "I triagediagrammet"
        ),
        svg.text(
          ^.`class` := "header-status",
          svg.y := "28.203604",
          svg.x := "154.476092",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "Status"
        ),
        svg.text(
          ^.`class` := "header-plats",
          svg.y := "45.150215",
          svg.x := "154.476092",
          svg.textAnchor := "start",
          svg.fontSize := "4px",
          svg.fill := contentColorDark,
          "Plats"
        ),
        svg.text(
          ^.`class` := "kliniktext",
          svg.y := "14.173706",
          svg.x := "68.062424",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          p.debugging.clinic
        ),
        svg.text(
          ^.`class` := "teamtext",
          svg.y := "14.173706",
          svg.x := "5.0624237",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          p.team.team
        ),
        svg.text(
          ^.`class` := "reasonForVisitext",
          svg.y := "14.629057",
          svg.x := "115.99483",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          p.debugging.reasonForVisit
        ),
        svg.text(
          ^.`class` := "IKOORDINATORDIAGRAMMETTEXT",
          svg.y := "28.01358",
          svg.x := "67.315895",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          decodeTeam(p.debugging.reasonForVisit, p.debugging.location, p.debugging.clinic)
        ),
        svg.text(
          ^.`class` := "ItriageDIAGRAMMETTEXT",
          svg.y := "42.034588",
          svg.x := "67.177933",
          svg.textAnchor := "start",
          svg.fontSize := "12px",
          svg.fill := contentColorDark,
          p.priority.color
        ),
        svg.text(
          ^.`class` := "arrival-time",
          svg.y := "93.13282",
          svg.x := "79",
          svg.textAnchor := "start",
          svg.fontSize := fontSizeSmall + "px",
          svg.fill := contentColorDark,
          p.arrivalTime.timeDiff // WAS:timestampToODT(p.arrivalTime.timestamp).format(DateTimeFormatter.ofPattern("H'.'m'"))
        ),
        svg.text(
          ^.`class` := "attendant-id",
          svg.y := "93.13282",
          svg.x := "133.8488",
          svg.textAnchor := "start",
          svg.fontSize := fontSizeSmall + "px",
          svg.fill := contentColorDark,
          decodeAttended(p.attended)._2
        )
      )
    )
  }
/*
Sorts a Map[String, Patient] by room number and returns a list of sorted ccids.
Patients missing room number are sorted by careContactId.
Sorting: (1,2,3,a,b,c, , , )
**/
def sortPatientsByRoomNr(pmap: Map[String, apiPatient.Patient]): List[String] = {
val currentCcids = pmap.map(p => p._1)
val ccidsSortedByRoomNr = ListBuffer[(String, String)]()
val ccidsMissingRoomNr = ListBuffer[(String, String)]()
val ccidsWithSpecialRoomNr = ListBuffer[(String, String)]()
currentCcids.foreach{ ccid =>
if (pmap(ccid).location.roomNr == "") ccidsMissingRoomNr += Tuple2(ccid, ccid)
else if (pmap(ccid).location.roomNr.forall(_.isDigit)) ccidsSortedByRoomNr += Tuple2(ccid, pmap(ccid).location.roomNr)
else ccidsWithSpecialRoomNr += Tuple2(ccid, pmap(ccid).location.roomNr)
}
(ccidsSortedByRoomNr.sortBy(_._2.toInt) ++ ccidsWithSpecialRoomNr.sortBy(_._2) ++ ccidsMissingRoomNr.sortBy(_._2)).map(p => p._1).toList
}
  def render(filter: String, pmap: Map[String, apiPatient.Patient]) = {
    // "-1" is the dummy placeholder patient; drop it and keep only this team's patients.
    val pats = (pmap - "-1").filter(p => belongsToThisTeam(p._2, filter))
    <.div(^.`class` := "card-holder-root", Styles.helveticaZ, Styles.hideScrollBar)(
      // Invisible 0x0 svg that only defines the shared hatch pattern
      // referenced (by id) from untriaged patient cards.
      svg.svg(
        svg.width := "0",
        svg.height := "0",
        svg.defs(
          svg.pattern(
            svg.id := "untriagedPattern",
            svg.width := "35.43",
            svg.height := "35.43",
            svg.patternUnits := "userSpaceOnUse",
            svg.patternTransform := "translate(0,0)",
            svg.path(
              svg.fill := "#000000",
              svg.d := "M 1.96875 0 L 0 1.96875 L 0 2.25 L 2.25 0 L 1.96875 0 z M 10.814453 0 L 0 10.816406 L 0 11.097656 L 11.097656 0 L 10.814453 0 z M 19.65625 0 L 0 19.65625 L 0 19.941406 L 19.939453 0 L 19.65625 0 z M 28.517578 0 L 0 28.517578 L 0 28.800781 L 28.800781 0 L 28.517578 0 z M 35.433594 1.9453125 L 1.9453125 35.433594 L 2.2285156 35.433594 L 35.433594 2.2285156 L 35.433594 1.9453125 z M 35.433594 10.841797 L 10.841797 35.433594 L 11.125 35.433594 L 35.433594 11.125 L 35.433594 10.841797 z M 35.433594 19.738281 L 19.738281 35.433594 L 20.019531 35.433594 L 35.433594 20.021484 L 35.433594 19.738281 z M 35.433594 28.603516 L 28.605469 35.433594 L 28.886719 35.433594 L 35.433594 28.886719 L 35.433594 28.603516 z "
            )
          )
        )
      ),
      // One card per patient, ordered by room number.
      sortPatientsByRoomNr(pats).map{ ccid =>
        patientCard(pats(ccid))
      }.toVdomArray
    )
  }
  // Widget teardown: stop the patient and websocket observers so no callbacks
  // fire after the component is unmounted.
  def onUnmount() = {
    println("Unmounting")
    patientObs.foreach(_.kill())
    wsObs.kill()
    Callback.empty
  }
}
def extractTeam(attributes: Map[String, SPValue]) = {
attributes.get("team").flatMap(x => x.asOpt[String]).getOrElse("medicin")
}
  // React component whose state is the patient map keyed by care contact id.
  // "-1" maps to a dummy patient until real data arrives via the patient observer
  // (subscribed in componentDidMount, killed in componentWillUnmount).
  private val cardHolderComponent = ScalaComponent.builder[String]("cardHolderComponent")
    .initialState(Map("-1" ->
      EricaLogic.dummyPatient))
    .renderBackend[Backend]
    .componentDidMount(ctx => Callback(ctx.backend.setPatientObs()))
    .componentWillUnmount(_.backend.onUnmount())
    .build
def apply() = spgui.SPWidget(spwb => {
val currentTeam = extractTeam(spwb.frontEndState.attributes)
cardHolderComponent(currentTeam)
})
}
| kristoferB/SP | sperica/frontend/src/main/scala/spgui/widgets/DebuggingWidget.scala | Scala | mit | 26,437 |
/*
* Copyright 2012 Alexander Bertram
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fhwedel.antscout
package antnet
import net.liftweb.common.Box
import net.liftweb.json.JsonDSL._
import net.liftweb.json.JsonAST.JArray
import akka.actor.ActorRef
/**
* Repräsentiert einen Pfad.
*
* @param source Quelle
* @param destination Ziel
* @param ways Wege
*/
case class Path(source: ActorRef, destination: ActorRef, ways: Seq[AntWay]) {

  /**
   * Total length and trip time, each summed over all ways.
   * Note: destructuring a lazy val computes both components together on first access.
   */
  lazy val (length, tripTime) = ways.foldLeft(0.0, 0.0) {
    case ((lengthAcc, tripTimeAcc), way) => (way.length + lengthAcc, way.tripTime + tripTimeAcc)
  }

  /**
   * Creates a JSON representation of this path.
   *
   * The top-level "length" is in km and "tripTime" in minutes (both 4 decimals);
   * the "lengths"/"tripTimes" arrays additionally carry the raw values with
   * explicit units ("m", "s", "h").
   *
   * @return JSON representation
   */
  def toJson = {
    ("destination" -> AntNode.toOsmNode(destination).toJson) ~
    ("length" -> "%.4f".format(length / 1000)) ~
    ("lengths" -> JArray(List(
      ("unit" -> "m") ~
      ("value" -> "%.4f".format(length))))) ~
    ("source" -> AntNode.toOsmNode(source).toJson) ~
    ("tripTime" -> "%.4f".format(tripTime / 60)) ~
    ("tripTimes" -> JArray(List(
      ("unit" -> "s") ~
      ("value" -> "%.4f".format(tripTime)),
      ("unit" -> "h") ~
      ("value" -> "%.4f".format(tripTime / 3600))))) ~
    ("ways" -> ways.map(_.toJson))
  }
}
/**
* Path-Factory.
*/
object Path {

  /**
   * Builds a [[de.fhwedel.antscout.antnet.Path]] from a boxed way sequence,
   * propagating the box: Empty/Failure inputs yield Empty/Failure results.
   *
   * @param source source node
   * @param destination destination node
   * @param ways boxed sequence of ways
   * @return boxed [[de.fhwedel.antscout.antnet.Path]] instance
   */
  def apply(source: ActorRef, destination: ActorRef, ways: Box[Seq[AntWay]]) =
    ways.map(ws => new Path(source, destination, ws))
}
| abertram/AntScout | src/main/scala/de.fhwedel.antscout/antnet/Path.scala | Scala | apache-2.0 | 2,213 |
package dpla.ingestion3.mappers.providers
import dpla.ingestion3.mappers.utils._
import dpla.ingestion3.messages.IngestMessageTemplates
import dpla.ingestion3.model.DplaMapData.{AtLeastOne, ExactlyOne, ZeroToMany, ZeroToOne}
import dpla.ingestion3.model._
import dpla.ingestion3.utils.Utils
import org.json4s
import org.json4s.JsonDSL._
import org.json4s._
/**
 * DPLA mapping for Internet Archive (IA) records.
 *
 * Extracts OreAggregation and SourceResource fields from the provider's JSON
 * documents; `\` queries a direct child field, `\\` queries recursively.
 */
class IaMapping extends JsonMapping with JsonExtractor with IngestMessageTemplates {

  // ID minting functions
  override def useProviderName: Boolean = true

  override def getProviderName: Option[String] = Some("ia")

  override def originalId(implicit data: Document[JValue]): ZeroToOne[String] =
    extractString(unwrap(data) \ "identifier")

  // OreAggregation

  /** Contributors act as data providers; falls back to Internet Archive itself. */
  override def dataProvider(data: Document[JValue]): ZeroToMany[EdmAgent] = {
    val candidateDataProviders =
      extractStrings(unwrap(data) \\ "contributor")
        .map(nameOnlyAgent)

    if (candidateDataProviders.isEmpty)
      Seq(nameOnlyAgent("Internet Archive"))
    else
      candidateDataProviders
  }

  override def dplaUri(data: Document[JValue]): ZeroToOne[URI] = mintDplaItemUri(data)

  override def edmRights(data: Document[json4s.JValue]): ZeroToMany[URI] =
    extractStrings(unwrap(data) \\ "licenseurl").map(URI)

  override def iiifManifest(data: Document[JValue]): ZeroToMany[URI] =
    extractStrings(unwrap(data) \\ "identifier")
      .map(identifier => URI(s"https://iiif.archivelab.org/iiif/$identifier/manifest.json"))

  /** Maps known collection short names onto properly formatted institution names. */
  override def intermediateProvider(data: Document[JValue]): ZeroToOne[EdmAgent] =
    extractStrings(unwrap(data) \\ "collection").flatMap {
      case "medicalheritagelibrary" => Some("Medical Heritage Library")
      case "blc" => Some("Boston Library Consortium")
      case _ => None
    }
      .map(nameOnlyAgent)
      .headOption

  override def isShownAt(data: Document[JValue]): ZeroToMany[EdmWebResource] =
    extractStrings(unwrap(data) \\ "identifier")
      .map(identifier => stringOnlyWebResource("http://www.archive.org/details/" + identifier))

  override def originalRecord(data: Document[JValue]): ExactlyOne[String] = Utils.formatJson(data)

  override def preview(data: Document[JValue]): ZeroToMany[EdmWebResource] =
    extractStrings(unwrap(data) \\ "identifier")
      .map(identifier => stringOnlyWebResource("https://archive.org/services/img/" + identifier))

  override def provider(data: Document[JValue]): ExactlyOne[EdmAgent] = agent

  override def sidecar(data: Document[JValue]): JValue =
    ("prehashId", buildProviderBaseId()(data)) ~ ("dplaId", mintDplaId(data))

  // SourceResource
  override def creator(data: Document[JValue]): ZeroToMany[EdmAgent] =
    extractStrings(unwrap(data) \\ "creator").map(nameOnlyAgent)

  override def date(data: Document[JValue]): ZeroToMany[EdmTimeSpan] =
    extractStrings(unwrap(data) \\ "date").map(stringOnlyTimeSpan)

  override def description(data: Document[JValue]): ZeroToMany[String] =
    extractStrings(unwrap(data) \\ "description")

  override def language(data: Document[JValue]): ZeroToMany[SkosConcept] =
    extractStrings(unwrap(data) \\ "language").map(nameOnlyConcept)

  override def publisher(data: Document[JValue]): ZeroToMany[EdmAgent] =
    extractStrings(unwrap(data) \\ "publisher").map(nameOnlyAgent)

  /** Provider rights statements, or the IA boilerplate statement when absent. */
  override def rights(data: Document[JValue]): AtLeastOne[String] = {
    val defaultRights = Seq("Access to the Internet Archive’s Collections is granted for scholarship " +
      "and research purposes only. Some of the content available through the Archive may be governed " +
      "by local, national, and/or international laws and regulations, and your use of such content " +
      "is solely at your own risk")

    val rights = extractStrings(unwrap(data) \\ "rights") ++
      extractStrings(unwrap(data) \\ "possible-copyright-status")

    if (rights.nonEmpty) rights else defaultRights
  }

  override def subject(data: Document[JValue]): ZeroToMany[SkosConcept] =
    extractStrings(unwrap(data) \\ "subject").map(nameOnlyConcept)

  /**
   * Builds titles by positionally merging title, volume and issue values into
   * "title, volume, issue", dropping empty components and their separators.
   */
  override def title(data: Document[JValue]): AtLeastOne[String] = {
    val titles = extractStrings(unwrap(data) \\ "title")
    val vols = extractStrings(unwrap(data) \\ "volume")
    val issues = extractStrings(unwrap(data) \\ "issue")

    // Pad all three lists to a common length so positional merging is safe.
    val max = List(titles.size, vols.size, issues.size).max
    val t_max = titles.padTo(max, "")
    val v_max = vols.padTo(max, "")
    val i_max = issues.padTo(max, "")

    (0 until max).map { idx =>
      Seq(t_max(idx), v_max(idx), i_max(idx)).filter(_.nonEmpty).mkString(", ")
    }
  }

  override def `type`(data: Document[JValue]): ZeroToMany[String] =
    extractStrings(unwrap(data) \\ "mediatype")

  def agent = EdmAgent(
    name = Some("Internet Archive"),
    uri = Some(URI("http://dp.la/api/contributor/ia"))
  )
}
| dpla/ingestion3 | src/main/scala/dpla/ingestion3/mappers/providers/IaMapping.scala | Scala | mit | 5,292 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import org.geotools.data.collection.ListFeatureCollection
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class SamplingProcessTest extends Specification {

  val process = new SamplingProcess

  // Schema: a track id, a timestamp and the default point geometry.
  val sft = SimpleFeatureTypes.createType("sample", "track:String,dtg:Date,*geom:Point:srid=4326")
  val fc = new ListFeatureCollection(sft)

  // Ten features alternating between tracks t-0 and t-1, one second apart,
  // with increasing latitude.
  val features = (0 until 10).map { i =>
    val sf = new ScalaSimpleFeature(sft, i.toString)
    sf.setAttribute(0, s"t-${i % 2}")
    sf.setAttribute(1, s"2017-05-24T00:00:0$i.000Z")
    sf.setAttribute(2, s"POINT(45 5$i)")
    sf
  }

  // Populate the collection once before the examples run.
  step {
    features.foreach(fc.add)
  }

  "SamplingProcess" should {
    "manually visit a feature collection" in {
      // 20% sample with no threading attribute keeps features 0 and 5
      val result = SelfClosingIterator(process.execute(fc, 0.2f, null, null).features).toSeq
      result mustEqual features.slice(0, 1) ++ features.slice(5, 6)
    }
    "manually visit a feature collection with threading" in {
      // 50% sample threaded by the "track" attribute
      val result = SelfClosingIterator(process.execute(fc, 0.5f, "track", null).features).toSeq
      result mustEqual features.slice(0, 2) ++ features.slice(4, 6) ++ features.slice(8, 10)
    }
  }
}
| locationtech/geomesa | geomesa-process/geomesa-process-vector/src/test/scala/org/locationtech/geomesa/process/analytic/SamplingProcessTest.scala | Scala | apache-2.0 | 1,928 |
import scala.collection.mutable
import scala.io.Source
import types.{MalList, _list, _list_Q,
MalVector, _vector, _vector_Q,
MalHashMap, _hash_map_Q, _hash_map,
Func, MalFunction}
import printer._pr_list
// mal's core namespace: native implementations of the built-in functions.
// Each function receives its mal arguments as a List[Any].
object core {
  def mal_throw(a: List[Any]) = {
    throw new types.MalException(printer._pr_str(a(0))).init(a(0))
  }

  // Scalar functions

  // Keywords are represented as strings tagged with the 0x29e prefix character.
  def keyword(a: List[Any]) = {
    "\u029e" + a(0).asInstanceOf[String]
  }

  def keyword_Q(a: List[Any]) = {
    a(0) match {
      // Guard against the empty string: s(0) would throw on "".
      case s: String => s.nonEmpty && s(0) == '\u029e'
      case _ => false
    }
  }

  // number functions
  def _bool_op(a: List[Any], op: (Long, Long) => Boolean) = {
    op(a(0).asInstanceOf[Long],a(1).asInstanceOf[Long])
  }

  def _num_op(a: List[Any], op: (Long, Long) => Long) = {
    op(a(0).asInstanceOf[Long],a(1).asInstanceOf[Long])
  }

  // string functions
  def read_string(a: List[Any]) = {
    reader.read_str(a(0).asInstanceOf[String])
  }

  // Reads a whole file into a string; the source is closed even when reading fails.
  def slurp(a: List[Any]) = {
    val src = Source.fromFile(a(0).asInstanceOf[String])
    try src.getLines.mkString("\n") + "\n"
    finally src.close()
  }

  // Hash Map functions
  def assoc(a: List[Any]): Any = {
    a(0).asInstanceOf[MalHashMap] ++ _hash_map(a.drop(1):_*)
  }

  def dissoc(a: List[Any]): Any = {
    var kSet = a.drop(1).toSet
    a(0).asInstanceOf[MalHashMap]
      .filterKeys{ !kSet.contains(_) }
  }

  def get(a: List[Any]): Any = {
    val hm = a(0).asInstanceOf[MalHashMap]
    val key = a(1).asInstanceOf[String]
    if (hm != null && hm.value.contains(key)) hm(key) else null
  }

  def contains_Q(a: List[Any]): Any = {
    a(0).asInstanceOf[MalHashMap].value
      .contains(a(1).asInstanceOf[String])
  }

  // sequence functions
  def concat(a: List[Any]): Any = {
    _list((for (sq <- a) yield types._toIter(sq)).flatten:_*)
  }

  def nth(a: List[Any]): Any = {
    val lst = a(0).asInstanceOf[MalList].value
    val idx = a(1).asInstanceOf[Long]
    if (idx < lst.length) {
      lst(idx.toInt)
    } else {
      throw new Exception("nth: index out of range")
    }
  }

  // (first nil) and (first ()) both yield nil.
  def first(a: List[Any]): Any = {
    a(0) match {
      case null => null
      case ml: MalList => if (ml.value.length > 0) ml.value(0) else null
    }
  }

  // (rest nil) yields the empty list, like (rest ()).
  def rest(a: List[Any]): Any = {
    a(0) match {
      case null => _list()
      case ml: MalList => _list(ml.drop(1).value:_*)
    }
  }

  def empty_Q(a: List[Any]): Any = {
    a(0) match {
      case null => true
      case ml: MalList => ml.value.isEmpty
    }
  }

  def count(a: List[Any]): Any = {
    a(0) match {
      case null => 0
      case ml: MalList => ml.value.length.asInstanceOf[Long]
    }
  }

  // conj appends to vectors and prepends (in reverse) to lists.
  def conj(a: List[Any]): Any = {
    a(0) match {
      case mv: MalVector => {
        _vector(mv.value ++ a.slice(1,a.length):_*)
      }
      case ml: MalList => {
        _list(a.slice(1,a.length).reverse ++ ml.value:_*)
      }
    }
  }

  // (apply f a b (c d)) -> (f a b c d): the last argument list is spliced.
  def apply(a: List[Any]): Any = {
    a match {
      case f :: rest => {
        var args1 = rest.slice(0,rest.length-1)
        var args = args1 ++ rest(rest.length-1).asInstanceOf[MalList].value
        types._apply(f, args)
      }
      case _ => throw new Exception("invalid apply call")
    }
  }

  def do_map(a: List[Any]): Any = {
    a match {
      case f :: seq :: Nil => {
        seq.asInstanceOf[MalList].map(x => types._apply(f,List(x)))
      }
      case _ => throw new Exception("invalid map call")
    }
  }

  // meta functions

  // with-meta returns a copy of the value carrying the new metadata.
  def with_meta(a: List[Any]): Any = {
    val meta: Any = a(1)
    a(0) match {
      case ml: MalList => {
        val new_ml = ml.clone()
        new_ml.meta = meta
        new_ml
      }
      case hm: MalHashMap => {
        val new_hm = hm.clone()
        new_hm.meta = meta
        new_hm
      }
      case fn: Func => {
        val new_fn = fn.clone()
        new_fn.meta = meta
        new_fn
      }
      case fn: MalFunction => {
        val new_fn = fn.clone()
        new_fn.meta = meta
        new_fn
      }
      case _ => throw new Exception("no meta support for " + a(0).getClass)
    }
  }

  def meta(a: List[Any]): Any = {
    a(0) match {
      case ml: MalList => ml.meta
      case hm: MalHashMap => hm.meta
      case fn: Func => fn.meta
      case fn: MalFunction => fn.meta
      case _ => throw new Exception("no meta support for " + a(0).getClass)
    }
  }

  // atom functions
  def reset_BANG(a: List[Any]): Any = {
    a(0).asInstanceOf[types.Atom].value = a(1)
    a(1)
  }

  // (swap! atom f args...) replaces the atom's value with (f value args...).
  def swap_BANG(a: List[Any]): Any = {
    a match {
      case a0 :: f :: rest => {
        val atm = a0.asInstanceOf[types.Atom]
        val args = atm.value +: rest
        atm.value = types._apply(f, args)
        atm.value
      }
      case _ => throw new Exception("invalid swap! call")
    }
  }

  // Name -> native function table exposed to the mal environment.
  val ns: Map[String, (List[Any]) => Any] = Map(
    "=" -> ((a: List[Any]) => types._equal_Q(a(0), a(1))),
    "throw" -> mal_throw _,
    "nil?" -> ((a: List[Any]) => a(0) == null),
    "true?" -> ((a: List[Any]) => a(0) == true),
    "false?" -> ((a: List[Any]) => a(0) == false),
    "symbol" -> ((a: List[Any]) => Symbol(a(0).asInstanceOf[String])),
    "symbol?" -> ((a: List[Any]) => a(0).isInstanceOf[Symbol]),
    "keyword" -> keyword _,
    "keyword?" -> keyword_Q _,

    "pr-str" -> ((a: List[Any]) => _pr_list(a, true, " ")),
    "str" -> ((a: List[Any]) => _pr_list(a, false, "")),
    "prn" -> ((a: List[Any]) => { println(_pr_list(a, true, " ")); null}),
    "println" -> ((a: List[Any]) => { println(_pr_list(a, false, " ")); null}),
    "readline" -> ((a: List[Any]) => readLine(a(0).asInstanceOf[String])),
    "read-string" -> read_string _,
    "slurp" -> slurp _,

    "<" -> ((a: List[Any]) => _bool_op(a, _ < _)),
    "<=" -> ((a: List[Any]) => _bool_op(a, _ <= _)),
    ">" -> ((a: List[Any]) => _bool_op(a, _ > _)),
    ">=" -> ((a: List[Any]) => _bool_op(a, _ >= _)),
    "+" -> ((a: List[Any]) => _num_op(a, _ + _)),
    "-" -> ((a: List[Any]) => _num_op(a, _ - _)),
    "*" -> ((a: List[Any]) => _num_op(a, _ * _)),
    "/" -> ((a: List[Any]) => _num_op(a, _ / _)),
    "time-ms" -> ((a: List[Any]) => System.currentTimeMillis),

    "list" -> ((a: List[Any]) => _list(a:_*)),
    "list?" -> ((a: List[Any]) => _list_Q(a(0))),
    "vector" -> ((a: List[Any]) => _vector(a:_*)),
    "vector?" -> ((a: List[Any]) => _vector_Q(a(0))),
    "hash-map" -> ((a: List[Any]) => _hash_map(a:_*)),
    "map?" -> ((a: List[Any]) => _hash_map_Q(a(0))),
    "assoc" -> assoc _,
    "dissoc" -> dissoc _,
    "get" -> get _,
    "contains?" -> contains_Q _,
    "keys" -> ((a: List[Any]) => a(0).asInstanceOf[MalHashMap].keys),
    "vals" -> ((a: List[Any]) => a(0).asInstanceOf[MalHashMap].vals),

    "sequential?" -> ((a: List[Any]) => types._sequential_Q(a(0))),
    "cons" -> ((a: List[Any]) => a(0) +: a(1).asInstanceOf[MalList]),
    "concat" -> concat _,
    "nth" -> nth _,
    "first" -> first _,
    "rest" -> rest _,
    "empty?" -> empty_Q _,
    "count" -> count _,
    "conj" -> conj _,
    "apply" -> apply _,
    "map" -> do_map _,

    "with-meta" -> with_meta _,
    "meta" -> meta _,
    "atom" -> ((a: List[Any]) => new types.Atom(a(0))),
    "atom?" -> ((a: List[Any]) => a(0).isInstanceOf[types.Atom]),
    "deref" -> ((a: List[Any]) => a(0).asInstanceOf[types.Atom].value),
    "reset!" -> reset_BANG _,
    "swap!" -> swap_BANG _
  )
}
// vim:ts=2:sw=2
| Chouser/mal | scala/core.scala | Scala | mpl-2.0 | 7,311 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.hadoop
import com.twitter.zipkin.gen.{Annotation, Span}
import com.twitter.scalding.{Tsv, DefaultDateRangeJob, Job, Args}
import com.twitter.zipkin.hadoop.sources.{TimeGranularity, SpanSource}
/**
 * Scalding job that scans an hour-granularity span source for the configured
 * date range and emits the trace ids of all traces containing at least one
 * annotation that mentions the given word (case-insensitive match).
 *
 * Required args: --word (the text to grep for), --output (TSV destination).
 */
class GrepByAnnotation(args: Args) extends Job(args) with DefaultDateRangeJob {

  // Normalize the query word once: annotation values are compared lowercased,
  // so the word must be lowercased too or an uppercase --word could never match.
  val grepByWord = args.required("word").toLowerCase

  val preprocessed =
    SpanSource(TimeGranularity.Hour)
      .read
      .mapTo(0 -> ('traceid, 'annotations)) { s: Span => (s.trace_id, s.annotations.toList) }
      .filter('annotations) { annotations: List[Annotation] =>
        // exists(...) short-circuits on the first hit and avoids building the
        // intermediate list that filter(...).isEmpty would have created
        annotations.exists(_.value.toLowerCase.contains(grepByWord))
      }
      .project('traceid)
      .write(Tsv(args("output")))
}
} | netconstructor/zipkin | zipkin-hadoop/src/main/scala/com/twitter/zipkin/hadoop/GrepByAnnotation.scala | Scala | apache-2.0 | 1,322 |
// Forward-compatibility test: re-export every member and given instance of
// Tuple into A's scope, while explicitly excluding the two CanEqual givens
// (canEqualEmptyTuple, canEqualTuple) via `as _` renames.
object A:
  export Tuple.{canEqualEmptyTuple as _, canEqualTuple as _, given, *}
| dotty-staging/dotty | tests/pos/forwardCompat-excludedExport/Test_r3.0.scala | Scala | apache-2.0 | 81 |
/*
* Copyright 2014 The Instalk Project
*
* The Instalk Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package im.instalk
import play.api.libs.json._
import play.api.libs.functional.syntax._
import org.joda.time.DateTime
/**
 * Wire protocol shared by the Instalk server and its clients.
 *
 * Contains the request/response marker traits, protocol-level type aliases,
 * the dispatching JSON `Reads` for incoming operation messages, validators
 * for top-level fields, and builders for outgoing JSON payloads
 * ([[Responses]]) and error payloads ([[Errors]]).
 */
package object protocol {
  // Marker for any message received from a client.
  trait Request
  // A request addressed to a specific room; `r` carries the room id.
  trait RoomRequest extends Request {
    def r: RoomId
  }
  // Marker for any message sent back to a client.
  trait Response
  // Marker for requests that are awaiting completion server-side.
  trait InFlightRequest
  import DefaultFormats._
  // Protocol version advertised to clients.
  val VERSION = "0.1"
  type Version = String
  type SessionId = String
  type RoomId = String
  // Dispatches an incoming JSON message to the concrete operation type based
  // on its "o" (operation) field. Unknown or missing operations produce a
  // JsError rather than throwing.
  implicit val operation: Reads[OperationRequest] = new Reads[OperationRequest] {
    def reads(o: JsValue): JsResult[OperationRequest] = {
      (o \\ "o").asOpt[String] match {
        case Some("join") =>
          Json.fromJson[Join](o)
        case Some("leave") =>
          Json.fromJson[Leave](o)
        case Some("bt") =>
          Json.fromJson[BeginTyping](o)
        case Some("st") =>
          Json.fromJson[StoppedTyping](o)
        case Some("away") =>
          Json.fromJson[Away](o)
        case Some("msg") =>
          Json.fromJson[BroadcastMessageRequest](o)
        case Some("fetch") =>
          Json.fromJson[Fetch](o)
        case Some("set-user-info") =>
          Json.fromJson[SetUserInfoRequest](o)
        case Some("set-room-topic") =>
          Json.fromJson[SetRoomTopicRequest](o)
        case Some(_) =>
          JsError("operation.unknown")
        case None =>
          JsError("operation.missing")
      }
    }
  }
  // Readers for top-level envelope fields ("v" version, "heart-beat" counter).
  object Validators {
    val version: Reads[Version] = (__ \\ 'v).read[String]
    val heartbeat: Reads[Long] = (__ \\ "heart-beat").read[Long]
  }
  // Builders for the JSON payloads the server pushes to clients. Room-scoped
  // payloads share the shape { "r": roomId, "o": operation, "data": {...} }.
  object Responses {
    def welcome(user: User) = Json.obj("welcome" -> 1, "user" -> user)
    val timeout = Json.obj("timeout" -> 1)
    def heartbeatAck(i: Long) = Json.obj("heart-beat-ack" -> i)
    // Echoes the rejected operation name alongside the standard error body.
    def notSupportedOp(op: String) = Json.obj("o" -> op) ++ Errors.unsupportedOp
    // Sent to a client right after it joins: current members, recent message
    // history and the room topic.
    def roomWelcome(roomId: RoomId, members: Iterable[User], lastMessages: Iterable[JsObject], topic: String) = Json.obj(
      "r" -> roomId,
      "o" -> "room-welcome",
      "data" -> Json.obj(
        "members" -> Json.toJson(members),
        "messages" -> Json.toJson(lastMessages),
        "topic" -> topic
      )
    )
    def roomBye(roomId: RoomId) = Json.obj(
      "r" -> roomId,
      "o" -> "room-bye"
    )
    def setUserInfo(info: SetUserInfo) = Json.toJson(info).as[JsObject]
    def setRoomTopic(resp: SetRoomTopicResponse) = Json.toJson(resp).as[JsObject]
    // Broadcast to a room when a user joins/leaves.
    def joinedRoom(roomId: RoomId, who: User, when: DateTime) = Json.obj(
      "r" -> roomId,
      "o" -> "joined",
      "data" -> Json.obj(
        "user" -> who,
        "when" -> when
      )
    )
    def leftRoom(roomId: RoomId, who: User, when: DateTime) = Json.obj(
      "r" -> roomId,
      "o" -> "left",
      "data" -> Json.obj(
        "user" -> who,
        "when" -> when
      )
    )
    def roomMessage(msg: RoomMessage): JsObject =
      Json.obj(
        "r" -> msg.r,
        "o" -> "msg",
        "data" -> Json.obj(
          "msg" -> msg.envelope
        )
      )
    // Response to a history-fetch request: messages older than the cursor.
    def fetchBefore(roomId: RoomId, lastMessages: Iterable[JsObject]) = Json.obj(
      "r" -> roomId,
      "o" -> "fetch",
      "data" -> Json.obj(
        "messages" -> Json.toJson(lastMessages)
      )
    )
    // Typing/presence notifications; timestamped server-side with DateTime.now.
    def beginTyping(roomId: RoomId, who: User): JsObject = Json.obj(
      "r" -> roomId,
      "o" -> "bt",
      "data" -> Json.obj(
        "sender" -> who.username,
        "when" -> DateTime.now
      )
    )
    def stoppedTyping(roomId: RoomId, who: User): JsObject = Json.obj(
      "r" -> roomId,
      "o" -> "st",
      "data" -> Json.obj(
        "sender" -> who.username,
        "when" -> DateTime.now
      )
    )
    def away(roomId: RoomId, who: User): JsObject = Json.obj(
      "r" -> roomId,
      "o" -> "away",
      "data" -> Json.obj(
        "sender" -> who.username,
        "when" -> DateTime.now
      )
    )
  }
  // Error payloads: { "error": { "code": ..., "msg": ... } }, optionally with
  // the flattened JsError details under "errors".
  object Errors {
    private[this] def formatError(code: Int, msg: String) = Json.obj("error" -> Json.obj("code" -> code, "msg" -> msg))
    private[this] def formatJsonError(code: Int, msg: String, e: JsError) =
      Json.obj("error" -> Json.obj("code" -> code, "msg" -> msg, "errors" -> JsError.toFlatJson(e)))
    val InvalidVersion = formatError(1, "version.invalid")
    val unsupportedOp = formatError(2, "operation.unsupported")
    val missingOperation = formatError(3, "operation.missing")
    val notImplemented = formatError(999, "implementation.missing")
    val unknownRoom = formatError(4, "room.unknown")
    val notMemberInRoom = formatError(5, "room.member.notfound")
    val badJson = formatError(401, "json.malformed")
    def invalidOperationMessage(e: JsError) = formatJsonError(400, "request.bad", e)
  }
}
} | AhmedSoliman/instalk | app/im/instalk/protocol/package.scala | Scala | apache-2.0 | 5,288 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.collector
import com.twitter.scrooge.BinaryThriftStructSerializer
import com.twitter.util.Future
import com.twitter.zipkin.common.{Span, Annotation}
import com.twitter.zipkin.config.sampler.AdjustableRateConfig
import com.twitter.zipkin.config.ScribeZipkinCollectorConfig
import com.twitter.zipkin.conversions.thrift._
import com.twitter.zipkin.gen
import com.twitter.zipkin.storage.Aggregates
import org.specs.Specification
import org.specs.mock.{ClassMocker, JMocker}
/**
 * Specs for ScribeCollectorService: verifies that valid span log entries are
 * enqueued, back-pressure is signalled via TryLater, wrong-category entries
 * are dropped, and that sample-rate/aggregate calls are delegated to their
 * backing stores. Collaborators are JMock mocks.
 */
class ScribeCollectorServiceSpec extends Specification with JMocker with ClassMocker {
  // Thrift serializer used to produce the base64 payload scribe would carry.
  val serializer = new BinaryThriftStructSerializer[gen.Span] {
    def codec = gen.Span
  }
  val category = "zipkin"
  // A minimal valid span and its scribe log-entry encodings.
  val validSpan = Span(123, "boo", 456, None, List(new Annotation(1, "bah", None)), Nil)
  val validList = List(gen.LogEntry(category, serializer.toString(validSpan.toThrift)))
  val wrongCatList = List(gen.LogEntry("wrongcat", serializer.toString(validSpan.toThrift)))
  // Pre-computed base64 of validSpan's thrift bytes (what the queue receives).
  val base64 = "CgABAAAAAAAAAHsLAAMAAAADYm9vCgAEAAAAAAAAAcgPAAYMAAAAAQoAAQAAAAAAAAABCwACAAAAA2JhaAAPAAgMAAAAAAIACQAA"
  val queue = mock[WriteQueue[Seq[String]]]
  val zkSampleRateConfig = mock[AdjustableRateConfig]
  val mockAggregates = mock[Aggregates]
  // Config wired with the mocks above; unused sub-configs are left null since
  // the lazy overrides below bypass them.
  val config = new ScribeZipkinCollectorConfig {
    def writeQueueConfig = null
    def zkConfig = null
    def indexConfig = null
    def storageConfig = null
    def aggregatesConfig = null
    def methodConfig = null
    override lazy val writeQueue = queue
    override lazy val sampleRateConfig = zkSampleRateConfig
    override lazy val aggregates = mockAggregates
  }
  // Fresh service per example, forced into the running state so log() accepts.
  def scribeCollectorService = new ScribeCollectorService(config, config.writeQueue, Set(category)) {
    running = true
  }
  "ScribeCollectorService" should {
    "add to queue" in {
      val cs = scribeCollectorService
      expect {
        one(queue).add(List(base64)) willReturn(true)
      }
      gen.ResultCode.Ok mustEqual cs.log(validList)()
    }
    // Queue rejection must surface as TryLater so scribe retries.
    "push back" in {
      val cs = scribeCollectorService
      expect {
        one(queue).add(List(base64)) willReturn(false)
      }
      gen.ResultCode.TryLater mustEqual cs.log(validList)()
    }
    // Entries for other categories are acknowledged but never enqueued.
    "ignore wrong category" in {
      val cs = scribeCollectorService
      expect {
        never(queue).add(any)
      }
      gen.ResultCode.Ok mustEqual cs.log(wrongCatList)()
    }
    "get sample rate" in {
      val cs = scribeCollectorService
      val sampleRate = 0.3
      expect {
        one(zkSampleRateConfig).get willReturn sampleRate
      }
      val result = cs.getSampleRate
      result() mustEqual sampleRate
    }
    "set sample rate" in {
      val cs = scribeCollectorService
      val sampleRate = 0.4
      val expected = Future.Unit
      expect {
        one(zkSampleRateConfig).set(sampleRate)
      }
      val actual = cs.setSampleRate(sampleRate)
      actual() mustEqual expected()
    }
    // Aggregate writes are delegated 1:1 to the Aggregates store.
    "store aggregates" in {
      val serviceName = "mockingbird"
      val annotations = Seq("a" , "b", "c")
      val dependencies = Seq("service1:10", "service2:5")
      "store top annotations" in {
        val cs = scribeCollectorService
        expect {
          one(mockAggregates).storeTopAnnotations(serviceName, annotations)
        }
        cs.storeTopAnnotations(serviceName, annotations)
      }
      "store top key value annotations" in {
        val cs = scribeCollectorService
        expect {
          one(mockAggregates).storeTopKeyValueAnnotations(serviceName, annotations)
        }
        cs.storeTopKeyValueAnnotations(serviceName, annotations)
      }
      "store dependencies" in {
        val cs = scribeCollectorService
        expect {
          one(mockAggregates).storeDependencies(serviceName, dependencies)
        }
        cs.storeDependencies(serviceName, dependencies)
      }
    }
  }
}
| cordoval/zipkin | zipkin-collector-scribe/src/test/scala/com/twitter/zipkin/collector/ScribeCollectorServiceSpec.scala | Scala | apache-2.0 | 4,462 |
package scalan
import scala.collection.Seq
import scalan.util.CollectionUtil
// Base analysis hook in the Scalan cake; concrete analyses live in the
// staged (Exp) variant below.
trait Analyzing { self: Scalan => }
// The unstaged (standard evaluation) variant carries no extra machinery.
trait AnalyzingStd extends Analyzing { self: ScalanStd => }
/**
 * Staged-graph analyses: a nesting-level analysis (how deeply a symbol is
 * used inside nested lambdas) and a usage analysis (which symbols use a
 * given symbol, grouped by nesting level). Both are backward analyses over
 * the program graph with markings joined via a lattice.
 */
trait AnalyzingExp extends Analyzing { self: ScalanExp =>
  // Marking that records the maximal lambda-nesting level a symbol is used at.
  case class LevelCount[T](level: Int)(implicit val elem: Elem[T]) extends Marking[T] {
    def nonEmpty = true
  }
  // Lattice over LevelCount: join takes the larger level; bottom is 0.
  implicit object LevelCountLattice extends Lattice[LevelCount] {
    def maximal[T:Elem] = Some(LevelCount(Int.MaxValue))
    def minimal[T:Elem] = Some(LevelCount(0))
    def join[T](a: LevelCount[T], b: LevelCount[T]) = {
      implicit val eT = a.elem
      LevelCount(a.level max b.level)
    }
  }
  // Backward analysis propagating nesting levels: function arguments of
  // array combinators are marked one level deeper than their result.
  class LevelAnalyzer extends BackwardAnalyzer[LevelCount] {
    val name = "levelCounter"
    def lattice = LevelCountLattice
    def defaultMarking[T:Elem] = LevelCount[T](0)
    def mkLevelMark[T](level: Int)(eT: Elem[T]) = LevelCount(level)(eT)
    def updateMark[T](s: Exp[T], level: Int): (Exp[T], LevelCount[T]) = {
      updateMark(s, mkLevelMark(level)(s.elem))
    }
    // A lambda itself always sits at level 0.
    def getLambdaMarking[A, B](lam: Lambda[A, B], mDom: LevelCount[A], mRange: LevelCount[B]): LevelCount[(A) => B] =
      mkLevelMark(0)(lam.elem)
    def getInboundMarkings[T](te: TableEntry[T], outMark: LevelCount[T]): MarkedSyms = {
      val l = outMark.level
      te.rhs match {
        // the mapped/filtered function runs once per element => level l + 1
        case ArrayMap(xs: Arr[a], f) =>
          Seq[MarkedSym](updateMark(xs, l), updateMark(f, l + 1))
        case ArraySortBy(xs: Arr[a], f, _) =>
          Seq[MarkedSym](updateMark(xs, l), updateMark(f, l + 1))
        case ArrayFold(xs: Arr[a], init, f) =>
          Seq[MarkedSym](
            updateMark(xs, l), updateMark(init, l), updateMark(f, l + 1)
          )
        case ArrayFilter(xs: Arr[a], p) =>
          Seq[MarkedSym](updateMark(xs, l), updateMark(p, l + 1))
        // a lambda's result symbol inherits the lambda's level
        case lam: Lambda[a,b] =>
          Seq[MarkedSym](updateMark(lam.y, l))
        // all other nodes: dependencies stay at the same level
        case _ =>
          te.rhs.getDeps.toSeq.map(s => updateMark(s, l))
      }
    }
  }
  /**
   * Represents usage summary of the symbol it is attached
   * @param counters Key - usage level, Value - symbols that use this symbol from the corresponding level
   */
  case class UsageCount[T](counters: Map[Int, Seq[Exp[_]]])(implicit val elem: Elem[T]) extends Marking[T] {
    def nonEmpty = true
  }
  // Lattice over UsageCount: join merges the per-level user lists
  // (outer join of the maps, concatenating and de-duplicating users).
  implicit object UsageCountLattice extends Lattice[UsageCount] {
    def maximal[T:Elem] = None
    def minimal[T:Elem] = Some(UsageCount(Map()))
    def join[T](a: UsageCount[T], b: UsageCount[T]) = {
      implicit val eT = a.elem
      UsageCount[T](
        CollectionUtil.outerJoin(a.counters, b.counters)
        ((_,l) => l, (_,r) => r, (k,l,r) => (l ++ r).distinct))
    }
  }
  // Backward analysis recording, for every symbol, which symbols use it and
  // from which nesting level (levels come from a LevelAnalyzer's markings).
  class UsageAnalyzer extends BackwardAnalyzer[UsageCount] {
    val name = "usageCounter"
    val levelAnalyzer = new LevelAnalyzer // use this to access markings
    def lattice = UsageCountLattice
    def defaultMarking[T:Elem] = UsageCount[T](Map())
    def mkUsageMark[T](counters: Map[Int,Seq[Exp[_]]])(eT: Elem[T]) = UsageCount(counters)(eT)
    def promoteMark[T](s: Exp[T], counters: Map[Int,Seq[Exp[_]]]): (Exp[T], UsageCount[T]) = {
      s -> mkUsageMark(counters)(s.elem)
    }
    def getLevel[T](s: Exp[T]): Int = levelAnalyzer.getMark(s).level
    def getLambdaMarking[A, B](lam: Lambda[A, B], mDom: UsageCount[A], mRange: UsageCount[B]): UsageCount[(A) => B] =
      mkUsageMark(Map())(lam.elem)
    def getInboundMarkings[T](te: TableEntry[T], outMark: UsageCount[T]): MarkedSyms = {
      te.rhs match {
        // lambdas contribute no direct usages of their dependencies here
        case l: Lambda[a,b] => Seq()
        case _ =>
          val l = getLevel(te.sym)
          // each dependency is "used by" te.sym from te.sym's level
          te.rhs.getDeps.toSeq.map(s => {
            promoteMark(s, Map(l -> Seq(te.sym)))
          })
      }
    }
  }
}
| scalan/scalan | core/src/main/scala/scalan/staged/Analizing.scala | Scala | apache-2.0 | 3,759 |
package com.alexitc.coinalerts.data.anorm.dao
import java.sql.Connection
import anorm.SQL
import com.alexitc.coinalerts.data.anorm.parsers.UserParsers
import com.alexitc.coinalerts.models._
/**
 * Anorm-based data access object for users, their email-verification tokens
 * and their preferences. All methods take an explicit JDBC [[Connection]]
 * (transaction management is the caller's responsibility) and return
 * Option-wrapped results via `singleOpt`.
 */
class UserPostgresDAO {
  import UserParsers._
  /**
   * Inserts a new user with a freshly generated id.
   * Returns None when the email is already taken (ON CONFLICT DO NOTHING
   * suppresses the row, so RETURNING yields nothing).
   */
  def create(email: UserEmail, password: UserHiddenPassword)(implicit conn: Connection): Option[User] = {
    val userId = UserId.create
    val userMaybe = SQL(
      """
        |INSERT INTO users (user_id, email, password)
        |VALUES ({user_id}, {email}, {password})
        |ON CONFLICT (email) DO NOTHING
        |RETURNING user_id, email
      """.stripMargin
    ).on(
        "user_id" -> userId.string,
        "email" -> email.string,
        "password" -> password.string
      )
      .as(parseUser.singleOpt)
    userMaybe
  }
  /**
   * Creates and stores an email-verification token for the given user.
   * Returns None if a token already exists (conflict is silently ignored).
   */
  def createVerificationToken(userId: UserId)(implicit conn: Connection): Option[UserVerificationToken] = {
    val token = UserVerificationToken.create(userId)
    val tokenMaybe = SQL(
      """
        |INSERT INTO user_verification_tokens
        |  (user_id, token)
        |VALUES
        |  ({user_id}, {token})
        |ON CONFLICT DO NOTHING
        |RETURNING token
      """.stripMargin
    ).on(
        "user_id" -> userId.string,
        "token" -> token.string
      )
      .as(parseUserVerificationToken.singleOpt)
    tokenMaybe
  }
  /**
   * Marks the token's owner as verified (sets verified_on = NOW()).
   * Only succeeds for users not yet verified; returns the updated user,
   * or None if the token is unknown or the user was already verified.
   */
  def verifyEmail(token: UserVerificationToken)(implicit conn: Connection): Option[User] = {
    val userMaybe = SQL(
      """
        |UPDATE users u
        |SET verified_on = NOW()
        |FROM user_verification_tokens t
        |WHERE u.user_id = t.user_id AND
        |      u.verified_on IS NULL AND
        |      token = {token}
        |RETURNING u.user_id, u.email
      """.stripMargin
    ).on(
        "token" -> token.string
      )
      .as(parseUser.singleOpt)
    userMaybe
  }
  // Returns the stored (hidden) password for a verified user, if any.
  def getVerifiedUserPassword(email: UserEmail)(implicit conn: Connection): Option[UserHiddenPassword] = {
    val passwordMaybe = SQL(
      """
        |SELECT password
        |FROM users
        |WHERE verified_on IS NOT NULL AND
        |      email = {email}
      """.stripMargin
    ).on(
        "email" -> email.string
      )
      .as(parsePassword.singleOpt)
    passwordMaybe
  }
  // Looks up a verified user by email; unverified users are invisible here.
  def getVerifiedUserByEmail(email: UserEmail)(implicit conn: Connection): Option[User] = {
    val userMaybe = SQL(
      """
        |SELECT user_id, email
        |FROM users
        |WHERE verified_on IS NOT NULL AND
        |      email = {email}
      """.stripMargin
    ).on(
        "email" -> email.string
      )
      .as(parseUser.singleOpt)
    userMaybe
  }
  // Looks up a verified user by id; unverified users are invisible here.
  def getVerifiedUserById(userId: UserId)(implicit conn: Connection): Option[User] = {
    val userMaybe = SQL(
      """
        |SELECT user_id, email
        |FROM users
        |WHERE verified_on IS NOT NULL AND
        |      user_id = {user_id}
      """.stripMargin
    ).on(
        "user_id" -> userId.string
      )
      .as(parseUser.singleOpt)
    userMaybe
  }
  // Returns the stored preferences, or None if the user never set any.
  def getUserPreferences(userId: UserId)(implicit conn: Connection): Option[UserPreferences] = {
    SQL(
      """
        |SELECT user_id, lang
        |FROM user_preferences
        |WHERE user_id = {user_id}
      """.stripMargin
    ).on(
        "user_id" -> userId.string
      )
      .as(parseUserPreferences.singleOpt)
  }
  /**
   * Upserts the user's preferences (insert, or overwrite lang on conflict)
   * and returns the resulting row.
   */
  def setUserPreferences(userId: UserId, preferencesModel: SetUserPreferencesModel)(
      implicit conn: Connection): Option[UserPreferences] = {
    SQL(
      """
        |INSERT INTO user_preferences
        |  (user_id, lang)
        |VALUES
        |  ({user_id}, {lang})
        |ON CONFLICT (user_id) DO UPDATE
        |  SET lang = EXCLUDED.lang
        |RETURNING user_id, lang
      """.stripMargin
    ).on(
        "user_id" -> userId.string,
        "lang" -> preferencesModel.lang.code
      )
      .as(parseUserPreferences.singleOpt)
  }
}
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/data/anorm/dao/UserPostgresDAO.scala | Scala | gpl-3.0 | 3,931 |
package com.commodityvectors.snapshotmatchers.playJson
import com.commodityvectors.snapshotmatchers.{SnapshotLoader, SnapshotMessages, SnapshotSerializer}
import org.scalactic.Equality
import org.scalatest.matchers.{MatchResult, Matcher}
import play.api.libs.json.{JsValue, Json, Reads}
/**
 * ScalaTest matcher support for snapshot-testing values that round-trip
 * through play-json: snapshots are stored as pretty-printed JSON and a
 * stored snapshot can be asserted to deserialize back to an expected value.
 */
trait PlayJsonSnapshotMatcher extends SnapshotLoader with SnapshotMessages {
  // Serializes JsValue snapshots as pretty-printed JSON when writing them out.
  implicit lazy val playJsonSerializer = new SnapshotSerializer[JsValue] {
    override def serialize(in: JsValue): String = Json.prettyPrint(in)
  }
  // Matcher: loads the snapshot with the given id, parses it as T and
  // compares it to `in` using the implicit Equality. Fails (without
  // throwing) when the snapshot id is unknown.
  // NOTE(review): Json.parse(content).as[T] throws on malformed/mismatched
  // JSON rather than producing a MatchResult — presumably acceptable in
  // tests; confirm if a JsError-safe variant is wanted.
  class JsonDeserializerShouldMatch[T](in: T)(implicit reads: Reads[T], equals: Equality[T]) extends Matcher[String] {
    override def apply(explicitId: String): MatchResult = {
      loadSnapshot(explicitId) match {
        case Some(content) =>
          val parsed = Json.parse(content).as[T]
          val isEquals = equals.areEqual(parsed, in)
          MatchResult(isEquals, errorMessage(in.toString, parsed.toString), ContentsAreEqual)
        case None => MatchResult(matches = false, s"Could not find snapshot for id: $explicitId", ContentsAreEqual)
      }
    }
  }
  // Entry point used in specs: `snapshotId should deserializeAs(expected)`.
  def deserializeAs[T](in: T)(implicit reads: Reads[T], equals: Equality[T]): Matcher[String] =
    new JsonDeserializerShouldMatch[T](in)
}
| commodityvectors/scalatest-snapshot-matchers | scalatest-snapshot-matcher-play-json/src/main/scala/com/commodityvectors/snapshotmatchers/playJson/PlayJsonSnapshotMatcher.scala | Scala | mit | 1,237 |
package mesosphere.marathon
import mesosphere.marathon.core.flow.{ ReviveOffersConfig, LaunchTokenConfig }
import mesosphere.marathon.core.launcher.OfferProcessorConfig
import mesosphere.marathon.core.launchqueue.LaunchQueueConfig
import mesosphere.marathon.core.matcher.manager.OfferMatcherManagerConfig
import mesosphere.marathon.core.plugin.PluginManagerConfiguration
import mesosphere.marathon.core.task.tracker.TaskTrackerConfig
import mesosphere.marathon.core.task.update.TaskStatusUpdateConfig
import mesosphere.marathon.state.ResourceRole
import mesosphere.marathon.upgrade.UpgradeConfig
import org.rogach.scallop.ScallopConf
import scala.sys.SystemProperties
import mesosphere.marathon.io.storage.StorageProvider
/**
 * Central Scallop-based command-line configuration for Marathon, mixing in
 * the option sets of all subsystems (ZooKeeper, offer matching, launch
 * queue, task tracking, upgrades, ...). Option descriptions and defaults
 * below are user-visible behavior; values in milliseconds unless stated.
 */
trait MarathonConf
    extends ScallopConf with ZookeeperConf with LeaderProxyConf
    with LaunchTokenConfig with OfferMatcherManagerConfig with OfferProcessorConfig with ReviveOffersConfig
    with MarathonSchedulerServiceConfig with LaunchQueueConfig with PluginManagerConfiguration
    with TaskStatusUpdateConfig with TaskTrackerConfig with UpgradeConfig {
  //scalastyle:off magic.number
  lazy val mesosMaster = opt[String]("master",
    descr = "The URL of the Mesos master",
    required = true,
    noshort = true)
  lazy val mesosLeaderUiUrl = opt[String]("mesos_leader_ui_url",
    descr = "The host and port (e.g. \\"http://mesos_host:5050\\") of the Mesos master.",
    required = false,
    noshort = true)
  // Feature flags; validated eagerly so unknown names fail at startup.
  lazy val features = opt[String]("enable_features",
    descr = s"A comma-separated list of features. Available features are: ${Features.description}",
    required = false,
    default = None,
    noshort = true,
    validate = validateFeatures
  )
  lazy val availableFeatures: Set[String] = features.get.map(parseFeatures).getOrElse(Set.empty)
  // Splits the comma-separated flag value, dropping blanks and whitespace.
  private[this] def parseFeatures(str: String): Set[String] =
    str.split(',').map(_.trim).filter(_.nonEmpty).toSet
  private[this] def validateFeatures(str: String): Boolean = {
    val parsed = parseFeatures(str)
    // throw exceptions for better error messages
    val unknownFeatures = parsed.filter(!Features.availableFeatures.contains(_))
    lazy val unknownFeaturesString = unknownFeatures.mkString(", ")
    require(
      unknownFeatures.isEmpty,
      s"Unknown features specified: $unknownFeaturesString. Available features are: ${Features.description}"
    )
    true
  }
  def isFeatureSet(name: String): Boolean = availableFeatures.contains(name)
  lazy val mesosFailoverTimeout = opt[Long]("failover_timeout",
    descr = "(Default: 1 week) The failover_timeout for mesos in seconds.",
    default = Some(604800L))
  lazy val highlyAvailable = toggle("ha",
    descrYes = "(Default) Run Marathon in HA mode with leader election. " +
      "Allows starting an arbitrary number of other Marathons but all need " +
      "to be started in HA mode. This mode requires a running ZooKeeper",
    descrNo = "Run Marathon in single node mode.",
    prefix = "disable_",
    noshort = true,
    default = Some(true))
  lazy val checkpoint = toggle("checkpoint",
    descrYes = "(Default) Enable checkpointing of tasks. " +
      "Requires checkpointing enabled on slaves. Allows tasks to continue " +
      "running during mesos-slave restarts and upgrades",
    descrNo = "Disable checkpointing of tasks.",
    prefix = "disable_",
    noshort = true,
    default = Some(true))
  // Range from which globally unique service ports are assigned.
  lazy val localPortMin = opt[Int]("local_port_min",
    descr = "Min port number to use when assigning globally unique service ports to apps.",
    default = Some(10000))
  lazy val localPortMax = opt[Int]("local_port_max",
    descr = "Max port number to use when assigning globally unique service ports to apps.",
    default = Some(20000))
  lazy val defaultExecutor = opt[String]("executor",
    descr = "Executor to use when none is specified. If not defined the Mesos command executor is used by default.",
    default = Some("//cmd"))
  lazy val hostname = opt[String]("hostname",
    descr = "The advertised hostname that is used for the communication with the Mesos master. " +
      "The value is also stored in the persistent store so another standby host can redirect to the elected leader.",
    default = Some(java.net.InetAddress.getLocalHost.getHostName))
  lazy val webuiUrl = opt[String]("webui_url",
    descr = "The HTTP(S) url of the web ui, defaulting to the advertised hostname.",
    noshort = true,
    default = None)
  lazy val maxConcurrentHttpConnections = opt[Int]("http_max_concurrent_requests",
    descr = "The number of concurrent HTTP requests that are allowed before rejecting.",
    noshort = true,
    default = None
  )
  lazy val accessControlAllowOrigin = opt[String]("access_control_allow_origin",
    descr = "The origin(s) to allow in Marathon. Not set by default. " +
      "Example values are \\"*\\", or " +
      "\\"http://localhost:8888, http://domain.com\\"",
    noshort = true,
    default = None)
  lazy val eventStreamMaxOutstandingMessages = opt[Int]("event_stream_max_outstanding_messages",
    descr = "The event stream buffers events, that are not already consumed by clients. " +
      "This number defines the number of events that get buffered on the server side, before messages are dropped.",
    noshort = true,
    default = Some(50)
  )
  def executor: Executor = Executor.dispatch(defaultExecutor())
  lazy val mesosRole = opt[String]("mesos_role",
    descr = "Mesos role for this framework. " +
      "If set, Marathon receives resource offers for the specified role in addition to " +
      "resources with the role designation '*'.",
    default = None)
  // Roles we expect offers for: the configured role (if any) plus '*'.
  def expectedResourceRoles: Set[String] = mesosRole.get match {
    case Some(role) => Set(role, ResourceRole.Unreserved)
    case None => Set(ResourceRole.Unreserved)
  }
  lazy val defaultAcceptedResourceRolesSet = defaultAcceptedResourceRoles.get.getOrElse(expectedResourceRoles)
  lazy val defaultAcceptedResourceRoles = opt[String]("default_accepted_resource_roles",
    descr =
      "Default for the defaultAcceptedResourceRoles attribute of all app definitions" +
        " as a comma-separated list of strings. " +
        "This defaults to all roles for which this Marathon instance is configured to receive offers.",
    default = None,
    validate = validateDefaultAcceptedResourceRoles).map(parseDefaultAcceptedResourceRoles)
  private[this] def parseDefaultAcceptedResourceRoles(str: String): Set[String] =
    str.split(',').map(_.trim).toSet
  private[this] def validateDefaultAcceptedResourceRoles(str: String): Boolean = {
    val parsed = parseDefaultAcceptedResourceRoles(str)
    // throw exceptions for better error messages
    require(parsed.nonEmpty, "--default_accepted_resource_roles must not be empty")
    require(parsed.forall(expectedResourceRoles),
      "--default_accepted_resource_roles contains roles for which we will not receive offers: " +
        (parsed -- expectedResourceRoles).mkString(", "))
    true
  }
  // Task lifecycle timeouts (milliseconds).
  lazy val taskLaunchConfirmTimeout = opt[Long]("task_launch_confirm_timeout",
    descr = "Time, in milliseconds, to wait for a task to enter " +
      "the TASK_STAGING state before killing it.",
    default = Some(300000L))
  lazy val taskLaunchTimeout = opt[Long]("task_launch_timeout",
    descr = "Time, in milliseconds, to wait for a task to enter " +
      "the TASK_RUNNING state before killing it.",
    default = Some(300000L)) // 300 seconds (5 minutes)
  lazy val taskReservationTimeout = opt[Long]("task_reservation_timeout",
    descr = "Time, in milliseconds, to wait for a new reservation to be acknowledged " +
      "via a received offer before deleting it.",
    default = Some(20000L)) // 20 seconds
  // Periodic background work: reconciliation and app scaling.
  lazy val reconciliationInitialDelay = opt[Long]("reconciliation_initial_delay",
    descr = "This is the length of time, in milliseconds, before Marathon " +
      "begins to periodically perform task reconciliation operations",
    default = Some(15000L)) // 15 seconds
  lazy val reconciliationInterval = opt[Long]("reconciliation_interval",
    descr = "This is the length of time, in milliseconds, between task " +
      "reconciliation operations.",
    default = Some(600000L)) // 600 seconds (10 minutes)
  lazy val scaleAppsInitialDelay = opt[Long]("scale_apps_initial_delay",
    descr = "This is the length of time, in milliseconds, before Marathon " +
      "begins to periodically attempt to scale apps.",
    default = Some(15000L)) // 15 seconds
  lazy val scaleAppsInterval = opt[Long]("scale_apps_interval",
    descr = "This is the length of time, in milliseconds, between task " +
      "scale apps.",
    default = Some(300000L)) // 300 seconds (5 minutes)
  @deprecated("marathon_store_timeout is no longer used and will be removed soon.", "v0.12")
  lazy val marathonStoreTimeout = opt[Long]("marathon_store_timeout",
    descr = "(deprecated) Maximum time, in milliseconds, to wait for persistent storage " +
      "operations to complete. This option is no longer used and " +
      "will be removed in a later release.",
    default = None)
  lazy val mesosUser = opt[String]("mesos_user",
    descr = "Mesos user for this framework.",
    default = new SystemProperties().get("user.name")) // Current logged in user
  lazy val frameworkName = opt[String]("framework_name",
    descr = "Framework name to register with Mesos.",
    default = Some("marathon"))
  lazy val artifactStore = opt[String]("artifact_store",
    descr = "URL to the artifact store. " +
      s"""Supported store types ${StorageProvider.examples.keySet.mkString(", ")}. """ +
      s"""Example: ${StorageProvider.examples.values.mkString(", ")}""",
    validate = StorageProvider.isValidUrl,
    noshort = true
  )
  lazy val mesosAuthenticationPrincipal = opt[String]("mesos_authentication_principal",
    descr = "Mesos Authentication Principal.",
    noshort = true
  )
  lazy val mesosAuthenticationSecretFile = opt[String]("mesos_authentication_secret_file",
    descr = "Mesos Authentication Secret.",
    noshort = true
  )
  lazy val envVarsPrefix = opt[String]("env_vars_prefix",
    descr = "Prefix to use for environment variables injected automatically into all started tasks.",
    noshort = true
  )
  //Internal settings, that are not intended for external use
  lazy val internalStoreBackend = opt[String]("internal_store_backend",
    descr = "The backend storage system to use. One of zk, mesos_zk, mem.",
    hidden = true,
    validate = Set("zk", "mesos_zk", "mem").contains,
    default = Some("zk")
  )
  lazy val maxApps = opt[Int]("max_apps",
    descr = "The maximum number of applications that may be created.",
    noshort = true
  )
  lazy val storeCache = toggle("store_cache",
    default = Some(true),
    noshort = true,
    descrYes = "(Default) Enable an in-memory cache for the storage layer.",
    descrNo = "Disable the in-memory cache for the storage layer. ",
    prefix = "disable_"
  )
  lazy val onElectedPrepareTimeout = opt[Long] ("on_elected_prepare_timeout",
    descr = "The timeout for preparing the Marathon instance when elected as leader.",
    default = Some(3 * 60 * 1000L) //3 minutes
  )
  lazy val internalMaxQueuedRootGroupUpdates = opt[Int]("max_queued_root_group_updates",
    descr = "INTERNAL TUNING PARAMETER: " +
      "The maximum number of root group updates that we queue before rejecting updates.",
    noshort = true,
    hidden = true,
    default = Some(500)
  )
}
| ss75710541/marathon | src/main/scala/mesosphere/marathon/MarathonConf.scala | Scala | apache-2.0 | 11,468 |
/*
* Copyright (c) 2012 Roberto Tyley
*
* This file is part of 'BFG Repo-Cleaner' - a tool for removing large
* or troublesome blobs from Git repositories.
*
* BFG Repo-Cleaner is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BFG Repo-Cleaner is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ .
*/
package com.madgag.git.bfg.cli
import com.madgag.git._
import com.madgag.git.bfg.cleaner._
import com.madgag.git.bfg.GitUtil._
/**
 * Command-line entry point for The BFG. Parses the arguments, validates the
 * target repository, optionally prunes previously-cleaned objects, and then
 * runs the repo rewriter with the configured cleaning tasks.
 */
object Main extends App {

  if (args.isEmpty) CLIConfig.parser.showUsage
  else CLIConfig.parser.parse(args, CLIConfig()) match {
    case None => // scopt has already reported the parse failure
    case Some(config) =>
      tweakStaticJGitConfig(config.massiveNonFileObjects)
      if (config.gitdir.isEmpty) {
        CLIConfig.parser.showUsage
        Console.err.println("Aborting : " + config.repoLocation + " is not a valid Git repository.\\n")
      } else {
        cleanRepo(config)
      }
  }

  /** Runs the cleaner against a validated repository location. */
  private def cleanRepo(config: CLIConfig): Unit = {
    implicit val repo = config.repo
    println("\\nUsing repo : " + repo.getDirectory.getAbsolutePath + "\\n")
    // do this before implicitly initiating big-blob search
    if (hasBeenProcessedByBFGBefore(repo)) {
      println("\\nThis repo has been processed by The BFG before! Will prune repo before proceeding - to avoid unnecessary cleaning work on unused objects...")
      repo.git.gc.call()
      println("Completed prune of old objects - will now proceed with the main job!\\n")
    }
    if (config.definesNoWork) {
      Console.err.println("Please specify tasks for The BFG :")
      CLIConfig.parser.showUsage
    } else {
      println("Found " + config.objectProtection.fixedObjectIds.size + " objects to protect")
      RepoRewriter.rewrite(repo, config.objectIdCleanerConfig)
      repo.close()
      println("\\n\\nHas the BFG saved you time? Support the BFG on BountySource: https://j.mp/fund-bfg\\n\\n")
    }
  }
}
} | NeilBryant/bfg-repo-cleaner | bfg/src/main/scala/com/madgag/git/bfg/cli/Main.scala | Scala | gpl-3.0 | 2,424 |
package com.catikkas.aiorserver
// Application entry point: boots the actor system and starts the root
// supervisor; everything else is created from Supervisor.preStart.
object Main {
  def main(args: Array[String]): Unit = {
    import akka.actor._
    val system = ActorSystem("aior-server")
    // discarding(...) presumably suppresses the unused-ActorRef warning —
    // TODO confirm against the project's utility definitions
    discarding { system.actorOf(Supervisor.props, "sup") }
  }
}
import akka.actor._
import akka.actor.CoordinatedShutdown._
import akka.event.LoggingReceive
/**
 * Root supervisor: spawns and watches the robot, server and ctrl-d actors,
 * and shuts the whole JVM down (via CoordinatedShutdown) when any of them
 * terminates.
 */
class Supervisor extends Actor with ActorLogging {
  import Supervisor._
  // Kick off child creation asynchronously via a self-message.
  override def preStart(): Unit = {
    self ! Initialize
  }
  def receive: Receive = notInitialized
  // Initial state: on Initialize, create and death-watch all children, then
  // switch to the initialized behavior.
  def notInitialized: Receive = LoggingReceive.withLabel("notInitialized") {
    case Initialize =>
      val robot = context watch context.actorOf(Robot.props, "robot")
      val server = context watch context.actorOf(Server.props(robot), "server")
      val ctrld = context watch context.actorOf(CtrlD.props, "ctrld")
      context become initialized(robot, server, ctrld)
  }
  // Steady state: any watched child terminating triggers a full shutdown,
  // with a diagnostic hint for the two known failure modes.
  def initialized(
      robot: ActorRef,
      server: ActorRef,
      ctrld: ActorRef
  ): Receive = LoggingReceive.withLabel("initialized") {
    case Terminated(`robot`) =>
      log.error("java.awt.Robot terminated. Please make sure environment is not headless.")
      terminate()
    case Terminated(`server`) =>
      log.error("Server failed to bind. Please check config.")
      terminate()
    case Terminated(`ctrld`) => terminate()
  }
  // Runs the coordinated shutdown, which exits the JVM (JvmExitReason).
  def terminate(): Unit = discarding {
    log.info("Shutting down now.")
    CoordinatedShutdown(context.system).run(JvmExitReason)
  }
}
// Companion: Props factory and the supervisor's protocol messages.
object Supervisor {
  def props = Props(new Supervisor)
  // Messages understood by Supervisor; Initialize triggers child creation.
  sealed trait Command
  final case object Initialize extends Command
}
| cemcatik/aior-server | src/main/scala/com/catikkas/aiorserver/Main.scala | Scala | mit | 1,601 |
package org.dberg.hubot
import org.scalatest.DoNotDiscover
import scodec.codecs.ImplicitCodecs
// Excluded from automatic discovery; run explicitly from an aggregating suite.
@DoNotDiscover
class BrainTestSuite extends SpecBase with ImplicitCodecs {
  // Brain (key/value store) service under test, from the shared hubot fixture.
  val brain = hubot.brainService
  "Hubot Brain" should "set a string correctly" in {
    brain.set[String]("testkey", "testvalue")
    val result = hubot.brainService.get[String]("testkey").getOrElse("failed")
    assert(result == "testvalue")
  }
  // Note: reuses "testkey", overwriting the value from the previous test.
  "Hubot Brain" should "set a List[String] correctly" in {
    brain.set[List[String]]("testkey", List("testvalue1", "testvalue2"))
    val result = brain.get[List[String]]("testkey").getOrElse(List())
    assert(result == List("testvalue1", "testvalue2"))
  }
  "Hubot Brain" should "set an Int correctly" in {
    brain.set[Int]("intkey", 1000)
    val result = brain.get[Int]("intkey").getOrElse(0)
    assert(result == 1000)
  }
}
package org.vvcephei.scalaofx.client
import scala.io.Source
import org.vvcephei.scalaofx.lib.parser.TopLevelOfxMessageParser
import org.vvcephei.scalaofx.lib.model.response.{BankStatementResponse, BankStatement}
object SourceClient {
  /** Reads the whole source as OFX text and extracts the bank statements. */
  def bankStatements(source: Source): BankStatementResponse = {
    val ofxText = source.mkString
    val message = TopLevelOfxMessageParser.parse(ofxText)
    BankStatement.fromOfx(message.ofx)
  }
}
| vvcephei/scala-ofx | src/main/scala/org/vvcephei/scalaofx/client/SourceClient.scala | Scala | apache-2.0 | 407 |
package io.eels.datastream
import io.eels.Row
case class IteratorAction(ds: DataStream) {
  /** Materializes the whole stream, then exposes the rows as an iterator. */
  def execute: Iterator[Row] = {
    val rows = ds.toVector
    rows.iterator
  }
}
| eel-lib/eel | eel-core/src/main/scala/io/eels/datastream/IteratorAction.scala | Scala | mit | 146 |
package com.twitter.util
import com.twitter.util.Base64Long.{fromBase64, StandardBase64Alphabet, toBase64}
import org.junit.runner.RunWith
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.prop.GeneratorDrivenPropertyChecks
/** Shared fixtures for [[Base64LongTest]]: boundary inputs, alphabet
  * generators, and the expected encoded length of a value.
  */
object Base64LongTest {
  // Values at the edges of digit-count transitions (powers of 64) plus
  // the extremes of the Long range.
  private val BoundaryValues =
    Seq(0L, 1L, 63L, 64L, 4095L, 4096L, -1L, Long.MinValue, Long.MaxValue)
  private type Alphabet = PartialFunction[Int, Char]

  /** Generates a 64-character alphabet of distinct characters drawn from `g`.
    * Bails out after 10000 draws so a generator that cannot produce 64
    * distinct characters does not loop forever.
    */
  private def genAlphabet(g: Gen[Char]): Gen[Alphabet] = {
    def go(seen: List[Char], n: Int, iterations: Int): Gen[Alphabet] =
      if (n > 0 && iterations < 10000) {
        g.flatMap { c =>
          if (seen.contains(c)) go(seen, n, iterations + 1)
          else go(c :: seen, n - 1, iterations + 1)
        }
      } else {
        Gen.const(Base64Long.alphabet(seen))
      }
    go(Nil, 64, 0)
  }
  // Explicit type annotation: implicit definitions should always carry one so
  // implicit resolution does not depend on type-inference order.
  private implicit val arbAlphabet: Arbitrary[Alphabet] =
    Arbitrary[Alphabet](Gen.oneOf(
      Gen.const(StandardBase64Alphabet),
      // An alphabet that should trigger the general alphabet inversion
      // code
      genAlphabet(arbitrary[Char]),
      // An alphabet that should trigger the specialized alphabet
      // inversion code
      genAlphabet(Gen.oneOf(0.toChar.until(128.toChar).filterNot(Character.isISOControl)))
    ))

  /** Number of base-64 digits needed to represent `n` (treated as unsigned). */
  private def expectedLength(n: Long): Int = {
    var k = n
    var i = 0
    while (k != 0) {
      i += 1
      assert(i < 12) // an unsigned 64-bit value needs at most 11 base-64 digits
      k = k >>> 6
    }
    i.max(1) // The special case representation of zero has length 1
  }
}
@RunWith(classOf[JUnitRunner])
class Base64LongTest extends FunSuite with GeneratorDrivenPropertyChecks {
  import Base64LongTest._

  /** Runs `f` over the boundary values with the standard alphabet, over the
    * boundary values with generated alphabets, and over fully generated
    * (alphabet, value) pairs.
    */
  private[this] def forAllLongs(f: (Alphabet, Long) => Unit): Unit = {
    BoundaryValues.foreach(f(StandardBase64Alphabet, _))
    forAll((a: Alphabet) => BoundaryValues.foreach(f(a, _)))
    forAll(f)
  }
  test("toBase64 properly converts zero") {
    assert(toBase64(0) == "A")
    val b = new StringBuilder
    forAll { (a: Alphabet) =>
      b.setLength(0)
      toBase64(b, 0, a)
      // Zero must render as the single digit mapped from index 0.
      assert(b.result == a(0).toString)
    }
  }
  test("toBase64 properly converts a large number") {
    assert(toBase64(202128261025763330L) == "LOGpUdghAC")
  }
  test("toBase64 uses the expected number of digits") {
    BoundaryValues.foreach { (n: Long) =>
      assert(toBase64(n).length == expectedLength(n))
    }
    forAll((n: Long) => assert(toBase64(n).length == expectedLength(n)))
    val b = new StringBuilder
    forAllLongs { (a, n) =>
      b.setLength(0)
      toBase64(b, n, a)
      assert(b.length == expectedLength(n))
    }
  }
  test("fromBase64 is the inverse of toBase64") {
    BoundaryValues.foreach { l =>
      assert(l == fromBase64(toBase64(l)))
    }
    // The builder is intentionally never cleared: decoding uses explicit
    // (start, end) offsets into the accumulated buffer.
    val b = new StringBuilder
    forAllLongs { (a, n) =>
      val inv = Base64Long.invertAlphabet(a)
      val start = b.length
      toBase64(b, n, a)
      assert(fromBase64(b, start, b.length, inv) == n)
      // Leading zeroes are dropped
      val start2 = b.length
      b.append(a(0))
      toBase64(b, n, a)
      assert(fromBase64(b, start2, b.length, inv) == n)
    }
  }
  test("fromBase64 throws an IllegalArgumentException exception for characters out of range") {
    forAll { (s: String, a: Alphabet) =>
      if (s.exists(!a.isDefinedAt(_))) {
        assertThrows[IllegalArgumentException](fromBase64(s))
      }
    }
  }
  test("fromBase64 throws an ArithmeticException when overflow is encountered") {
    // 2 ^ 64, or (1 << 64) which is an overflow
    // Q = 16 or 2^4 and each of the 10 'A's is 64 or 2^6
    val twoToThe64th = "QAAAAAAAAAA"
    assertThrows[ArithmeticException] {
      fromBase64(twoToThe64th)
    }
  }
}
| BuoyantIO/twitter-util | util-core/src/test/scala/com/twitter/util/Base64LongTest.scala | Scala | apache-2.0 | 3,784 |
package info.folone.scala.poi
import org.apache.poi.hssf.usermodel.HSSFFont.FONT_ARIAL
import org.apache.poi.ss.usermodel.{Font => POIFont, DataFormat => POIDataFormat}
/** Cell style: the font and data format applied to a spreadsheet cell. */
case class CellStyle(font: Font, dataFormat: DataFormat)
case class Font(name: String = FONT_ARIAL, bold: Boolean = false, color: Short = POIFont.COLOR_NORMAL) {
  /** Copies this font's name, weight and color onto `pf` and returns `pf`. */
  def appliedTo(pf: POIFont): POIFont = {
    pf.setFontName(name)
    pf.setBold(bold)
    pf.setColor(color)
    pf
  }
}
case class DataFormat(format: String) {
  /** Resolves this format string to its index in the given POI data format table. */
  def appliedTo(poiDataFormat: POIDataFormat): Short = {
    poiDataFormat.getFormat(format)
  }
}
| folone/poi.scala | src/main/scala/info.folone/scala.poi/CellStyle.scala | Scala | apache-2.0 | 598 |
package db
import java.util.UUID
import io.flow.dependency.v0.models.{Library, Organization, Visibility}
import util.DependencySpec
/** DB-backed spec for LibrariesDao: lookups, filtering, form validation, and
  * authorization driven by the library's resolver visibility.
  */
class LibrariesDaoSpec extends DependencySpec {
  private[this] lazy val org: Organization = createOrganization()

  // Thin wrapper that defaults every filter so individual tests read clearly.
  private[this] def findAll(
    auth: Authorization = Authorization.All,
    id: Option[String] = None,
    ids: Option[Seq[String]] = None,
    organizationId: Option[String] = None,
    resolverId: Option[String] = None,
    prefix: Option[String] = None,
    limit: Option[Long] = None,
    offset: Long = 0,
  ): Seq[Library] = {
    librariesDao.findAll(
      auth,
      id = id,
      ids = ids,
      organizationId = organizationId,
      resolverId = resolverId,
      prefix = prefix,
      limit = limit,
      offset = offset,
    )
  }
  "findByGroupIdAndArtifactId" in {
    val library = createLibrary(org)
    librariesDao.findByGroupIdAndArtifactId(
      Authorization.All,
      library.groupId,
      library.artifactId
    ).map(_.id) must be(Some(library.id))
    // Both coordinates must match; perturbing either yields no result.
    librariesDao.findByGroupIdAndArtifactId(
      Authorization.All,
      library.groupId + "-2",
      library.artifactId
    ) must be (None)
    librariesDao.findByGroupIdAndArtifactId(
      Authorization.All,
      library.groupId,
      library.artifactId + "-2"
    ) must be (None)
  }
  "findById" in {
    val library = createLibrary(org)
    librariesDao.findById(Authorization.All, library.id).map(_.id) must be(
      Some(library.id)
    )
    librariesDao.findById(Authorization.All, UUID.randomUUID.toString) must be(None)
  }
  "findAll by ids" in {
    val library1 = createLibrary(org)
    val library2 = createLibrary(org)
    // Results come back ordered by group id.
    findAll(ids = Some(Seq(library1.id, library2.id))).map(_.id) must be(
      Seq(library1, library2).sortWith { (x,y) => x.groupId.toString < y.groupId.toString }.map(_.id)
    )
    findAll(ids = Some(Nil)) must be(Nil)
    findAll(ids = Some(Seq(UUID.randomUUID.toString))) must be(Nil)
    // Unknown ids are silently dropped from the result.
    findAll(ids = Some(Seq(library1.id, UUID.randomUUID.toString))).map(_.id) must be(Seq(library1.id))
  }
  "findAll by resolver" in {
    val resolver = createResolver(org)
    val form = createLibraryForm(org).copy(resolverId = resolver.id)
    val library = createLibrary(org)(form)
    findAll(resolverId = Some(resolver.id)).map(_.id) must be(Seq(library.id))
  }
  "findAll by prefix" in {
    val org = createOrganization()
    val resolver = createResolver(org)
    val library1 = createLibrary(org)(
      createLibraryForm(org).copy(resolverId = resolver.id, artifactId = "foo-bar")
    )
    val library2 = createLibrary(org)(
      createLibraryForm(org).copy(resolverId = resolver.id, artifactId = "foo-baz")
    )
    // Helper: ids of this org's libraries whose artifact id starts with prefix.
    def ids(prefix: String) = {
      findAll(
        Authorization.All,
        organizationId = Some(org.id),
        prefix = Some(prefix),
      ).map(_.id).sorted
    }
    ids("foo") must equal(Seq(library1.id, library2.id).sorted)
    ids("foo-bar") must equal(Seq(library1.id))
    ids("foo-baz") must equal(Seq(library2.id))
    ids(createTestId()) must be(Nil)
  }
  "create" must {
    "validates empty group id" in {
      val form = createLibraryForm(org).copy(groupId = "   ")
      librariesDao.validate(form) must be(
        Seq("Group ID cannot be empty")
      )
    }
    "validates empty artifact id" in {
      val form = createLibraryForm(org).copy(artifactId = "   ")
      librariesDao.validate(form) must be(
        Seq("Artifact ID cannot be empty")
      )
    }
    "validates duplicates" in {
      val library = createLibrary(org)
      val form = createLibraryForm(org).copy(
        groupId = library.groupId,
        artifactId = library.artifactId
      )
      librariesDao.validate(form) must be(
        Seq("Library with this group id and artifact id already exists")
      )
    }
  }
  "authorization" must {
    "allow anybody to access a public library" in {
      val user = createUser()
      val org = createOrganization(user = user)
      val resolver = createResolver(org, user) (
        createResolverForm(org).copy(visibility = Visibility.Public)
      )
      val lib = createLibrary(org, user = user)(createLibraryForm(org)(resolver = resolver))
      // Every authorization level can see a library on a public resolver.
      findAll(Authorization.PublicOnly, id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.Organization(org.id), id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.Organization(createOrganization().id), id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.User(user.id), id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
    }
    "allow only users of an org to access a library w/ a private resolver" in {
      val user = createUser()
      val org = createOrganization(user = user)
      val resolver = createResolver(org, user) (
        createResolverForm(org).copy(visibility = Visibility.Private)
      )
      val lib = createLibrary(org, user = user)(createLibraryForm(org)(resolver = resolver))
      lib.resolver.visibility must be(Visibility.Private)
      // Only Authorization.All, the owning org, and its members see the library.
      findAll(Authorization.PublicOnly, id = Some(lib.id))must be(Nil)
      findAll(id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.Organization(org.id), id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.Organization(createOrganization().id), id = Some(lib.id))must be(Nil)
      findAll(Authorization.User(user.id), id = Some(lib.id)).map(_.id) must be(Seq(lib.id))
      findAll(Authorization.User(createUser().id), id = Some(lib.id)) must be(Nil)
    }
  }
}
| flowcommerce/dependency | api/test/db/LibrariesDaoSpec.scala | Scala | mit | 5,755 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval.tracing
import monix.eval.Coeval
import scala.reflect.NameTransformer
/**
* All Credits to https://github.com/typelevel/cats-effect and https://github.com/RaasAhsan
*/
final case class CoevalTrace(events: List[CoevalEvent], captured: Int, omitted: Int) {
  import CoevalTrace._

  /** Prints the rendered trace to stderr, suspended in a Coeval. */
  def printFiberTrace(options: PrintingOptions = PrintingOptions.Default): Coeval[Unit] =
    Coeval(System.err.println(showFiberTrace(options)))

  /** Renders the captured trace as a tree-drawing-style string; `options`
    * selects the compact one-line-per-frame form or full stack traces.
    */
  def showFiberTrace(options: PrintingOptions = PrintingOptions.Default): String = {
    // Box-drawing characters used to draw the tree gutter.
    val TurnRight = "╰"
    val InverseTurnRight = "╭"
    val Junction = "├"
    val Line = "│"
    val acc0 = s"CoevalTrace: $captured frames captured\\n"
    if (options.showFullStackTraces) {
      // Full mode: each event renders its (windowed) stack trace.
      val stackTraces = events.collect { case e: CoevalEvent.StackTrace => e }
      val acc1 = stackTraces.zipWithIndex
        .map {
          case (st, index) =>
            // Tag is the decoded method name of the first relevant frame.
            val tag = getOpAndCallSite(st.stackTrace)
              .map {
                case (methodSite, _) =>
                  NameTransformer.decode(methodSite.getMethodName)
              }
              .getOrElse("(...)")
            val op = if (index == 0) s"$InverseTurnRight $tag\\n" else s"$Junction $tag\\n"
            // Window of the stack trace controlled by the printing options.
            val relevantLines = st.stackTrace
              .slice(options.ignoreStackTraceLines, options.ignoreStackTraceLines + options.maxStackTraceLines)
            val lines = relevantLines.zipWithIndex
              .map {
                case (ste, i) =>
                  val junc = if (i == relevantLines.length - 1) TurnRight else Junction
                  val codeLine = renderStackTraceElement(ste)
                  s"$Line $junc $codeLine"
              }
              .mkString("", "\\n", "\\n")
            s"$op$lines$Line"
        }
        .mkString("\\n")
      val acc2 = if (omitted > 0) {
        "\\n" + TurnRight + s" ... ($omitted frames omitted)\\n"
      } else "\\n" + TurnRight + "\\n"
      acc0 + acc1 + acc2
    } else {
      // Compact mode: one "op @ location" line per event.
      val acc1 = events.zipWithIndex
        .map {
          case (event, index) =>
            val junc = if (index == events.length - 1 && omitted == 0) TurnRight else Junction
            val message = event match {
              case ev: CoevalEvent.StackTrace => {
                getOpAndCallSite(ev.stackTrace)
                  .map {
                    case (methodSite, callSite) =>
                      val loc = renderStackTraceElement(callSite)
                      val op = NameTransformer.decode(methodSite.getMethodName)
                      s"$op @ $loc"
                  }
                  .getOrElse("(...)")
              }
            }
            s" $junc $message"
        }
        .mkString(acc0, "\\n", "")
      val acc2 = if (omitted > 0) {
        acc1 + "\\n " + TurnRight + s" ... ($omitted frames omitted)"
      } else acc1
      acc2 + "\\n"
    }
  }
}
private[eval] object CoevalTrace {
  /** Finds the first adjacent (methodSite, callSite) frame pair whose call
    * site falls outside the internal packages in `stackTraceFilter`.
    */
  def getOpAndCallSite(frames: List[StackTraceElement]): Option[(StackTraceElement, StackTraceElement)] =
    frames
      .sliding(2)
      .collect {
        case a :: b :: Nil => (a, b)
      }
      .find {
        case (_, callSite) =>
          !stackTraceFilter.exists(callSite.getClassName.startsWith(_))
      }

  // Renders a frame as "com.Foo.method (Foo.scala:42)".
  private def renderStackTraceElement(ste: StackTraceElement): String = {
    val methodName = demangleMethod(ste.getMethodName)
    s"${ste.getClassName}.$methodName (${ste.getFileName}:${ste.getLineNumber})"
  }

  // Strips the compiler's $anonfun$name$N lambda mangling back to `name`.
  private def demangleMethod(methodName: String): String =
    anonfunRegex.findFirstMatchIn(methodName) match {
      case Some(mat) => mat.group(1)
      case None => methodName
    }
  private[this] val anonfunRegex = "^\\\\$+anonfun\\\\$+(.+)\\\\$+\\\\d+$".r
  // Class-name prefixes considered runtime/internal noise when locating
  // the user-facing call site.
  private[this] val stackTraceFilter = List(
    "monix.",
    "cats.effect.",
    "cats.",
    "sbt.",
    "java.",
    "sun.",
    "scala."
  )
}
| monifu/monifu | monix-eval/shared/src/main/scala/monix/eval/tracing/CoevalTrace.scala | Scala | apache-2.0 | 4,539 |
package bootstrap.liftweb
import _root_.net.liftweb.common._
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import Helpers._
import _root_.net.liftweb.mapper._
import _root_.java.sql.{Connection, DriverManager}
import _root_.com.helloscalate.model._
import net.liftweb.scalate.ScalateView
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot {
  /** Lift bootstrap hook: wires the DB connection, Scalate views, schema, and locale. */
  def boot {
    // add the connection manager if there's not already a JNDI connection defined
    if (!DB.jndiJdbcConnAvailable_?) DB.defineConnectionManager(DefaultConnectionIdentifier, DBVendor)
    // Register Scalate as a template view resolver
    val scalateView = new ScalateView
    scalateView.register
    // Update the database schema to be in sync
    Schemifier.schemify(true, Schemifier.infoF _, User)
    // The locale is either calculated based on the incoming user or
    // based on the http request
    LiftRules.localeCalculator = r => User.currentUser.map(_.locale.isAsLocale).openOr(LiftRules.defaultLocaleCalculator(r))
    // Build SiteMap (currently disabled)
    /*
    val entries = Menu(Loc("Home", List("index"), "Home")) ::
      Menu(Loc("Request Details", List("request"), "Request Details")) ::
      User.sitemap
    LiftRules.setSiteMap(SiteMap(entries:_*))
    */
  }
}
/** Connection manager backed by an embedded Apache Derby database. */
object DBVendor extends ConnectionManager {

  /** Opens a new Derby connection; returns Empty (after printing the stack
    * trace) if the driver fails to load or the connection cannot be opened.
    */
  def newConnection(name: ConnectionIdentifier): Box[Connection] =
    try {
      Class.forName("org.apache.derby.jdbc.EmbeddedDriver")
      Full(DriverManager.getConnection("jdbc:derby:lift_example;create=true"))
    } catch {
      case e: Exception =>
        e.printStackTrace
        Empty
    }

  /** Closes the given connection. */
  def releaseConnection(conn: Connection) { conn.close }
}
| wsaccaco/lift | examples/helloscalate/src/main/scala/bootstrap/liftweb/Boot.scala | Scala | apache-2.0 | 1,783 |
import sbt._
import scala.collection.JavaConverters._
import org.mozilla.javascript.tools.shell.{Global}
import org.mozilla.javascript.{Context,Scriptable,ScriptableObject,Callable,NativeObject}
import ScriptableObject.READONLY
import com.google.javascript.jscomp.{Compiler,CompilerOptions,SourceFile,VariableRenamingPolicy}
case class RequireJSWrap(startFile: File, endFile: File) {
  /** Converts this wrap config to a Rhino JS object with read-only
    * `startFile`/`endFile` path properties, as expected by r.js.
    */
  def toJsObject(scope: Scriptable): Scriptable = {
    val ctx = Context.getCurrentContext()
    val obj = ctx.newObject(scope)
    ScriptableObject.defineProperty(obj, "startFile", startFile.getPath, READONLY)
    ScriptableObject.defineProperty(obj, "endFile", endFile.getPath, READONLY)
    obj
  }
}
/** r.js optimizer configuration, convertible to the JS object r.js expects. */
case class RequireJSConfig(
    logLevel: Int,
    baseUrl: File,
    mainConfigFile: File,
    name: String,
    include: List[String],
    wrapShim: Boolean,
    wrap: RequireJSWrap,
    optimize: String,
    out: File) {
  /** Mirrors every field into a Rhino JS object with read-only properties. */
  def toJsObject(scope: Scriptable): Scriptable = {
    val ctx = Context.getCurrentContext()
    val obj = ctx.newObject(scope)
    // JS arrays must be created through the Rhino context.
    val include = ctx.newArray(scope, this.include.toArray: Array[AnyRef])
    ScriptableObject.defineProperty(obj, "logLevel", logLevel, READONLY)
    ScriptableObject.defineProperty(obj, "baseUrl", baseUrl.getPath, READONLY)
    ScriptableObject.defineProperty(obj, "mainConfigFile", mainConfigFile.getPath, READONLY)
    ScriptableObject.defineProperty(obj, "name", name, READONLY)
    ScriptableObject.defineProperty(obj, "include", include, READONLY)
    ScriptableObject.defineProperty(obj, "wrapShim", wrapShim, READONLY)
    ScriptableObject.defineProperty(obj, "wrap", wrap.toJsObject(scope), READONLY)
    ScriptableObject.defineProperty(obj, "optimize", optimize, READONLY)
    ScriptableObject.defineProperty(obj, "out", out.getPath, READONLY)
    obj
  }
}
class RequireJS(log: Logger) extends Rhino {

  /** Builds a Rhino scope with the shell globals and r.js loaded as a library. */
  def rjsScope(ctx: Context): Scriptable = {
    val global = new Global()
    global.init(ctx)
    val scope = ctx.initStandardObjects(global, true)
    val arguments = ctx.newArray(scope, Array[AnyRef]())
    scope.defineProperty("arguments", arguments, ScriptableObject.DONTENUM)
    // Tells r.js to expose itself as a library rather than run as a CLI tool.
    scope.defineProperty("requirejsAsLib", true, ScriptableObject.DONTENUM)
    // r.js is bundled as a classpath resource.
    val rjs = new java.io.InputStreamReader(
      getClass.getClassLoader.getResourceAsStream("r.js"))
    ctx.evaluateReader(scope, rjs, "r.js", 1, null)
    scope
  }

  /** Runs require.optimize(config) and returns (output, ".min.js" output). */
  def optimize(config: RequireJSConfig): (File, File) = {
    withContext { ctx =>
      log.info(s"Optimizing and minifying sbt-requirejs source ${config.out}")
      val scope = rjsScope(ctx)
      val require = scope.get("require", scope).asInstanceOf[Scriptable]
      val optimize = require.get("optimize", scope).asInstanceOf[Callable]
      val args = Array[AnyRef](config.toJsObject(scope))
      optimize.call(ctx, scope, scope, args)
      val output = config.out
      val outputMin = file(output.getPath.stripSuffix("js") + "min.js")
      // NOTE(review): Closure-based minification below is deliberately bypassed;
      // the ".min.js" file is currently just a copy of the optimized output.
      IO.copyFile(output, outputMin)
      // val outputMin = minify(output)
      (output, outputMin)
    }
  }

  /** Minifies `input` with Closure Compiler into a sibling ".min.js" file;
    * aborts via sys.error if the compiler reports errors.
    */
  def minify(input: File): File = {
    val output = file(input.getPath.stripSuffix("js") + "min.js")
    val compiler = new Compiler
    val externs = Nil: List[SourceFile]
    val sources = SourceFile.fromFile(input) :: Nil
    val options = new CompilerOptions
    options.setLanguageIn(CompilerOptions.LanguageMode.ECMASCRIPT5)
    options.variableRenaming = VariableRenamingPolicy.ALL
    options.prettyPrint = false
    val result = compiler.compile(externs.asJava, sources.asJava, options)
    if (result.errors.nonEmpty) {
      result.errors.foreach(error => log.error(error.toString))
      sys.error(s"${result.errors.length} errors compiling $input")
    } else {
      // val warnings = result.warnings.filter(_.getType().key != "JSC_BAD_JSDOC_ANNOTATION")
      // warnings.foreach(warning => log.warn(warning.toString))
      IO.write(output, compiler.toSource)
      output
    }
  }
}
| sahat/bokeh | project/src/main/scala/RequireJsPlugin.scala | Scala | bsd-3-clause | 4,254 |
package com.mesosphere.cosmos.repository
import com.mesosphere.cosmos.HttpClient
import com.mesosphere.cosmos.error.CosmosException
import com.mesosphere.cosmos.error.RepositoryUriConnection
import com.mesosphere.cosmos.error.RepositoryUriSyntax
import com.mesosphere.cosmos.error.UniverseClientHttpError
import com.mesosphere.cosmos.rpc.v1.model.PackageRepository
import com.mesosphere.cosmos.test.CosmosIntegrationTestClient
import com.mesosphere.universe
import io.lemonlabs.uri.Uri
import io.lemonlabs.uri.dsl._
import com.twitter.util.Await
import com.twitter.util.Throw
import io.netty.handler.codec.http.HttpResponseStatus
import java.io.IOException
import java.net.MalformedURLException
import java.net.UnknownHostException
import org.scalatest.FreeSpec
import org.scalatest.Matchers
/** Integration spec for DefaultUniverseClient: URI validation, connection
  * failures, repo fetching across DC/OS versions, and retry behavior.
  */
final class UniverseClientSpec extends FreeSpec with Matchers {
  "UniverseClient" - {
    val universeClient = new DefaultUniverseClient(CosmosIntegrationTestClient.adminRouter)
    // DC/OS release version 1.8, built from its major/minor components.
    val version1Dot8 = {
      val (major, minor) = (1, 8)
      universe.v3.model.DcosReleaseVersion(
        universe.v3.model.DcosReleaseVersion.Version(major),
        List(universe.v3.model.DcosReleaseVersion.Version(minor))
      )
    }
    val baseRepoUri = "https://downloads.mesosphere.com/universe/dce867e9af73b85172d5a36bf8114c69b3be024e"
    def repository(repoFilename: String): PackageRepository = {
      PackageRepository("repo", baseRepoUri / repoFilename)
    }
    // v4-format repos live under a different fixture hash.
    def v4Repository(repoFilename: String): PackageRepository = {
      val baseRepoUri = "https://downloads.mesosphere.com/universe/ebdcd8b7522e37f33184d343ae2a02ad0b63903b/repo"
      PackageRepository("repo", baseRepoUri / repoFilename)
    }
    "apply()" - {
      "URI/URL syntax" - {
        "relative URI" in {
          val expectedRepo = PackageRepository(name = "FooBar", uri = Uri.parse("foo/bar"))
          val Throw(CosmosException(RepositoryUriSyntax(actualRepo, _), _, Some(causedBy))) =
            Await.result(universeClient(expectedRepo, version1Dot8).liftToTry)
          assertResult(expectedRepo)(actualRepo)
          assert(causedBy.isInstanceOf[IllegalArgumentException])
        }
        "unknown protocol" in {
          val expectedRepo = PackageRepository(name = "FooBar", uri = Uri.parse("foo://bar.com"))
          val Throw(CosmosException(RepositoryUriSyntax(actualRepo, _), _, Some(causedBy))) =
            Await.result(universeClient(expectedRepo, version1Dot8).liftToTry)
          assertResult(expectedRepo)(actualRepo)
          assert(causedBy.isInstanceOf[MalformedURLException])
        }
      }
      "Connection failure" in {
        val expectedRepo = PackageRepository(name = "BadRepo", uri = Uri.parse("http://foobar"))
        val Throw(CosmosException(RepositoryUriConnection(actualRepo, _), _, Some(causedBy))) =
          Await.result(universeClient(expectedRepo, version1Dot8).liftToTry)
        assertResult(expectedRepo)(actualRepo)
        assert(causedBy.isInstanceOf[IOException])
      }
    }
    "should be able to fetch" - {
      "1.10 json" in {
        val version = universe.v3.model.DcosReleaseVersionParser.parseUnsafe("1.10")
        val repoFilename = "repo-up-to-1.10.json"
        val repository = v4Repository(repoFilename)
        val repo = Await.result(universeClient(repository, version))
        getVersions(repo, "helloworld") shouldBe
          List(universe.v3.model.Version("0.4.0"), universe.v3.model.Version("0.4.1"))
      }
      "1.8 json" in {
        val version = universe.v3.model.DcosReleaseVersionParser.parseUnsafe("1.8-dev")
        val repo = Await.result(universeClient(repository("repo-up-to-1.8.json"), version))
        assertResult(List(
          universe.v3.model.Version("0.2.0-1"),
          universe.v3.model.Version("0.2.0-2")
        ))(
          getVersions(repo, "cassandra")
        )
      }
      "1.7 json" in {
        val version = universe.v3.model.DcosReleaseVersionParser.parseUnsafe("1.7")
        val repo = Await.result(universeClient(repository("repo-empty-v3.json"), version))
        assert(repo.packages.isEmpty)
      }
    }
    "should fail to fetch a nonexistent repo file" in {
      val version = universe.v3.model.DcosReleaseVersionParser.parseUnsafe("0.0")
      val repoUri = baseRepoUri / "doesnotexist.json"
      val expectedPkgRepo = PackageRepository("badRepo", repoUri)
      val result = universeClient(expectedPkgRepo, version)
      val Throw(
        CosmosException(UniverseClientHttpError(actualPkgRepo, method, clientStatus, status), _, _)
      ) = Await.result(
        result.liftToTry
      )
      assertResult("GET")(method.getName)
      assertResult(expectedPkgRepo)(actualPkgRepo)
      // Upstream returns 403; the client surfaces it as a 500 to callers.
      assertResult(HttpResponseStatus.FORBIDDEN)(clientStatus)
      assertResult(HttpResponseStatus.INTERNAL_SERVER_ERROR)(status)
    }
    "should retry before failing to fetch a bad host" in {
      val version = universe.v3.model.DcosReleaseVersionParser.parseUnsafe("0.0")
      val repoUri = "https://something-that-is-never.valid" / "doesnotexist.json"
      val expectedPkgRepo = PackageRepository("badRepo", repoUri)
      val result = universeClient(expectedPkgRepo, version).liftToTry
      assertThrows[com.twitter.util.TimeoutException](
        Await.result(
          result,
          // We verify the future is retrying by ensuring it is not complete before retry duration
          HttpClient.RETRY_INTERVAL * (HttpClient.DEFAULT_RETRIES - 1).toLong
        )
      )
      val Throw(ex) = Await.result(result)
      assert(ex.isInstanceOf[CosmosException])
      val cosmosException = ex.asInstanceOf[CosmosException]
      assert(cosmosException.error.isInstanceOf[RepositoryUriConnection])
      cosmosException.causedBy shouldBe defined
      assert(cosmosException.causedBy.get.isInstanceOf[UnknownHostException])
    }
  }

  /** Versions of the named package in the repository, in sorted package order. */
  private[this] def getVersions(
    repository: universe.v4.model.Repository,
    name: String
  ): List[universe.v3.model.Version] = {
    repository.packages
      .filter(_.name == name)
      .sorted
      .map(_.version)
  }
}
| dcos/cosmos | cosmos-integration-tests/src/main/scala/com/mesosphere/cosmos/repository/UniverseClientSpec.scala | Scala | apache-2.0 | 6,096 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.photo
import squants._
import squants.time.{ Seconds, TimeIntegral }
/**
* @author garyKeorkunian
* @since 0.1
*
* @param value value in [[squants.photo.LumenSeconds]]
*/
final class LuminousEnergy private (val value: Double, val unit: LuminousEnergyUnit)
    extends Quantity[LuminousEnergy]
    with TimeIntegral[LuminousFlux] {
  def dimension = LuminousEnergy
  // TimeIntegral support: luminous energy per unit time is luminous flux (lumens).
  protected def timeDerived = Lumens(toLumenSeconds)
  protected[squants] def time = Seconds(1)
  def toLumenSeconds = to(LumenSeconds)
}
/** Dimension descriptor and factory for [[LuminousEnergy]] quantities. */
object LuminousEnergy extends Dimension[LuminousEnergy] {
  private[photo] def apply[A](n: A, unit: LuminousEnergyUnit)(implicit num: Numeric[A]) = new LuminousEnergy(num.toDouble(n), unit)
  // Delegates to Dimension.parse for dynamically-typed values.
  def apply(value: Any) = parse(value)
  def name = "LuminousEnergy"
  def primaryUnit = LumenSeconds
  def siUnit = LumenSeconds
  def units = Set(LumenSeconds)
}
/** Base trait for units measuring [[LuminousEnergy]]. */
trait LuminousEnergyUnit extends UnitOfMeasure[LuminousEnergy] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = LuminousEnergy(num.toDouble(n), this)
}
/** Lumen-second (lm⋅s): the SI and primary unit of luminous energy. */
object LumenSeconds extends LuminousEnergyUnit with PrimaryUnit with SiUnit {
  val symbol = "lm⋅s"
}
object LuminousEnergyConversions {
  lazy val lumenSecond = LumenSeconds(1)

  /** Enriches numeric types with e.g. `5.lumenSeconds`. */
  implicit class LuminousEnergyConversions[A](n: A)(implicit num: Numeric[A]) {
    def lumenSeconds = LumenSeconds(n)
  }

  implicit object LuminousEnergyNumeric extends AbstractQuantityNumeric[LuminousEnergy](LuminousEnergy.primaryUnit)
}
| typelevel/squants | shared/src/main/scala/squants/photo/LuminousEnergy.scala | Scala | apache-2.0 | 2,010 |
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md
// MAGIC # Detecting Persons of Interest to OIL/GAS Price Trends
// MAGIC
// MAGIC Johannes Graner ([LinkedIn](https://www.linkedin.com/in/johannes-graner-475677129/)), Albert Nilsson ([LinkedIn](https://www.linkedin.com/in/albert-nilsson-09b62b191/)) and Raazesh Sainudiin ([LinkedIn](https://www.linkedin.com/in/raazesh-sainudiin-45955845/))
// MAGIC
// MAGIC 2020, Uppsala, Sweden
// MAGIC
// MAGIC This project was supported by Combient Mix AB through summer internships at:
// MAGIC
// MAGIC Combient Competence Centre for Data Engineering Sciences,
// MAGIC Department of Mathematics,
// MAGIC Uppsala University, Uppsala, Sweden
// MAGIC
// MAGIC ---
// MAGIC
// MAGIC
// MAGIC Here we will build a pipeline to investigate possible persons, organisations and other entities related to oil and gas that are reported in mass media around the world and their possible co-occurrence with certain trends and trend-reversals in oil price.
// MAGIC
// MAGIC
// MAGIC ***Steps:***
// MAGIC
// MAGIC - Step 0. Setting up and loading GDELT delta.io tables
// MAGIC - Step 1. Create a graph of persons related to gas and oil
// MAGIC - Step 2. Extract communties
// MAGIC - Step 3. Find key Influencers
// MAGIC - Step 4. Visualisation
// MAGIC
// MAGIC **Resources:**
// MAGIC
// MAGIC This builds on the following libraries and its antecedents therein:
// MAGIC
// MAGIC - [https://github.com/aamend/spark-gdelt](https://github.com/aamend/spark-gdelt)
// MAGIC - [https://github.com/lamastex/spark-trend-calculus](https://github.com/lamastex/spark-trend-calculus)
// MAGIC
// MAGIC
// MAGIC **This work was inspired by:**
// MAGIC
// MAGIC - Antoine Aamennd's [texata-2017](https://github.com/aamend/texata-r2-2017)
// MAGIC - Andrew Morgan's [Trend Calculus Library](https://github.com/ByteSumoLtd/TrendCalculus-lua)
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 0. Setting up and loading GDELT delta.io tables
// COMMAND ----------
// DBTITLE 0,Import packages
import spark.implicits._
import io.delta.tables._
import com.aamend.spark.gdelt._
import org.apache.spark.sql.{Dataset,DataFrame,SaveMode}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.expressions._
import org.graphframes.GraphFrame
import org.apache.spark.sql.SparkSession
// COMMAND ----------
// MAGIC %run "./000b_gdelt_utils"
// COMMAND ----------
// Root paths for the GDELT v1 delta tables and for graph checkpoints.
val rootPath = GdeltUtils.getGdeltV1Path
val rootCheckpointPath = GdeltUtils.getPOICheckpointPath
val gkgPath = rootPath+"gkg"
// COMMAND ----------
// Load the GKG (Global Knowledge Graph) events as a typed Dataset.
val gkg_v1 = spark.read.format("delta").load(gkgPath).as[GKGEventV1]
// COMMAND ----------
// Restrict to the study period and to articles tagged with gas/oil themes.
val gkg_v1_filt = gkg_v1.filter($"publishDate">"2013-04-01 00:00:00" && $"publishDate"<"2019-12-31 00:00:00")
val oil_gas_themeGKG = gkg_v1_filt.filter(c =>c.themes.contains("ENV_GAS") || c.themes.contains("ENV_OIL"))
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 1. Create a graph of people related to gas and oil
// COMMAND ----------
// MAGIC %md
// MAGIC The first step is to create a graph of people related to gas and oil, where each edge weight is the number of articles in which the two people are mentioned together.
// COMMAND ----------
// MAGIC %md
// MAGIC **Create the GraphFrame of Interest**
// COMMAND ----------
// DBTITLE 0,Creates the graph.
// Edges: one per ordered pair of distinct, non-empty persons co-mentioned in
// an article, weighted by the total number of articles mentioning both.
val edges = oil_gas_themeGKG.select($"persons",$"numArticles")
                  .withColumn("src",explode($"persons"))
                  .withColumn("dst",explode($"persons"))
                  .filter($"src".notEqual($"dst") && $"src" =!= "" && $"dst" =!= "")
                  .groupBy($"src",$"dst")
                  .agg(sum("numArticles").as("count"))
                  .toDF()
// Vertices: one per person, weighted by total article mentions.
val vertices = oil_gas_themeGKG.select($"persons",$"numArticles")
                  .withColumn("id",explode($"persons"))
                  .filter($"id" =!= "")
                  .drop($"persons")
                  .groupBy($"id")
                  .agg(sum("numArticles").as("numArticles"))
                  .toDF()
val pers_graph = GraphFrame(vertices,edges)
// COMMAND ----------
// MAGIC %md
// MAGIC **Count how many vertices and edges there is in our graph**
// COMMAND ----------
// DBTITLE 0,Counts how many vertices and edges there is in our graph
println("vertex count: " +pers_graph.vertices.count())
println("edge count: " + pers_graph.edges.count())
// COMMAND ----------
val fil_pers_graph = pers_graph.filterEdges($"count" >10).dropIsolatedVertices()
// COMMAND ----------
// MAGIC %md
// MAGIC **Count how many vertices and edges are in our graph after being filtered**
// COMMAND ----------
// DBTITLE 0,Counts how many vertices and edges there is in our graph after being filtered
println("filtered vertex count: " +fil_pers_graph.vertices.count())
println("filtered edge count: " + fil_pers_graph.edges.count())
// COMMAND ----------
sc.setCheckpointDir(rootCheckpointPath)
// COMMAND ----------
// MAGIC %md
// MAGIC **Compute the connected components**
// COMMAND ----------
// DBTITLE 0,Computes the connected components
// Label every vertex with its connected-component id (adds a "component" column).
val comp_vertices = fil_pers_graph.connectedComponents.run()
// COMMAND ----------
// MAGIC %md
// MAGIC **Checkpoint**
// COMMAND ----------
// DBTITLE 0,Checkpoint
// Persist the (expensive) component assignment so later cells can restart from disk.
comp_vertices.write.parquet(rootCheckpointPath+"comp_vertices")
// COMMAND ----------
// Re-read the checkpointed vertices (re-declares comp_vertices — legal across
// notebook cells) and rebuild the graph with the original filtered edges.
val comp_vertices = spark.read.parquet(rootCheckpointPath+"comp_vertices")
val comp_graph = GraphFrame(comp_vertices,fil_pers_graph.edges)
// COMMAND ----------
// MAGIC %md
// MAGIC Note that almost all edges and vertices are in the connected component labelled 0, our giant component.
// COMMAND ----------
// DBTITLE 0,We can see that almost everything is in the connected component 0
// Component size distribution, largest first; component 0 is the giant component.
comp_graph.vertices.groupBy($"component").agg(count("component").as("count")).orderBy(desc("count")).show()
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out the graph to only focus on the giant component**
// COMMAND ----------
// DBTITLE 0,Filter out the graph looking only at the big component
// Keep only vertices of the giant component (edges to dropped vertices vanish too).
val big_comp_graph = comp_graph.filterVertices($"component" === 0)
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 2. Extract communities
// MAGIC
// MAGIC Next, let us extract communities within the giant component.
// MAGIC
// MAGIC There are many algorithms for community structure detection:
// MAGIC
// MAGIC - [https://en.wikipedia.org/wiki/Community_structure](https://en.wikipedia.org/wiki/Community_structure)
// MAGIC
// MAGIC We use a simple scalable one via label propagation here.
// COMMAND ----------
// MAGIC %md
// MAGIC **Apply label propagation to find interesting community structures**
// COMMAND ----------
// DBTITLE 0,Applying label propagation to find important communites
// Community detection via label propagation (10 iterations); adds a "label"
// column identifying each vertex's community.
val label_vertices = big_comp_graph.labelPropagation.maxIter(10).run()
// COMMAND ----------
// MAGIC %md
// MAGIC **Checkpoint**
// COMMAND ----------
// DBTITLE 0,Checkpoint
// Persist both sides of the labelled graph for restartability.
label_vertices.write.parquet(rootCheckpointPath+"label_vertices")
big_comp_graph.edges.write.parquet(rootCheckpointPath+"label_edges")
// COMMAND ----------
val label_vertices = spark.read.parquet(rootCheckpointPath+"label_vertices")
val label_edges = spark.read.parquet(rootCheckpointPath+"label_edges")
// COMMAND ----------
// Giant-component graph with community labels attached.
val label_graph = GraphFrame(label_vertices,label_edges)
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 3. Find key Influencers
// MAGIC
// MAGIC **Apply page rank to find the key influencers**
// COMMAND ----------
// DBTITLE 0,Applying page rank to find the key influencers
// PageRank (damping via resetProbability 0.15, run to tolerance 0.015); adds a
// "pagerank" column to vertices and a "weight" column to edges.
val com_rank_graph =label_graph.pageRank.resetProbability(0.15).tol(0.015).run()
// COMMAND ----------
// MAGIC %md
// MAGIC **Checkpoint**
// COMMAND ----------
// DBTITLE 0,Checkpoint
com_rank_graph.vertices.write.parquet(rootCheckpointPath+"com_rank_vertices")
com_rank_graph.edges.write.parquet(rootCheckpointPath+"com_rank_edges")
// COMMAND ----------
val com_rank_vertices = spark.read.parquet(rootCheckpointPath+"com_rank_vertices")
val com_rank_edges =spark.read.parquet(rootCheckpointPath+"com_rank_edges")
val com_rank_graph = GraphFrame(com_rank_vertices,com_rank_edges)
// COMMAND ----------
// Community size distribution, largest first.
com_rank_graph.vertices.groupBy($"label").agg(count($"label").as("count")).orderBy(desc("count")).show()
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 4. Visualisation
// MAGIC
// MAGIC **Look at the top three communities**
// COMMAND ----------
// DBTITLE 0,looked at three top communities
// Slice out the three largest communities by their propagation labels.
// NOTE(review): these label values are hard-coded from one labelPropagation
// run; the algorithm is not deterministic, so re-running upstream cells can
// invalidate them — recompute from the distribution above if needed.
val toplabel1 = com_rank_graph.filterVertices($"label" === 1520418423783L)
val toplabel2 = com_rank_graph.filterVertices($"label" === 8589934959L)
val toplabel3 =com_rank_graph.filterVertices($"label" === 1580547965452L)
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out the top 100 according to pagerank score**
// COMMAND ----------
// DBTITLE 0,Filter out the top 100 according to pagerank score
val toplabel1Filt = toplabel1.filterVertices($"pagerank" >=55.47527731815801)
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out edges to make the graph more comprehensible**
// COMMAND ----------
// DBTITLE 0,Filter out edges to make the graph more comprehensive.
val toplabel1FiltE = toplabel1Filt.filterEdges($"count">2000).dropIsolatedVertices()
// COMMAND ----------
// MAGIC %md
// MAGIC In the interactive d3 graph below, the size of circle correlates with pagerank score.
// COMMAND ----------
// DBTITLE 0,Size of circle correlates with pagerank score.
// Driver-side records for the d3 rendering. Field names of Edge must match the
// edge DataFrame columns (src, dst, count) for the .as[Edge] conversion below.
case class Edge(src: String, dst: String, count: Long)
// A person plus the pagerank-derived display size.
case class Node(name: String,importance: Double)
// d3 link: endpoints given as indices into the nodes array, plus a display weight.
case class Link(source: Int, target: Int, value: Long)
// Full payload serialised to JSON and handed to the d3 script.
case class Graph(nodes: Seq[Node], links: Seq[Link])
// Notebook helper that renders a force-directed graph with d3 v3 through
// displayHTML. This object (and the Edge/Node/Link/Graph case classes) is
// re-declared in later cells with different hard-coded scaling constants.
object graphs {
val sqlContext = SparkSession.builder().getOrCreate().sqlContext
import sqlContext.implicits._
// Collects the (pre-filtered, small) vertex and edge Datasets to the driver,
// converts edges into index-based links and renders the JSON via showGraph.
// NOTE(review): indexWhere returns -1 for an endpoint whose name is missing
// from `vertices` (e.g. dropped by filtering) — confirm d3 tolerates that.
def force(vertices: Dataset[Node],clicks: Dataset[Edge], height: Int = 100, width: Int = 960): Unit = {
val data = clicks.collect()
val nodes = vertices.collect()
val links = data.map { t =>
Link(nodes.indexWhere(_.name == t.src.replaceAll("_", " ")), nodes.indexWhere(_.name == t.dst.replaceAll("_", " ")), t.count / 20 + 1)
}
showGraph(height, width, Seq(Graph(nodes, links)).toDF().toJSON.first())
}
/**
* Displays a force directed graph using d3
* input: {"nodes": [{"name": "..."}], "links": [{"source": 1, "target": 2, "value": 0}]}
*/
def showGraph(height: Int, width: Int, graph: String): Unit = {
displayHTML(s"""
<style>
.node_circle {
stroke: #777;
stroke-width: 1.3px;
}
.node_label {
pointer-events: none;
}
.link {
stroke: #777;
stroke-opacity: .2;
}
.node_count {
stroke: #777;
stroke-width: 1.0px;
fill: #999;
}
text.legend {
font-family: Verdana;
font-size: 13px;
fill: #000;
}
.node text {
font-family: "Helvetica Neue","Helvetica","Arial",sans-serif;
font-size: function(d) {return (d.importance)+ "px"};
font-weight: 200;
}
</style>
<div id="clicks-graph">
<script src="//d3js.org/d3.v3.min.js"></script>
<script>
var graph = $graph;
var width = $width,
height = $height;
var color = d3.scale.category20();
var force = d3.layout.force()
.charge(-200)
.linkDistance(350)
.size([width, height]);
var svg = d3.select("#clicks-graph").append("svg")
.attr("width", width)
.attr("height", height);
force
.nodes(graph.nodes)
.links(graph.links)
.start();
var link = svg.selectAll(".link")
.data(graph.links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", function(d) { return Math.sqrt(d.value)/10; });
var node = svg.selectAll(".node")
.data(graph.nodes)
.enter().append("g")
.attr("class", "node")
.call(force.drag);
node.append("circle")
.attr("r", function(d) { return Math.sqrt(d.importance); })
.style("fill", function (d) {
if (d.name.startsWith("other")) { return color(1); } else { return color(2); };
})
node.append("text")
.attr("dx", function(d) { return (Math.sqrt(d.importance)*30)/Math.sqrt(1661.1815574713858); })
.attr("dy", ".35em")
.text(function(d) { return d.name });
//Now we are giving the SVGs co-ordinates - the force layout is generating the co-ordinates which this code is using to update the attributes of the SVG elements
force.on("tick", function () {
link.attr("x1", function (d) {
return d.source.x;
})
.attr("y1", function (d) {
return d.source.y;
})
.attr("x2", function (d) {
return d.target.x;
})
.attr("y2", function (d) {
return d.target.y;
});
d3.selectAll("circle").attr("cx", function (d) {
return d.x;
})
.attr("cy", function (d) {
return d.y;
});
d3.selectAll("text").attr("x", function (d) {
return d.x;
})
.attr("y", function (d) {
return d.y;
});
});
</script>
</div>
""")
}
// Usage notes, rendered as HTML in the notebook.
def help() = {
displayHTML("""
<p>
Produces a force-directed graph given a collection of edges of the following form:</br>
<tt><font color="#a71d5d">case class</font> <font color="#795da3">Edge</font>(<font color="#ed6a43">src</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">dest</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">count</font>: <font color="#a71d5d">Long</font>)</tt>
</p>
<p>Usage:<br/>
<tt><font color="#a71d5d">import</font> <font color="#ed6a43">d3._</font></tt><br/>
<tt><font color="#795da3">graphs.force</font>(</br>
<font color="#ed6a43">height</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">width</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">clicks</font>: <font color="#795da3">Dataset</font>[<font color="#795da3">Edge</font>])</tt>
</p>""")
}
}
// Render community 1; node "importance" (circle size) comes from pagerank.
graphs.force(
height = 800,
width = 1200,
clicks = toplabel1FiltE.edges.as[Edge],
vertices = toplabel1FiltE.vertices.select($"id".as("name"),$"pagerank".as("importance")).as[Node]
)
// COMMAND ----------
// MAGIC %md
// MAGIC 
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out the top 100 according to pagerank score**
// COMMAND ----------
// DBTITLE 0,Filter out the top 100 according to pagerank score
val toplabel2Filt = toplabel2.filterVertices($"pagerank" >=7.410990956624706)
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out low-weight edges and drop vertices with few connections to make the graph more comprehensible**
// COMMAND ----------
// DBTITLE 0,Filter out edges to and vertices with small amount of edges to make the graph more comprehensive.
val toplabel2FiltE = toplabel2Filt.filterEdges($"count">136).dropIsolatedVertices()
// COMMAND ----------
// MAGIC %md
// MAGIC **Nigerian cluster**
// COMMAND ----------
// DBTITLE 0,Nigerian cluster.
// Re-declaration of the d3 payload records for this notebook cell
// (each cell is compiled independently, so the definitions must be repeated).
case class Edge(src: String, dst: String, count: Long)
case class Node(name: String,importance: Double)
case class Link(source: Int, target: Int, value: Long)
case class Graph(nodes: Seq[Node], links: Seq[Link])
// Copy of the d3 helper for the "Nigerian cluster" cell. Identical to the
// first copy except for the text dx scaling constant (453.60...).
object graphs {
// val sqlContext = SQLContext.getOrCreate(org.apache.spark.SparkContext.getOrCreate()) /// fix
val sqlContext = SparkSession.builder().getOrCreate().sqlContext
import sqlContext.implicits._
// Collects vertices/edges to the driver and renders them as JSON via showGraph.
def force(vertices: Dataset[Node],clicks: Dataset[Edge], height: Int = 100, width: Int = 960): Unit = {
val data = clicks.collect()
val nodes = vertices.collect()
val links = data.map { t =>
Link(nodes.indexWhere(_.name == t.src.replaceAll("_", " ")), nodes.indexWhere(_.name == t.dst.replaceAll("_", " ")), t.count / 20 + 1)
}
showGraph(height, width, Seq(Graph(nodes, links)).toDF().toJSON.first())
}
/**
* Displays a force directed graph using d3
* input: {"nodes": [{"name": "..."}], "links": [{"source": 1, "target": 2, "value": 0}]}
*/
def showGraph(height: Int, width: Int, graph: String): Unit = {
displayHTML(s"""
<style>
.node_circle {
stroke: #777;
stroke-width: 1.3px;
}
.node_label {
pointer-events: none;
}
.link {
stroke: #777;
stroke-opacity: .2;
}
.node_count {
stroke: #777;
stroke-width: 1.0px;
fill: #999;
}
text.legend {
font-family: Verdana;
font-size: 13px;
fill: #000;
}
.node text {
font-family: "Helvetica Neue","Helvetica","Arial",sans-serif;
font-size: function(d) {return (d.importance)+ "px"};
font-weight: 200;
}
</style>
<div id="clicks-graph">
<script src="//d3js.org/d3.v3.min.js"></script>
<script>
var graph = $graph;
var width = $width,
height = $height;
var color = d3.scale.category20();
var force = d3.layout.force()
.charge(-200)
.linkDistance(350)
.size([width, height]);
var svg = d3.select("#clicks-graph").append("svg")
.attr("width", width)
.attr("height", height);
force
.nodes(graph.nodes)
.links(graph.links)
.start();
var link = svg.selectAll(".link")
.data(graph.links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", function(d) { return Math.sqrt(d.value)/10; });
var node = svg.selectAll(".node")
.data(graph.nodes)
.enter().append("g")
.attr("class", "node")
.call(force.drag);
node.append("circle")
.attr("r", function(d) { return Math.sqrt(d.importance); })
.style("fill", function (d) {
if (d.name.startsWith("other")) { return color(1); } else { return color(2); };
})
node.append("text")
.attr("dx", function(d) { return (Math.sqrt(d.importance)*30)/Math.sqrt(453.6031403843406); })
.attr("dy", ".35em")
.text(function(d) { return d.name });
//Now we are giving the SVGs co-ordinates - the force layout is generating the co-ordinates which this code is using to update the attributes of the SVG elements
force.on("tick", function () {
link.attr("x1", function (d) {
return d.source.x;
})
.attr("y1", function (d) {
return d.source.y;
})
.attr("x2", function (d) {
return d.target.x;
})
.attr("y2", function (d) {
return d.target.y;
});
d3.selectAll("circle").attr("cx", function (d) {
return d.x;
})
.attr("cy", function (d) {
return d.y;
});
d3.selectAll("text").attr("x", function (d) {
return d.x;
})
.attr("y", function (d) {
return d.y;
});
});
</script>
</div>
""")
}
// Usage notes, rendered as HTML in the notebook.
def help() = {
displayHTML("""
<p>
Produces a force-directed graph given a collection of edges of the following form:</br>
<tt><font color="#a71d5d">case class</font> <font color="#795da3">Edge</font>(<font color="#ed6a43">src</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">dest</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">count</font>: <font color="#a71d5d">Long</font>)</tt>
</p>
<p>Usage:<br/>
<tt><font color="#a71d5d">import</font> <font color="#ed6a43">d3._</font></tt><br/>
<tt><font color="#795da3">graphs.force</font>(</br>
<font color="#ed6a43">height</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">width</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">clicks</font>: <font color="#795da3">Dataset</font>[<font color="#795da3">Edge</font>])</tt>
</p>""")
}
}
// Render community 2 (the Nigerian cluster).
graphs.force(
height = 800,
width = 1200,
clicks = toplabel2FiltE.edges.as[Edge],
vertices = toplabel2FiltE.vertices.select($"id".as("name"),$"pagerank".as("importance")).as[Node]
)
// COMMAND ----------
// MAGIC %md
// MAGIC 
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out the top 100 according to pagerank score**
// COMMAND ----------
// DBTITLE 0,Filter out the top 100 according to pagerank score
val toplabel3Filt = toplabel3.filterVertices($"pagerank" >=3.160183413696083).filterEdges($"count">4*18).dropIsolatedVertices()
// COMMAND ----------
// MAGIC %md
// MAGIC **Filter out edges to and vertices with small amount of edges to make the graph more comprehensive**
// COMMAND ----------
// DBTITLE 0,Filter out edges to and vertices with small amount of edges to make the graph more comprehensive.
val toplabel3FiltE = toplabel3Filt.filterEdges($"count">50).dropIsolatedVertices()
// COMMAND ----------
// MAGIC %md
// MAGIC **Malaysian cluster**
// COMMAND ----------
// DBTITLE 0,Malaysian cluster.
// Re-declaration of the d3 payload records for this notebook cell.
case class Edge(src: String, dst: String, count: Long)
case class Node(name: String,importance: Double)
case class Link(source: Int, target: Int, value: Long)
case class Graph(nodes: Seq[Node], links: Seq[Link])
// Copy of the d3 helper for the "Malaysian cluster" cell. Differs from the
// other copies in its tuning constants: linkDistance 300, thicker links
// (sqrt(value)/3), and rescaled circle radius / text offset.
object graphs {
// val sqlContext = SQLContext.getOrCreate(org.apache.spark.SparkContext.getOrCreate()) /// fix
val sqlContext = SparkSession.builder().getOrCreate().sqlContext
import sqlContext.implicits._
// Collects vertices/edges to the driver and renders them as JSON via showGraph.
def force(vertices: Dataset[Node],clicks: Dataset[Edge], height: Int = 100, width: Int = 960): Unit = {
val data = clicks.collect()
val nodes = vertices.collect()
val links = data.map { t =>
Link(nodes.indexWhere(_.name == t.src.replaceAll("_", " ")), nodes.indexWhere(_.name == t.dst.replaceAll("_", " ")), t.count / 20 + 1)
}
showGraph(height, width, Seq(Graph(nodes, links)).toDF().toJSON.first())
}
/**
* Displays a force directed graph using d3
* input: {"nodes": [{"name": "..."}], "links": [{"source": 1, "target": 2, "value": 0}]}
*/
def showGraph(height: Int, width: Int, graph: String): Unit = {
displayHTML(s"""
<style>
.node_circle {
stroke: #777;
stroke-width: 1.3px;
}
.node_label {
pointer-events: none;
}
.link {
stroke: #777;
stroke-opacity: .2;
}
.node_count {
stroke: #777;
stroke-width: 1.0px;
fill: #999;
}
text.legend {
font-family: Verdana;
font-size: 13px;
fill: #000;
}
.node text {
font-family: "Helvetica Neue","Helvetica","Arial",sans-serif;
font-size: function(d) {return (d.importance)+ "px"};
font-weight: 200;
}
</style>
<div id="clicks-graph">
<script src="//d3js.org/d3.v3.min.js"></script>
<script>
var graph = $graph;
var width = $width,
height = $height;
var color = d3.scale.category20();
var force = d3.layout.force()
.charge(-200)
.linkDistance(300)
.size([width, height]);
var svg = d3.select("#clicks-graph").append("svg")
.attr("width", width)
.attr("height", height);
force
.nodes(graph.nodes)
.links(graph.links)
.start();
var link = svg.selectAll(".link")
.data(graph.links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", function(d) { return Math.sqrt(d.value)/3; });
var node = svg.selectAll(".node")
.data(graph.nodes)
.enter().append("g")
.attr("class", "node")
.call(force.drag);
node.append("circle")
.attr("r", function(d) { return (Math.sqrt(d.importance)*30)/Math.sqrt(98.7695771886648); })
.style("fill", function (d) {
if (d.name.startsWith("other")) { return color(1); } else { return color(2); };
})
node.append("text")
.attr("dx", function(d) { return (Math.sqrt(d.importance)*30)/Math.sqrt(26.343032735543023); })
.attr("dy", ".35em")
.text(function(d) { return d.name });
//Now we are giving the SVGs co-ordinates - the force layout is generating the co-ordinates which this code is using to update the attributes of the SVG elements
force.on("tick", function () {
link.attr("x1", function (d) {
return d.source.x;
})
.attr("y1", function (d) {
return d.source.y;
})
.attr("x2", function (d) {
return d.target.x;
})
.attr("y2", function (d) {
return d.target.y;
});
d3.selectAll("circle").attr("cx", function (d) {
return d.x;
})
.attr("cy", function (d) {
return d.y;
});
d3.selectAll("text").attr("x", function (d) {
return d.x;
})
.attr("y", function (d) {
return d.y;
});
});
</script>
</div>
""")
}
// Usage notes, rendered as HTML in the notebook.
def help() = {
displayHTML("""
<p>
Produces a force-directed graph given a collection of edges of the following form:</br>
<tt><font color="#a71d5d">case class</font> <font color="#795da3">Edge</font>(<font color="#ed6a43">src</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">dest</font>: <font color="#a71d5d">String</font>, <font color="#ed6a43">count</font>: <font color="#a71d5d">Long</font>)</tt>
</p>
<p>Usage:<br/>
<tt><font color="#a71d5d">import</font> <font color="#ed6a43">d3._</font></tt><br/>
<tt><font color="#795da3">graphs.force</font>(</br>
<font color="#ed6a43">height</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">width</font> = <font color="#795da3">500</font>,<br/>
<font color="#ed6a43">clicks</font>: <font color="#795da3">Dataset</font>[<font color="#795da3">Edge</font>])</tt>
</p>""")
}
}
// Render community 3 (the Malaysian cluster).
graphs.force(
height = 800,
width = 1200,
clicks = toplabel3FiltE.edges.as[Edge],
vertices = toplabel3FiltE.vertices.select($"id".as("name"),$"pagerank".as("importance")).as[Node]
)
// COMMAND ----------
// MAGIC %md
// MAGIC  | lamastex/scalable-data-science | dbcArchives/2021/000_9-sds-3-x-trends/030b_gdelt_POI_detection.scala | Scala | unlicense | 25,495 |
package com.alexitc.coinalerts.data.anorm.parsers
import anorm.SqlParser.{int, str}
import anorm.~
import com.alexitc.coinalerts.models._
/** Anorm row parsers for exchange-currency rows. */
object ExchangeCurrencyParsers {
import CommonParsers._
// currency_id column wrapped in its id type.
val parseCurrencyId = int("currency_id").map(ExchangeCurrencyId.apply)
// exchange column; NOTE(review): assumes fromDatabaseString accepts every
// stored value — verify its behaviour on unknown strings.
val parseExchange = str("exchange").map(Exchange.fromDatabaseString)
// market / currency columns; `from` yields an Option (None for unknown values).
val parseMarket = str("market").map(Market.from)
val parseCurrency = str("currency").map(Currency.from)
// Optional citext currency_name column; empty strings are normalised to None.
val parseCurrencyName = str("currency_name")(citextToString)
.map(CurrencyName.apply)
.?
.map { _.filter(_.string.nonEmpty) }
// Full-row parser. Result is None when either the market or the currency
// column cannot be decoded; column order here must match the query's columns.
val parseExchangeCurrency = (parseCurrencyId ~
parseExchange ~
parseMarket ~
parseCurrency ~
parseCurrencyName).map {
case id ~ exchange ~ marketMaybe ~ currencyMaybe ~ currencyName =>
for {
market <- marketMaybe
currency <- currencyMaybe
} yield ExchangeCurrency(id, exchange, market, currency, currencyName)
}
}
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/data/anorm/parsers/ExchangeCurrencyParsers.scala | Scala | gpl-3.0 | 960 |
import sbt._
import Keys._
import play.Play.autoImport._
import PlayKeys._
// sbt build definition for the play2-cache sample application.
// NOTE(review): the Build trait is deprecated since sbt 0.13.12 and removed in
// sbt 1.x — migrating to a build.sbt definition is advisable.
object ApplicationBuild extends Build {
val appName = "play2-cache-sample"
val appVersion = "0.7.4-SNAPSHOT"
val appScalaVersion = "2.11.1"
// Play Java modules plus the play2-crud library (code artifact + packaged assets).
val appDependencies = Seq(
javaCore, javaJdbc, javaEbean,
"play2-crud" %% "play2-crud" % "0.7.4-SNAPSHOT",
"play2-crud" %% "play2-crud" % "0.7.4-SNAPSHOT" classifier "assets"
)
val main = Project(appName, file(".")).enablePlugins(play.PlayJava).settings(
version := appVersion,
scalaVersion := appScalaVersion,
libraryDependencies ++= appDependencies,
//maven repository
// NOTE(review): plain-http resolvers are rejected by default in sbt 1.3+;
// prefer https URLs.
resolvers += "release repository" at "http://hakandilek.github.com/maven-repo/releases/",
resolvers += "snapshot repository" at "http://hakandilek.github.com/maven-repo/snapshots/"
)
}
| hakandilek/play2-crud | samples/play2-cache-sample/project/Build.scala | Scala | mit | 890 |
import scala.tools.partest._
// Partest direct test: audits the collection view hierarchy for inherited
// members that return a view type but lack an override in the *ViewLike trait.
object Test extends DirectTest {
// No user source is compiled; only a live compiler instance is needed.
override def code = ""
lazy val global = newCompiler("-usejavacp")
import global._, definitions._
override def show() {
new global.Run()
// Once we plug all of the view gaps, the output should be empty!
checkViews()
}
// Members legitimately returning a view without a ViewLike override.
def isExempt(sym: Symbol) = {
val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform", "filterImpl")
(exempt contains sym.name.decoded)
}
// Prints, grouped by declaring owner, the members of viewType inherited from
// generic collections that still return the view type.
def checkView(viewType: Type, viewLikeType: Type) {
val sep = "=" * 70
println(s"\\n$sep\\nChecking ${viewType.typeSymbol.fullName}\\n$sep")
val termMembers = viewType.nonPrivateMembers.toList filter (_.isTerm) map fullyInitializeSymbol
// Members not declared on the view class itself nor on a ViewLike trait.
val inheritedFromGenericCollection
= termMembers filterNot (_.owner.name.decoded contains "ViewLike") filterNot (_.owner == viewType.typeSymbol)
def returnsView(sym: Symbol) = viewType.memberType(sym).finalResultType contains viewType.typeSymbol
val needOverride = inheritedFromGenericCollection filterNot isExempt filter returnsView
// Sort owners by their position in the base-type sequence for stable output.
val grouped = needOverride.groupBy(_.owner).toSeq.sortBy { case (owner, _) => viewType baseTypeIndex owner }
val report = grouped.map {
case (owner, syms) => s"\\n$owner\\n${"-" * 70}\\n${syms.map(_.defString).sorted.mkString("\\n")}"
}.mkString("\\n")
println(report)
}
// Audits each of the standard (sequential) view types.
def checkViews() {
import collection._
checkView(typeOf[TraversableView[_, _]], typeOf[TraversableViewLike[_, _, _]])
checkView(typeOf[IterableView[_, _]], typeOf[IterableViewLike[_, _, _]])
checkView(typeOf[SeqView[_, _]], typeOf[SeqViewLike[_, _, _]])
checkView(typeOf[mutable.IndexedSeqView[_, _]], typeOf[SeqViewLike[_, _, _]])
checkView(typeOf[immutable.StreamView[_, _]], typeOf[immutable.StreamViewLike[_, _, _]])
// Parallel views not checked, assuming we will drop them in 2.11
}
}
| felixmulder/scala | test/files/run/t4332.scala | Scala | bsd-3-clause | 1,940 |
/**
* Created by slyuan on 17-3-16.
*/
//one--------------------------------------------------------
/** A minimal mutable bank account holding a balance in yuan.
 *
 *  Fixes over the previous version: `deposit` used to accept negative
 *  amounts (silently corrupting the balance), and `withdraw` validated its
 *  argument with `assert`, which is stripped under -Xdisable-assertions.
 *  Argument validation now uses `require` (IllegalArgumentException).
 */
class BankAccount {
  // Current balance in yuan; mutated only through deposit/withdraw.
  private var account : Int = 0

  /** Adds `money` yuan to the balance; rejects negative amounts. */
  def deposit(money:Int){
    require(money >= 0, "the money you deposit should not be negative")
    account += money
    println(s"...$account yuan in your account")
  }

  /** Removes `money` yuan; `money` must be non-negative and not exceed the balance. */
  def withdraw(money:Int) ={
    require(money >= 0, "the money you withdraw should not be negative")
    require(money <= account, "the money you withdraw should be less than account")
    account -= money
    println(s"...$account yuan in your account")
  }

  /** The current balance in yuan. */
  def balance = account
}
//two--------------------------------------------------------
/** Derives first and last name from a delimiter-separated info string.
 *  The first token becomes the first name, the second the last name.
 */
class Person4(info:String) {
  // Tokenised form of the raw info string (kept public, as before).
  val _name = info.split("\\\\s+")
  val firstName = _name.head
  val lastName = _name(1)
  override def toString: String =
    "first name " + firstName + " , and last name " + lastName
}
//three--------------------------------------------------------
/** An immutable 2D point with integer coordinates. */
class Point(val x:Int, val y:Int) {
  override def toString = s"Point x : $x, y : $y"
}

/** Companion providing `Point(x, y)` construction without `new`. */
object Point{
  def apply(x: Int, y: Int): Point = new Point(x, y)
}
//four--------------------------------------------------------
/** Immutable 2D point used by the shape hierarchy below. */
final class point(val x: Int, val y: Int) {
  override def toString: String = "(x,y) :(" + x + "," + y + ")"
}

/** Base class of shapes; subclasses only have to supply a center point. */
abstract class Shape {
  def centerPoint: point
  // Runtime class name, used purely for display.
  def className = getClass.getSimpleName
  override def toString: String = "Shape=>" + className + ", Center=>" + centerPoint
}

/** Axis-aligned rectangle given by two opposite corners. */
class Rectangle(val topLeft: point, val bottomRight: point) extends Shape {
  override def centerPoint = {
    // Midpoint of the two corners (integer division, as before).
    val cx = (topLeft.x + bottomRight.x) / 2
    val cy = (topLeft.y + bottomRight.y) / 2
    new point(cx, cy)
  }
}

/** Circle defined directly by its center and radius. */
class Circle(val centerPoint: point, val radius: Int) extends Shape
//
// Entry point for the week-two exercises. All demo invocations are left
// commented out; extending App keeps the object runnable once re-enabled.
object WeekTwo extends App {
// val rect = new Rectangle(new point(0,0), new point(10, 10))
// println(rect)
// val x1 = new OrderdedPoint(2, -1)
// val x2 = new OrderdedPoint(2, 1)
// println(x1 < x2)
// println(x1 > x2)
}
//five--------------------------------------------------------
//class OrderdedPoint(x:Int,y:Int) extends java.awt.Point with math.Ordered[java.awt.Point]{
//
//
// def compare(that: java.awt.Point): Int = {
// if (x <= that.x) {
// if(this.x == that.y) {
// if(this.y < that.y) -1
// else if (this.y > that.y) 1
// else 0
// } else -1
// } else 1
// }
//}
| csyuan/leetcode | Algorithms/src/main/scala/WeekTwo.scala | Scala | apache-2.0 | 2,202 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.akkasse
import akka.stream.stage.{ Context, StatefulStage }
import akka.util.ByteString
// Byte constants for the two recognised line terminators (CR and LF).
private object LineParser {
final val CR = '\\r'.toByte
final val LF = '\\n'.toByte
}
/** Stream stage that splits incoming ByteStrings into UTF-8 lines.
 *  CRLF, lone CR and lone LF are all accepted as terminators; the stream is
 *  failed once a single unterminated line exceeds `maxLineSize` bytes.
 */
private final class LineParser(maxLineSize: Int) extends StatefulStage[ByteString, String] {
import LineParser._
// Bytes received so far that have not yet been emitted as complete lines.
private var buffer = ByteString.empty
override def initial = new State {
override def onPush(bytes: ByteString, ctx: Context[String]) = {
buffer ++= bytes
val parsedLines = lines().iterator
if (buffer.size > maxLineSize)
ctx.fail(new IllegalStateException(s"maxLineSize of $maxLineSize exceeded!"))
else
emit(parsedLines, ctx)
}
// Extracts every complete line currently in `buffer`, leaving any trailing
// partial line buffered for the next push.
private def lines(): Vector[String] = {
// A sentinel byte is appended so a terminator at the very end of the buffer
// still forms a 2-element sliding window.
val (lines, nrOfConsumedBytes, _) = (buffer :+ 0)
.zipWithIndex
.sliding(2)
.collect {
// (terminator position, terminator length): CRLF counts as one 2-byte break.
case Seq((CR, n), (LF, _)) => (n, 2)
case Seq((CR, n), _) => (n, 1)
case Seq((LF, n), _) => (n, 1)
}
.foldLeft((Vector.empty[String], 0, false)) {
// Third element tracks whether the previous terminator was CRLF, so its LF
// (seen again by the sliding window) is skipped instead of starting a new line.
case ((slices, from, false), (until, k)) => (slices :+ buffer.slice(from, until).utf8String, until + k, k == 2)
case ((slices, _, _), (until, _)) => (slices, until + 1, false)
}
buffer = buffer.drop(nrOfConsumedBytes)
lines
}
}
}
| jasonchaffee/akka-sse | akka-sse/src/main/scala/de/heikoseeberger/akkasse/LineParser.scala | Scala | apache-2.0 | 1,955 |
/**
* Global Sensor Networks (GSN) Source Code
* Copyright (c) 2006-2016, Ecole Polytechnique Federale de Lausanne (EPFL)
*
* This file is part of GSN.
*
* GSN is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GSN is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GSN. If not, see <http://www.gnu.org/licenses/>.
*
* File: src/ch/epfl/gsn/process/DataProcess.scala
*
* @author Jean-Paul Calbimonte
*
*/
package ch.epfl.gsn.process
import ch.epfl.gsn.data._
import scala.collection.mutable.ArrayBuffer
/** A named transformation from one time [[Series]] to another. */
trait DataProcess {
// Stable identifier used to select this process (e.g. "exp-smoothing").
val name:String
// Produces a new series from the input; implementations do not mutate `ts`.
def process(ts:Series):Series
}
/** Exponential smoothing with factor `alpha`.
 *  Seeds with the first sample, then blends each sample with the running
 *  estimate: s_i = alpha * x_i + (1 - alpha) * s_{i-1}.
 */
class ExponentialSmoothing(alpha:Double) extends DataProcess{
  override val name="exp-smoothing"
  override def process(ts:Series)={
    val samples = ts.asDoubles
    var smoothedSoFar = Vector.empty[Double]
    for (x <- samples) {
      val next =
        if (smoothedSoFar.isEmpty) x
        else alpha * x + (1 - alpha) * smoothedSoFar.last
      smoothedSoFar = smoothedSoFar :+ next
    }
    Series(ts.output, smoothedSoFar)
  }
}
/** Simple moving average over a window of `size` samples.
 *  Emits 0.0 until the first full window is available; afterwards emits the
 *  mean of the most recent `size` samples.
 */
class SimpleMovingAverage(size:Int) extends DataProcess{
  override val name="simple-moving-avg"
  override def process(ts:Series)={
    // Rolling buffer of the most recent samples (at most `size` entries).
    val recent = new ArrayBuffer[Double]()
    var latestAvg = 0.0
    val smoothed = ts.asDoubles.map { sample =>
      recent += sample
      if (recent.size == size) {
        latestAvg = recent.sum / size
        recent.remove(0)
      }
      latestAvg
    }
    Series(ts.output, smoothed.toSeq)
  }
}
/** Linearly-weighted moving average over a window of `size` samples: the
 *  newest sample gets weight `size`, the oldest weight 1. Emits 0.0 until the
 *  first full window is available. Uses an O(1) incremental update of the
 *  weighted numerator rather than recomputing it per sample.
 *  (Also removes the unused `boot` variable of the previous version.)
 */
class WeightedMovingAverage(size:Int) extends DataProcess{
  override val name="weighted-moving-avg"
  override def process(ts:Series)={
    // Sum of the raw samples currently in the window.
    var total= 0d
    // Weighted numerator: sum over the window of weight * sample.
    var numer= 0d
    // 1 + 2 + ... + size; exact in Int arithmetic (size*(size+1) is even).
    val denom=size*(size+1)/2
    val window=new ArrayBuffer[Double]()
    val doubles=ts.asDoubles
    val newSeries=doubles.map{d=>
      window+=d
      if (window.size<=size) {
        // Still filling the window: the new sample's weight is its position.
        numer=numer+d*window.size
        total=total+d
      } else {
        // Full window: adding d with weight `size` shifts every existing
        // weight down by one, i.e. the numerator drops by the old total.
        numer=numer+size*d-total
        total=total+d-window(0)
        window.remove(0)
      }
      // Emit 0 until the first complete window has been seen.
      if (window.size<size) 0 else numer/denom
    }
    Series(ts.output,newSeries.toSeq)
  }
}
| LSIR/gsn | gsn-tools/src/main/scala/ch/epfl/gsn/process/DataProcess.scala | Scala | gpl-3.0 | 2,633 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.data
import java.text.SimpleDateFormat
import java.util.TimeZone
import org.geotools.data._
import org.geotools.data.simple.SimpleFeatureIterator
import org.geotools.factory.Hints
import org.geotools.feature.DefaultFeatureCollection
import org.geotools.filter.text.cql2.CQL
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.core.index.{AttributeIdxEqualsStrategy, QueryStrategyDecider, SF_PROPERTY_START_TIME}
import org.locationtech.geomesa.feature.AvroSimpleFeatureFactory
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeature
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection
import scala.collection.JavaConversions._
/**
 * Integration tests for GeoMesa feature writers against a mock Accumulo instance.
 *
 * Covers: adding features with caller-provided feature IDs, attribute updates,
 * deletes, transactional add/remove, and verification that index entries are
 * rewritten (old keys deleted) when indexed geometry/date attributes change.
 */
@RunWith(classOf[JUnitRunner])
class FeatureWritersTest extends Specification {

  // Tests mutate shared state in the mock table and build on one another,
  // so they must run in declaration order.
  sequential

  val geotimeAttributes = org.locationtech.geomesa.core.index.spec
  val sftName = "mutableType"
  val sft = SimpleFeatureTypes.createType(sftName, s"name:String:index=true,age:Integer,$geotimeAttributes")
  sft.getUserData.put(SF_PROPERTY_START_TIME, "dtg")

  // Parse dates in UTC so generated index keys are deterministic.
  val sdf = new SimpleDateFormat("yyyyMMdd")
  sdf.setTimeZone(TimeZone.getTimeZone("Zulu"))
  val dateToIndex = sdf.parse("20140102")

  def createStore: AccumuloDataStore =
    // the specific parameter values should not matter, as we
    // are requesting a mock data store connection to Accumulo
    DataStoreFinder.getDataStore(Map(
      "instanceId" -> "mycloud",
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user"       -> "myuser",
      "password"   -> "mypassword",
      "auths"      -> "A,B,C",
      "tableName"  -> "differentTableFromOtherTests", //note the table needs to be different to prevent testing errors,
      "useMock"    -> "true")).asInstanceOf[AccumuloDataStore]

  "AccumuloFeatureWriter" should {
    "provide ability to update a single feature that it wrote and preserve feature IDs" in {
      val ds = createStore
      ds.createSchema(sft)
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val featureCollection = new DefaultFeatureCollection(sftName, sft)

      /* create a feature */
      val originalFeature1 = AvroSimpleFeatureFactory.buildAvroFeature(sft, List(), "id1")
      val geom = WKTUtils.read("POINT(45.0 49.0)")
      originalFeature1.setDefaultGeometry(geom)
      originalFeature1.setAttribute("name","fred")
      originalFeature1.setAttribute("age",50.asInstanceOf[Any])

      /* make sure we ask the system to re-use the provided feature-ID */
      originalFeature1.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
      featureCollection.add(originalFeature1)

      /* create a second feature */
      val originalFeature2 = AvroSimpleFeatureFactory.buildAvroFeature(sft, List(), "id2")
      originalFeature2.setDefaultGeometry(geom)
      originalFeature2.setAttribute("name","tom")
      originalFeature2.setAttribute("age",60.asInstanceOf[Any])

      /* make sure we ask the system to re-use the provided feature-ID */
      originalFeature2.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
      featureCollection.add(originalFeature2)

      /* create a third feature */
      val originalFeature3 = AvroSimpleFeatureFactory.buildAvroFeature(sft, List(), "id3")
      originalFeature3.setDefaultGeometry(geom)
      originalFeature3.setAttribute("name","kyle")
      originalFeature3.setAttribute("age",2.asInstanceOf[Any])

      /* make sure we ask the system to re-use the provided feature-ID */
      originalFeature3.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
      featureCollection.add(originalFeature3)

      /* write the feature to the store */
      fs.addFeatures(featureCollection)
      fs.flush()

      val store = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]

      /* turn fred into billy */
      val filter = CQL.toFilter("name = 'fred'")
      store.modifyFeatures(Array("name", "age"), Array("billy", 25.asInstanceOf[AnyRef]), filter)

      /* delete kyle */
      val deleteFilter = CQL.toFilter("name = 'kyle'")
      store.removeFeatures(deleteFilter)

      /* Let's read out what we wrote...we should only get tom and billy back out */
      val nameAgeMap = getMap[String, Int](getFeatures(sftName, fs, "include"), "name", "age")
      nameAgeMap.size mustEqual 2
      nameAgeMap should contain( "tom" -> 60)
      nameAgeMap should contain( "billy" -> 25)

      /* provided feature IDs must have survived the round trip and the update */
      val featureIdMap = getMap[String, String](getFeatures(sftName, fs, "include"), "name", (sf:SimpleFeature) => sf.getID)
      featureIdMap.size mustEqual 2
      featureIdMap should contain( "tom" -> "id2")
      featureIdMap should contain( "billy" -> "id1")
    }

    "be able to replace all features in a store using a general purpose FeatureWriter" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]

      /* from the test before there are 2 features left over - validate that's true and delete */
      countFeatures(fs, sftName) mustEqual 2
      val writer = ds.getFeatureWriter(sftName, Transaction.AUTO_COMMIT)
      while(writer.hasNext){
        writer.next()
        writer.remove()
      }
      // cannot do anything here until the writer is closed.

      /* repopulate it */
      val sftType = ds.getSchema(sftName)
      val geom = WKTUtils.read("POINT(45.0 49.0)")
      val c = new DefaultFeatureCollection
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("will", 56.asInstanceOf[AnyRef], geom, dateToIndex, null), "fid1"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("george", 33.asInstanceOf[AnyRef], geom, dateToIndex, null), "fid2"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("sue", 99.asInstanceOf[AnyRef], geom, dateToIndex, null), "fid3"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("karen", 50.asInstanceOf[AnyRef], geom, dateToIndex, null), "fid4"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("bob", 56.asInstanceOf[AnyRef], geom, dateToIndex, null), "fid5"))
      val ids = c.map { f => f.getID}
      try {
        c.zip(ids).foreach { case (feature, id) =>
          val writerCreatedFeature = writer.next()
          writerCreatedFeature.setAttributes(feature.getAttributes)
          writerCreatedFeature.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
          writerCreatedFeature.getUserData.put(Hints.PROVIDED_FID, id)
          writer.write()
        }
      } finally {
        // always release the writer, even if a write fails mid-batch
        writer.close()
      }
      countFeatures(fs, sftName) mustEqual 5

      /* this tests the Hints.PROVIDED_FID feature */
      val featureIdMap = getMap[String, String](getFeatures(sftName, fs, "include"), "name", (sf: SimpleFeature) => sf.getID)
      featureIdMap.size mustEqual 5
      featureIdMap should contain("will" -> "fid1")
      featureIdMap should contain("george" -> "fid2")
      featureIdMap should contain("sue" -> "fid3")
      featureIdMap should contain("karen" -> "fid4")
      featureIdMap should contain("bob" -> "fid5")
    }

    "be able to update all features based on some ecql or something" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val filter = CQL.toFilter("(age > 50 AND age < 99) or (name = 'karen')")
      fs.modifyFeatures(Array("age"), Array(60.asInstanceOf[AnyRef]), filter)
      val nameAgeMap = getMap[String, Int](getFeatures(sftName, fs, "age = 60"), "name", "age")
      nameAgeMap.size mustEqual 3
      nameAgeMap should contain( "will" -> 60)
      nameAgeMap should contain( "karen" -> 60)
      nameAgeMap should contain( "bob" -> 60)

      /* feature id should stay the same */
      val featureIdMap = getMap[String, String](getFeatures(sftName, fs, "age = 60"),"name", (sf:SimpleFeature) => sf.getID)
      featureIdMap.size mustEqual 3
      featureIdMap should contain("will" -> "fid1")
      featureIdMap should contain("karen" -> "fid4")
      featureIdMap should contain("bob" -> "fid5")
    }

    "provide ability to add data inside transactions" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val sftType = ds.getSchema(sftName)
      val geom = WKTUtils.read("POINT(45.0 49.0)")
      val c = new DefaultFeatureCollection
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("dude1", 15.asInstanceOf[AnyRef], geom, null, null), "fid10"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("dude2", 16.asInstanceOf[AnyRef], geom, null, null), "fid11"))
      c.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("dude3", 17.asInstanceOf[AnyRef], geom, null, null), "fid12"))
      val trans = new DefaultTransaction("trans1")
      fs.setTransaction(trans)
      try {
        fs.addFeatures(c)
        trans.commit()
        val features = getFeatures(sftName, fs, "(age = 15) or (age = 16) or (age = 17)")
        val nameAgeMap = getMap[String, Int](features, "name", "age")
        nameAgeMap.size mustEqual 3
        nameAgeMap should contain( "dude1" -> 15)
        nameAgeMap should contain( "dude2" -> 16)
        nameAgeMap should contain( "dude3" -> 17)
      } catch {
        case e: Exception =>
          // roll back the partial transaction before rethrowing so later tests
          // see a consistent table
          trans.rollback()
          throw e
      } finally {
        trans.close()
      }
    }

    "provide ability to remove inside transactions" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val trans = new DefaultTransaction("trans1")
      fs.setTransaction(trans)
      try {
        fs.removeFeatures(CQL.toFilter("name = 'dude1' or name='dude2' or name='dude3'"))
        trans.commit()
        val nameAgeMap = getMap[String, Int](getFeatures(sftName, fs, "include"), "name", "age")
        nameAgeMap.keySet should not contain("dude1")
        nameAgeMap.keySet should not contain("dude2")
        nameAgeMap.keySet should not contain("dude3")
        nameAgeMap.keySet should containAllOf(List("will", "george", "sue", "karen", "bob"))
        nameAgeMap.size mustEqual 5
      } catch {
        case e: Exception =>
          trans.rollback()
          throw e
      } finally {
        trans.close()
      }
    }

    "issue delete keys when geometry changes" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val filter = CQL.toFilter("name = 'bob' or name='karen'")
      val writer = ds.getFeatureWriter(sftName, filter, Transaction.AUTO_COMMIT)
      while(writer.hasNext){
        val sf = writer.next()
        sf.setDefaultGeometry(WKTUtils.read("POINT(50.0 50)"))
        writer.write()
      }
      writer.close()

      // Verify old geo bbox doesn't return them
      val map45 = getMap[String,Int](getFeatures(sftName, fs, "BBOX(geom, 44.9,48.9,45.1,49.1)"),"name", "age")
      map45.keySet.size mustEqual 3
      map45.keySet should containAllOf(List("will", "george", "sue"))

      // Verify that new geometries are written with a bbox query that uses the index
      val map50 = getMap[String,Int](getFeatures(sftName, fs, "BBOX(geom, 49.9,49.9,50.1,50.1)"),"name", "age")
      map50.keySet.size mustEqual 2
      map50.keySet should containAllOf(List("bob", "karen"))

      // get them all
      val mapLarge = getMap[String,Int](getFeatures(sftName, fs, "BBOX(geom, 44.0,44.0,51.0,51.0)"),"name", "age")
      mapLarge.keySet.size mustEqual 5
      mapLarge.keySet should containAllOf(List("will", "george", "sue", "bob", "karen"))

      // get none
      val mapNone = getMap[String,Int](getFeatures(sftName, fs, "BBOX(geom, 30.0,30.0,31.0,31.0)"),"name", "age")
      mapNone.keySet.size mustEqual 0
    }

    "issue delete keys when datetime changes" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val attr = "dtg"
      val filter = CQL.toFilter("name = 'will' or name='george'")
      val writer = ds.getFeatureWriter(sftName, filter, Transaction.AUTO_COMMIT)
      val newDate = sdf.parse("20140202")
      while(writer.hasNext){
        val sf = writer.next()
        sf.setAttribute(attr, newDate)
        writer.write()
      }
      writer.close()

      // Verify old daterange doesn't return them
      val mapJan = getMap[String,Int](getFeatures(sftName, fs, s"$attr DURING 2013-12-29T00:00:00Z/2014-01-04T00:00:00Z"),"name", "age")
      mapJan.keySet.size mustEqual 3
      mapJan.keySet should containAllOf(List("sue", "bob", "karen"))

      // Verify new date range returns things
      val mapFeb = getMap[String,Int](getFeatures(sftName, fs, s"$attr DURING 2014-02-01T00:00:00Z/2014-02-03T00:00:00Z"),"name", "age")
      mapFeb.keySet.size mustEqual 2
      mapFeb.keySet should containAllOf(List("will","george"))

      // Verify large date range returns everything
      val mapJanFeb = getMap[String,Int](getFeatures(sftName, fs, s"$attr DURING 2014-01-01T00:00:00Z/2014-02-03T00:00:00Z"),"name", "age")
      mapJanFeb.keySet.size mustEqual 5
      mapJanFeb.keySet should containAllOf(List("will", "george", "sue", "bob", "karen"))

      // Verify other date range returns nothing
      val map2013 = getMap[String,Int](getFeatures(sftName, fs, s"$attr DURING 2013-01-01T00:00:00Z/2013-12-31T00:00:00Z"),"name", "age")
      map2013.keySet.size mustEqual 0
    }

    "verify that start end times are excluded in filter" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
      val attr = "dtg"
      // AFTER/BEFORE are exclusive of the boundary instant, so querying exactly
      // at the stored timestamps must return nothing
      val afterFilter = getMap[String,Int](getFeatures(sftName, fs, s"$attr AFTER 2014-02-02T00:00:00Z"), "name", "age")
      afterFilter.keySet.size mustEqual 0
      val beforeFilter = getMap[String,Int](getFeatures(sftName, fs, s"$attr BEFORE 2014-01-02T00:00:00Z"), "name", "age")
      beforeFilter.keySet.size mustEqual 0
    }

    "ensure that feature IDs are not changed when spatiotemporal indexes change" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]

      // snapshot the features (keyed by name) before rewriting index attributes
      val origFeatures = {
        val features = getFeatures(sftName, fs, "include")
        val map = collection.mutable.HashMap.empty[String,SimpleFeature]
        while(features.hasNext) {
          val sf = features.next()
          map.put(sf.getAttribute("name").asInstanceOf[String], sf)
        }
        map.toMap
      }
      origFeatures.size mustEqual 5

      // rewrite both indexed attributes (date and geometry) on every feature
      val filter = CQL.toFilter("include")
      val writer = ds.getFeatureWriter(sftName, filter, Transaction.AUTO_COMMIT)
      val attr = "dtg"
      val newDate = sdf.parse("20120102")
      while(writer.hasNext){
        val sf = writer.next()
        sf.setAttribute(attr, newDate)
        sf.setDefaultGeometry(WKTUtils.read("POINT(10.0 10.0)"))
        writer.write()
      }
      writer.close()

      val newFeatures = {
        val features = getFeatures(sftName, fs, "include")
        val map = collection.mutable.HashMap.empty[String,SimpleFeature]
        while(features.hasNext) {
          val sf = features.next()
          map.put(sf.getAttribute("name").asInstanceOf[String], sf)
        }
        map.toMap
      }
      newFeatures.size mustEqual origFeatures.size

      // IDs must be stable even though the indexed attributes changed
      forallWhen(newFeatures.keys) { case k: String =>
        val o = origFeatures(k)
        val n = newFeatures(k)
        o.getID mustEqual n.getID
        o.getDefaultGeometry must not be equalTo(n.getDefaultGeometry)
        o.getAttribute(attr) must not be equalTo(n.getAttribute(attr))
      }
    }

    "verify delete and add same key works" in {
      val ds = createStore
      val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]

      val deleteFilter = CQL.toFilter("name = 'will'")

      val hints = ds.strategyHints(sft)
      val q = new Query(sft.getTypeName, deleteFilter)
      QueryStrategyDecider.chooseStrategy(sft, q, hints, INTERNAL_GEOMESA_VERSION) must beAnInstanceOf[AttributeIdxEqualsStrategy]

      import org.locationtech.geomesa.utils.geotools.Conversions._

      // Retrieve Will's ID before deletion.
      val featuresBeforeDelete = getFeatures(sftName, fs, "name = 'will'")
      val feats = featuresBeforeDelete.toList
      feats.size mustEqual 1
      val willId = feats.head.getID

      fs.removeFeatures(deleteFilter)

      // NB: We really need a test which reads from the attribute table directly since missing records entries
      //  will result in attribute queries
      // This verifies that 'will' has been deleted from the attribute table.
      val attributeTableFeatures = getMap[String,Int](getFeatures(sftName, fs, "name = 'will'"), "name", "age")
      attributeTableFeatures.keySet.size mustEqual 0

      // This verifies that 'will' has been deleted from the record table.
      val recordTableFeatures = getMap[String,Int](getFeatures(sftName, fs, s"IN('$willId')"), "name", "age")
      recordTableFeatures.keySet.size mustEqual 0

      // This verifies that 'will' has been deleted from the ST idx table.
      val stTableFeatures = getFeatures(sftName, fs, "BBOX(geom, 44.0,44.0,51.0,51.0)")
      stTableFeatures.count(_.getID == willId) mustEqual 0

      val featureCollection = new DefaultFeatureCollection(sftName, sft)
      val sftType = ds.getSchema(sftName)
      val geom = WKTUtils.read("POINT(10.0 10.0)")
      val date = sdf.parse("20120102")
      /* create a feature */
      featureCollection.add(AvroSimpleFeatureFactory.buildAvroFeature(sftType, Array("will", 56.asInstanceOf[AnyRef], geom, date, null), "fid1"))
      fs.addFeatures(featureCollection)

      val features = getMap[String,Int](getFeatures(sftName, fs, "name = 'will'"), "name", "age")
      features.keySet.size mustEqual 1
    }
  }

  /** Runs the given ECQL query against the store and returns the raw feature iterator. */
  def getFeatures(sftName: String, store: AccumuloFeatureStore, cql: String): SimpleFeatureIterator = {
    val query = new Query(sftName, ECQL.toFilter(cql))
    val results = store.getFeatures(query)
    results.features
  }

  /** Drains the iterator into a map of attribute `keyAttr` -> attribute `valAttr`. */
  def getMap[K,V](features: SimpleFeatureIterator, keyAttr: String, valAttr: String) : Map[K, V] = {
    getMap[K,V](features, keyAttr, (sf:SimpleFeature) => sf.getAttribute(valAttr))
  }

  /** Drains the iterator into a map of attribute `keyAttr` -> `valFunc(feature)`. */
  def getMap[K,V](features: SimpleFeatureIterator, keyAttr: String, valFunc: (SimpleFeature => AnyRef)) : Map[K, V] = {
    val map = collection.mutable.HashMap.empty[K, V]
    while(features.hasNext) {
      val sf = features.next()
      map.put(sf.getAttribute(keyAttr).asInstanceOf[K], valFunc(sf).asInstanceOf[V])
    }
    map.toMap
  }

  /** Counts all features currently visible in the store for the given type. */
  def countFeatures(store:AccumuloFeatureStore, sftName:String): Int = {
    val features = getFeatures(sftName, store, "include")
    var count = 0
    while(features.hasNext) { features.next(); count += 1}
    count
  }
}
| kevinwheeler/geomesa | geomesa-core/src/test/scala/org/locationtech/geomesa/core/data/FeatureWritersTest.scala | Scala | apache-2.0 | 20,457 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.models.workflows
// Container for workflow variables; currently an empty placeholder with no fields.
case class Variables()
| deepsense-io/seahorse-workflow-executor | models/src/main/scala/io/deepsense/models/workflows/Variables.scala | Scala | apache-2.0 | 661 |
package edu.berkeley.nlp.entity.coref
import scala.collection.JavaConverters._
/**
 * Pairs an ordered sequence of mentions with a clustering over their indices
 * (index i in the clustering refers to ments(i)).
 */
class OrderedClusteringBound(val ments: Seq[Mention],
                             val clustering: OrderedClustering) {

  /**
   * Drops singleton clusters (as required for CoNLL-style output) and returns
   * a new bound clustering restricted to the retained mentions.
   */
  def postprocessForConll(): OrderedClusteringBound = {
    val keptIdxs = ments.indices.filterNot(clustering.isSingleton)
    val keptMents = keptIdxs.map(ments)
    new OrderedClusteringBound(keptMents, clustering.getSubclustering(keptIdxs))
  }

  /** Looks up the cluster index for a mention via its position in ments. */
  def getClusterIdx(ment: Mention) = {
    val mentIdx = ments.indexOf(ment)
    clustering.getClusterIdx(mentIdx)
  }
}
| matthewfl/berkeley-entity | src/main/java/edu/berkeley/nlp/entity/coref/OrderedClusteringBound.scala | Scala | gpl-3.0 | 561 |
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.junit.experimental.categories.Category
/**
* Created by user on 3/28/16.
*/
// Regression tests for spurious "Unspecified value parameters" highlighting
// caused by implicit parameter resolution. NOTE(review): the PerfCycleTests
// category presumably marks these as known-failing tests — confirm against
// the test-runner configuration.
@Category(Array(classOf[PerfCycleTests]))
class UnspecifiedValueParamImplicitTest extends ScalaLightCodeInsightFixtureTestAdapter {
  // SCL-10045: constructing a class whose constructor takes only a private
  // implicit val parameter must not be flagged when a matching implicit is
  // in scope.
  def testSCL10045(): Unit = {
    checkTextHasNoErrors(
      """
        |class Repro {
        |  implicit val i: Int = 0
        |
        |  new ReproDep // Warning: "Unspecified value parameters: i: Int"
        |}
        |
        |class ReproDep(private implicit val i: Int)
      """.stripMargin)
  }
  // SCL-5768: applying an update (`foo()() = 1`) where the inner apply takes
  // an implicit parameter list must not be flagged as an error.
  def testSCL5768(): Unit = {
    checkTextHasNoErrors(
      """
        |object Foo {
        |  trait Tx
        |  trait Foo { def apply()(implicit tx: Tx): Bar }
        |  trait Bar { def update(value: Int): Unit }
        |
        |  def test(foo: Foo)(implicit tx: Tx) {
        |    foo()() = 1 // second () is highlighted red
        |  }
        |  }
      """.stripMargin)
  }
}
| whorbowicz/intellij-scala | test/org/jetbrains/plugins/scala/failed/annotator/UnspecifiedValueParamImplicitTest.scala | Scala | apache-2.0 | 1,130 |
/**
* Copyright 2017 Interel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package core3.test.specs.perf.time.workflows
import akka.pattern.ask
import core3.config.StaticConfig
import core3.test.fixtures
import core3.test.fixtures.TestSystem._
import core3.test.fixtures.workflows.AddGroups
import core3.test.specs.perf.PerformanceTimeSpec
import core3.test.utils._
import core3.workflows.WorkflowEngineComponent._
import org.scalameter.api._
import play.api.libs.json.Json
import scala.concurrent.duration._
/**
 * ScalaMeter time-performance spec for the workflow engine: measures how
 * ExecuteWorkflow latency scales as the number of groups created by the
 * AddGroups workflow grows.
 */
object WorkflowEngineSpec extends PerformanceTimeSpec {
  // Maximum time allowed for each ask/await on the engine actor.
  implicit private val waitDuration: FiniteDuration = 15.seconds
  private val maxContainers = 1000
  // Benchmark axis: 0, 100, 200, ..., maxContainers groups per run.
  private val workflowsCount: Gen[Int] = Gen.range("workflowsCount")(from = 0, upto = maxContainers, hop = 100)
  private val db = fixtures.Database.createMemoryOnlyDBInstance()
  private val engine = fixtures.Workflows.createWorkflowEngine(db)
  private val authorizedUser = fixtures.Workflows.createAuthorizedUser()
  // DSV report output directory; falls back to a default under target/ when
  // the config key is absent.
  private val resultDirConfigPath = "testing.reports.time.dsvPath"
  private val resultDir = if (StaticConfig.get.hasPath(resultDirConfigPath)) {
    StaticConfig.get.getString(resultDirConfigPath)
  } else {
    "target/specs.perf/time/dsv"
  }
  performance of "time.core3.database.dals" config(
    reports.resultDir -> resultDir,
    exec.benchRuns -> 10,
    exec.independentSamples -> 4,
    exec.warmupCovThreshold -> 0.5
  ) in {
    performance of "WorkflowEngineComponent" in {
      measure method "ExecuteWorkflow" in {
        using(workflowsCount)
          .config {
            dsl.curve -> "AddGroups"
          }
          .beforeTests {
            // Reset the Group and TransactionLog stores before each batch so
            // earlier runs do not skew measurements; clear failures are
            // ignored because the structures may not exist yet.
            db.clearDatabaseStructure("Group").ignoreFailure.await
            db.buildDatabaseStructure("Group").await
            db.clearDatabaseStructure("TransactionLog").ignoreFailure.await
            db.buildDatabaseStructure("TransactionLog").await
          }
          .in {
            count =>
              (engine ? ExecuteWorkflow(AddGroups.name, Json.obj("count" -> count), authorizedUser)).await
          }
      }
    }
  }
}
| Interel-Group/core3 | src/test/scala/core3/test/specs/perf/time/workflows/WorkflowEngineSpec.scala | Scala | apache-2.0 | 2,618 |
package com.twitter.finatra.http.tests.integration.doeverything.main.domain
import com.twitter.finatra.request.QueryParam
// Request/response wrappers exercising @QueryParam binding of repeated query
// parameters (e.g. ?foo=a&foo=b) in the "do everything" integration tests.

// Repeated string query parameter.
case class RequestWithQueryParamSeqString(
  @QueryParam foo: Seq[String])

// Echo response for the repeated string parameter.
case class ResponseOfQueryParamSeqString(
  foo: Seq[String])

// Repeated long query parameter.
case class RequestWithQueryParamSeqLong(
  @QueryParam foo: Seq[Long])

// Echo response for the repeated long parameter.
case class ResponseOfQueryParamSeqLong(
  foo: Seq[Long])

// Repeated int query parameter.
case class RequestWithIntQueryParams(
  @QueryParam param: Seq[Int])

// NOTE(review): field is Seq[Int] despite "Short" in the class name —
// presumably intentional for the test scenario; confirm against its usage.
case class RequestWithShortQueryParams(
  @QueryParam param: Seq[Int])

// Repeated boolean query parameter.
case class RequestWithBooleanQueryParams(
  @QueryParam param: Seq[Boolean])

// Optional single boolean query parameter.
case class RequestWithOptionBooleanQueryParam(
  @QueryParam param: Option[Boolean])

// Required single boolean query parameter.
case class RequestWithBooleanQueryParam(
  @QueryParam param: Boolean)

// Repeated query parameter bound to a case class value.
case class RequestWithCaseClassQueryParams(
  @QueryParam param: Seq[AnotherCaseClass])

// Helper payload type for RequestWithCaseClassQueryParams.
case class AnotherCaseClass(
  foo: String)
| syamantm/finatra | http/src/test/scala/com/twitter/finatra/http/tests/integration/doeverything/main/domain/RequestWithQueryParamSeqString.scala | Scala | apache-2.0 | 905 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import java.io.{File, FileOutputStream, OutputStreamWriter}
import java.util.Properties
import java.util.concurrent.TimeUnit
import scala.collection.JavaConversions._
import scala.collection.mutable
import com.google.common.base.Charsets.UTF_8
import com.google.common.io.ByteStreams
import com.google.common.io.Files
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.server.MiniYARNCluster
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
import org.apache.spark.{Logging, SparkConf, SparkContext, SparkException, TestUtils}
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
import org.apache.spark.util.Utils
/**
 * Integration tests for YARN; these tests use a mini YARN cluster to run Spark-on-YARN
 * applications, and require the Spark assembly to be built before they can be successfully
 * run. Result files are used to detect in-job failures because yarn-cluster mode does not
 * surface them through the client exit status (see checkResult).
 */
class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers with Logging {

  // log4j configuration for the YARN containers, so that their output is collected
  // by YARN instead of trying to overwrite unit-tests.log.
  private val LOG4J_CONF = """
    |log4j.rootCategory=DEBUG, console
    |log4j.appender.console=org.apache.log4j.ConsoleAppender
    |log4j.appender.console.target=System.err
    |log4j.appender.console.layout=org.apache.log4j.PatternLayout
    |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
    """.stripMargin

  // Minimal PySpark job used by the Python-application test; writes
  // "success"/"failure" into the file given as its first argument.
  private val TEST_PYFILE = """
    |import sys
    |from operator import add
    |
    |from pyspark import SparkConf , SparkContext
    |if __name__ == "__main__":
    |    if len(sys.argv) != 2:
    |        print >> sys.stderr, "Usage: test.py [result file]"
    |        exit(-1)
    |    sc = SparkContext(conf=SparkConf())
    |    status = open(sys.argv[1],'w')
    |    result = "failure"
    |    rdd = sc.parallelize(range(10))
    |    cnt = rdd.count()
    |    if cnt == 10:
    |        result = "success"
    |    status.write(result)
    |    status.close()
    |    sc.stop()
    """.stripMargin

  // Initialized in beforeAll(); torn down in afterAll().
  private var yarnCluster: MiniYARNCluster = _
  private var tempDir: File = _
  private var fakeSparkJar: File = _
  private var logConfDir: File = _

  override def beforeAll() {
    super.beforeAll()

    tempDir = Utils.createTempDir()
    logConfDir = new File(tempDir, "log4j")
    logConfDir.mkdir()

    val logConfFile = new File(logConfDir, "log4j.properties")
    Files.write(LOG4J_CONF, logConfFile, UTF_8)

    yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
    yarnCluster.init(new YarnConfiguration())
    yarnCluster.start()

    // There's a race in MiniYARNCluster in which start() may return before the RM has updated
    // its address in the configuration. You can see this in the logs by noticing that when
    // MiniYARNCluster prints the address, it still has port "0" assigned, although later the
    // test works sometimes:
    //
    //    INFO MiniYARNCluster: MiniYARN ResourceManager address: blah:0
    //
    // That log message prints the contents of the RM_ADDRESS config variable. If you check it
    // later on, it looks something like this:
    //
    //    INFO YarnClusterSuite: RM address in configuration is blah:42631
    //
    // This hack loops for a bit waiting for the port to change, and fails the test if it hasn't
    // done so in a timely manner (defined to be 10 seconds).
    val config = yarnCluster.getConfig()
    val deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10)
    while (config.get(YarnConfiguration.RM_ADDRESS).split(":")(1) == "0") {
      if (System.currentTimeMillis() > deadline) {
        throw new IllegalStateException("Timed out waiting for RM to come up.")
      }
      logDebug("RM address still not set in configuration, waiting...")
      TimeUnit.MILLISECONDS.sleep(100)
    }

    logInfo(s"RM address in configuration is ${config.get(YarnConfiguration.RM_ADDRESS)}")

    fakeSparkJar = File.createTempFile("sparkJar", null, tempDir)
  }

  override def afterAll() {
    yarnCluster.stop()
    super.afterAll()
  }

  test("run Spark in yarn-client mode") {
    testBasicYarnApp(true)
  }

  test("run Spark in yarn-cluster mode") {
    testBasicYarnApp(false)
  }

  test("run Spark in yarn-cluster mode unsuccessfully") {
    // Don't provide arguments so the driver will fail; intercept asserts
    // that a SparkException is actually thrown by the submission.
    intercept[SparkException] {
      runSpark(false, mainClassName(YarnClusterDriver.getClass))
    }
  }

  test("run Python application in yarn-cluster mode") {
    val primaryPyFile = new File(tempDir, "test.py")
    Files.write(TEST_PYFILE, primaryPyFile, UTF_8)
    val pyFile = new File(tempDir, "test2.py")
    Files.write(TEST_PYFILE, pyFile, UTF_8)
    val result = File.createTempFile("result", null, tempDir)

    // The sbt assembly does not include pyspark / py4j python dependencies, so we need to
    // propagate SPARK_HOME so that those are added to PYTHONPATH. See PythonUtils.scala.
    val sparkHome = sys.props("spark.test.home")
    val extraConf = Map(
      "spark.executorEnv.SPARK_HOME" -> sparkHome,
      "spark.yarn.appMasterEnv.SPARK_HOME" -> sparkHome)

    runSpark(false, primaryPyFile.getAbsolutePath(),
      sparkArgs = Seq("--py-files", pyFile.getAbsolutePath()),
      appArgs = Seq(result.getAbsolutePath()),
      extraConf = extraConf)
    checkResult(result)
  }

  test("user class path first in client mode") {
    testUseClassPathFirst(true)
  }

  test("user class path first in cluster mode") {
    testUseClassPathFirst(false)
  }

  /** Runs the basic driver app and verifies it reports success via its result file. */
  private def testBasicYarnApp(clientMode: Boolean): Unit = {
    val result = File.createTempFile("result", null, tempDir)
    runSpark(clientMode, mainClassName(YarnClusterDriver.getClass),
      appArgs = Seq(result.getAbsolutePath()))
    checkResult(result)
  }

  /**
   * Verifies that a user jar shadowing a resource on the system classpath wins on both the
   * driver and the executors when the userClassPathFirst options are enabled.
   */
  private def testUseClassPathFirst(clientMode: Boolean): Unit = {
    // Create a jar file that contains a different version of "test.resource".
    val originalJar = TestUtils.createJarWithFiles(Map("test.resource" -> "ORIGINAL"), tempDir)
    val userJar = TestUtils.createJarWithFiles(Map("test.resource" -> "OVERRIDDEN"), tempDir)
    val driverResult = File.createTempFile("driver", null, tempDir)
    val executorResult = File.createTempFile("executor", null, tempDir)
    runSpark(clientMode, mainClassName(YarnClasspathTest.getClass),
      appArgs = Seq(driverResult.getAbsolutePath(), executorResult.getAbsolutePath()),
      extraClassPath = Seq(originalJar.getPath()),
      extraJars = Seq("local:" + userJar.getPath()),
      extraConf = Map(
        "spark.driver.userClassPathFirst" -> "true",
        "spark.executor.userClassPathFirst" -> "true"))
    checkResult(driverResult, "OVERRIDDEN")
    checkResult(executorResult, "OVERRIDDEN")
  }

  /**
   * Launches a Spark application through spark-submit against the mini cluster.
   *
   * @param clientMode true for yarn-client, false for yarn-cluster
   * @param klass main class name, or a path ending in ".py" for Python apps
   */
  private def runSpark(
      clientMode: Boolean,
      klass: String,
      appArgs: Seq[String] = Nil,
      sparkArgs: Seq[String] = Nil,
      extraClassPath: Seq[String] = Nil,
      extraJars: Seq[String] = Nil,
      extraConf: Map[String, String] = Map()): Unit = {
    val master = if (clientMode) "yarn-client" else "yarn-cluster"
    val props = new Properties()

    props.setProperty("spark.yarn.jar", "local:" + fakeSparkJar.getAbsolutePath())

    val childClasspath = logConfDir.getAbsolutePath() +
      File.pathSeparator +
      sys.props("java.class.path") +
      File.pathSeparator +
      extraClassPath.mkString(File.pathSeparator)
    props.setProperty("spark.driver.extraClassPath", childClasspath)
    props.setProperty("spark.executor.extraClassPath", childClasspath)

    // SPARK-4267: make sure java options are propagated correctly.
    props.setProperty("spark.driver.extraJavaOptions", "-Dfoo=\\"one two three\\"")
    props.setProperty("spark.executor.extraJavaOptions", "-Dfoo=\\"one two three\\"")

    yarnCluster.getConfig().foreach { e =>
      props.setProperty("spark.hadoop." + e.getKey(), e.getValue())
    }

    sys.props.foreach { case (k, v) =>
      if (k.startsWith("spark.")) {
        props.setProperty(k, v)
      }
    }

    extraConf.foreach { case (k, v) => props.setProperty(k, v) }

    val propsFile = File.createTempFile("spark", ".properties", tempDir)
    val writer = new OutputStreamWriter(new FileOutputStream(propsFile), UTF_8)
    props.store(writer, "Spark properties.")
    writer.close()

    val extraJarArgs = if (extraJars.nonEmpty) Seq("--jars", extraJars.mkString(",")) else Nil
    val mainArgs =
      if (klass.endsWith(".py")) {
        Seq(klass)
      } else {
        Seq("--class", klass, fakeSparkJar.getAbsolutePath())
      }
    val argv =
      Seq(
        new File(sys.props("spark.test.home"), "bin/spark-submit").getAbsolutePath(),
        "--master", master,
        "--num-executors", "1",
        "--properties-file", propsFile.getAbsolutePath()) ++
      extraJarArgs ++
      sparkArgs ++
      mainArgs ++
      appArgs

    Utils.executeAndGetOutput(argv,
      extraEnvironment = Map("YARN_CONF_DIR" -> tempDir.getAbsolutePath()))
  }

  /**
   * This is a workaround for an issue with yarn-cluster mode: the Client class will not provide
   * any sort of error when the job process finishes successfully, but the job itself fails. So
   * the tests enforce that something is written to a file after everything is ok to indicate
   * that the job succeeded.
   */
  private def checkResult(result: File): Unit = {
    checkResult(result, "success")
  }

  /** Asserts that the result file contains exactly the expected marker string. */
  private def checkResult(result: File, expected: String): Unit = {
    val resultString = Files.toString(result, UTF_8)
    resultString should be (expected)
  }

  /** Strips the trailing '$' from a Scala object's class name for spark-submit. */
  private def mainClassName(klass: Class[_]): String = {
    klass.getName().stripSuffix("$")
  }
}
/**
 * Test listener that records the ExecutorInfo (including log URLs) of every
 * executor registered with the application, keyed by executor id.
 */
private class SaveExecutorInfo extends SparkListener {
  val addedExecutorInfos = mutable.Map[String, ExecutorInfo]()

  override def onExecutorAdded(executor: SparkListenerExecutorAdded): Unit = {
    addedExecutorInfos.put(executor.executorId, executor.executorInfo)
  }
}
/**
 * Driver main class launched inside YARN for the basic-app tests. Runs a
 * trivial parallelize/collect job and writes "success"/"failure" to the
 * result file given as args(0); also records executor info via a listener
 * so executor log URLs can be asserted after the job.
 */
private object YarnClusterDriver extends Logging with Matchers {

  // How long to wait for the listener bus to drain before asserting.
  val WAIT_TIMEOUT_MILLIS = 10000
  // Assigned in main(); read back after the job to check executor log URLs.
  var listener: SaveExecutorInfo = null

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      System.err.println(
        s"""
        |Invalid command line: ${args.mkString(" ")}
        |
        |Usage: YarnClusterDriver [result file]
        """.stripMargin)
      System.exit(1)
    }

    listener = new SaveExecutorInfo
    // App name deliberately contains quotes, backslashes and '$' to exercise
    // argument quoting/escaping on the submission path (see SPARK-4267 note
    // in the suite's runSpark).
    val sc = new SparkContext(new SparkConf()
      .setAppName("yarn \\"test app\\" 'with quotes' and \\\\back\\\\slashes and $dollarSigns"))
    sc.addSparkListener(listener)
    val status = new File(args(0))
    var result = "failure"
    try {
      val data = sc.parallelize(1 to 4, 4).collect().toSet
      assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
      data should be (Set(1, 2, 3, 4))
      result = "success"
    } finally {
      // always stop the context and record the outcome, even on failure
      sc.stop()
      Files.write(result, status, UTF_8)
    }

    // verify log urls are present
    listener.addedExecutorInfos.values.foreach { info =>
      assert(info.logUrlMap.nonEmpty)
    }
  }

}
/**
 * Test application that verifies classpath propagation: it checks that a resource
 * named "test.resource" is readable both on the driver (result written to the
 * first argument) and on an executor (result written to the second argument).
 */
private object YarnClasspathTest {

  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      System.err.println(
        s"""
        |Invalid command line: ${args.mkString(" ")}
        |
        |Usage: YarnClasspathTest [driver result file] [executor result file]
        """.stripMargin)
      System.exit(1)
    }
    // Check on the driver first, then once inside a task running on an executor.
    readResource(args(0))
    val sc = new SparkContext(new SparkConf())
    try {
      sc.parallelize(Seq(1)).foreach { x => readResource(args(1)) }
    } finally {
      sc.stop()
    }
  }

  // Reads "test.resource" via the context class loader and writes its contents
  // (or "failure" if anything went wrong) to the given result file.
  private def readResource(resultPath: String): Unit = {
    var result = "failure"
    try {
      val ccl = Thread.currentThread().getContextClassLoader()
      val resource = ccl.getResourceAsStream("test.resource")
      val bytes = ByteStreams.toByteArray(resource)
      result = new String(bytes, 0, bytes.length, UTF_8)
    } finally {
      // Always write something so the harness can detect the outcome.
      Files.write(result, new File(resultPath), UTF_8)
    }
  }
}
| hengyicai/OnlineAggregationUCAS | yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | Scala | apache-2.0 | 13,003 |
package pl.touk.nussknacker.engine.util.definition
import pl.touk.nussknacker.engine.api.LazyParameter
import pl.touk.nussknacker.engine.api.typed.TypedMap
import pl.touk.nussknacker.engine.api.typed.typing.{Typed, TypedClass, TypedObjectTypingResult, TypingResult}
import scala.collection.immutable.ListMap
object LazyParameterUtils {

  /**
   * Combines a (field name -> lazy value) map into a single [[LazyParameter]]
   * producing a [[TypedMap]], preserving the insertion order of the ListMap.
   */
  def typedMap(params: ListMap[String, LazyParameter[AnyRef]]): LazyParameter[TypedMap] = {
    // Turn each entry into a lazy (key, value) pair so `sequence` can evaluate
    // all of them together and the map can be rebuilt from the results.
    val keyValuePairs = params.toList.map {
      case (fieldName, lazyValue) => LazyParameter.pure(fieldName, Typed[String]).product(lazyValue)
    }

    // Rebuilds the object's typing result from the tuple types produced by the
    // products above: each element is expected to be a TypedClass of a pair
    // whose second type argument is the field's value type.
    def buildResultType(tupleTypes: List[TypingResult]): TypingResult =
      TypedObjectTypingResult(
        params.keys.toList.zip(tupleTypes).map {
          case (fieldName, TypedClass(_, _ :: valueType :: Nil)) =>
            fieldName -> valueType
          case other =>
            throw new IllegalArgumentException(s"Unexpected result of type transformation returned by sequence: $other")
        }
      )

    LazyParameter.sequence[(String, AnyRef), TypedMap](keyValuePairs, seq => TypedMap(seq.toMap), buildResultType)
  }
}
| TouK/nussknacker | utils/components-utils/src/main/scala/pl/touk/nussknacker/engine/util/definition/LazyParameterUtils.scala | Scala | apache-2.0 | 1,094 |
package com.twitter.concurrent
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ConcurrentPoolTest extends WordSpec {

  "reserve items" in {
    val pool = new ConcurrentPool[Int, Int]
    pool.put(1, 2)
    assert(pool.get(1) === Some(2))
    // A retrieved item is consumed: a second get for the same key finds nothing.
    assert(pool.get(1) === None)
  }

  "yield items in FIFO order" in {
    val pool = new ConcurrentPool[Int, Int]
    (0 until 10).foreach(i => pool.put(1, i))
    (0 until 10).foreach(i => assert(pool.get(1) === Some(i)))
    assert(pool.get(1) === None)
  }

  "kill empty lists" in {
    val pool = new ConcurrentPool[Int, Int]
    pool.put(1, 1)
    assert(pool.get(1) === Some(1))
    // Draining key 1 removes its now-empty queue from the map and schedules it
    // on the death queue ...
    assert(pool.map.containsKey(1) === false)
    assert(pool.deathQueue.size === 1)
    // ... which is drained by the next put.
    pool.put(2, 1)
    assert(pool.deathQueue.size === 0)
  }

  // Can't really test the race condition case :-/
}
| travisbrown/util | util-core/src/test/scala/com/twitter/concurrent/ConcurrentPoolTest.scala | Scala | apache-2.0 | 930 |
package org.ivnbl.gametools.il2cod
import javax.servlet.http.{HttpServletResponse, HttpServletRequest, HttpServlet}
import java.util.{Calendar, Date}
import java.io.{InputStream, OutputStream}
import org.slf4j.LoggerFactory
import javax.servlet.ServletConfig
/**
 * Serves Vaadin's static resources (the /VAADIN directory) straight from the
 * Vaadin jar via its class loader, with browser caching headers. Resources are
 * bundled in the jar, so they are treated as unmodified since servlet init.
 */
class VaadinStaticResourceServlet extends HttpServlet {
  val log = LoggerFactory.getLogger(getClass)

  // Used as the Last-Modified date for every resource; set once in init().
  var servletInitDate: Date = null

  override def init() {
    super.init()
    servletInitDate = new Date()
  }

  override def init(servletConfig: ServletConfig) {
    super.init(servletConfig)
    log.info("Servlet Name: {}", servletConfig.getServletName)
  }

  override def doGet(req: HttpServletRequest, resp: HttpServletResponse) {
    // Let the browser cache resources for one hour.
    val expiredCalendar = Calendar.getInstance
    expiredCalendar.add(Calendar.HOUR, 1)

    // Caching headers
    resp.addDateHeader("Last-Modified", servletInitDate.getTime)
    resp.addDateHeader("Expires", expiredCalendar.getTime.getTime)

    // getDateHeader returns -1 when the header is absent; new Date(-1) is
    // before servletInitDate, so the resource is streamed in that case.
    req.getDateHeader("If-Modified-Since") match {
      case since if (new Date(since).after(servletInitDate)) =>
        // 304 is not an error: use setStatus rather than sendError so the
        // container does not dispatch an error page (a 304 must have no body).
        resp.setStatus(HttpServletResponse.SC_NOT_MODIFIED)
      case _ => streamRequestedResource(req, resp)
    }
  }

  private def streamRequestedResource(req: HttpServletRequest, resp: HttpServletResponse) {
    // Use Vaadin bundle class loader to provide static resources.
    val pathToResource = "/VAADIN" + req.getPathInfo
    val resourceInStream = classOf[com.vaadin.Application].getResourceAsStream(pathToResource)
    if (null == resourceInStream) {
      // No such resource
      resp.sendError(HttpServletResponse.SC_NOT_FOUND)
    } else {
      try {
        streamResource(resp.getOutputStream, resourceInStream)
      } finally {
        resourceInStream.close()
      }
    }
  }

  // Copies the input stream to the output stream byte by byte until EOF.
  private def streamResource(out: OutputStream, in: InputStream) {
    Stream.continually(in.read).takeWhile(-1 != _).foreach(out.write(_))
  }
}
| ivanobulo/IL-2-COD-server-ui | il2-cod-webapp/src/main/scala/org/ivnbl/gametools/il2cod/VaadinStaticResourceServlet.scala | Scala | apache-2.0 | 2,044 |
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
import std._
import xsbt.api.{ Discovered, Discovery }
import inc.Analysis
import TaskExtra._
import sbt.internal.util.FeedbackProvidedException
import sbt.internal.util.Types._
import xsbti.api.Definition
import ConcurrentRestrictions.Tag
import testing.{ AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask }
import scala.annotation.tailrec
import java.io.File
import sbt.util.Logger
/** Marker trait for the options that configure a test run; see the constructors in [[Tests]]. */
sealed trait TestOption
/**
 * Implements sbt's test running: discovering test classes from compile analysis,
 * applying [[TestOption]]s, executing the frameworks' tasks (in parallel or
 * serially) and folding their results into an [[Tests.Output]].
 */
object Tests {
  /**
   * The result of a test run.
   *
   * @param overall The overall result of execution across all tests for all test frameworks in this test run.
   * @param events The result of each test group (suite) executed during this test run.
   * @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated.
   */
  final case class Output(overall: TestResult.Value, events: Map[String, SuiteResult], summaries: Iterable[Summary])

  /**
   * Summarizes a test run.
   *
   * @param name The name of the test framework providing this summary.
   * @param summaryText The summary message for tests run by the test framework.
   */
  final case class Summary(name: String, summaryText: String)

  /**
   * Defines a TestOption that will evaluate `setup` before any tests execute.
   * The ClassLoader provided to `setup` is the loader containing the test classes that will be run.
   * Setup is not currently performed for forked tests.
   */
  final case class Setup(setup: ClassLoader => Unit) extends TestOption

  /**
   * Defines a TestOption that will evaluate `setup` before any tests execute.
   * Setup is not currently performed for forked tests.
   */
  def Setup(setup: () => Unit) = new Setup(_ => setup())

  /**
   * Defines a TestOption that will evaluate `cleanup` after all tests execute.
   * The ClassLoader provided to `cleanup` is the loader containing the test classes that ran.
   * Cleanup is not currently performed for forked tests.
   */
  final case class Cleanup(cleanup: ClassLoader => Unit) extends TestOption

  /**
   * Defines a TestOption that will evaluate `cleanup` after all tests execute.
   * Cleanup is not currently performed for forked tests.
   */
  def Cleanup(cleanup: () => Unit) = new Cleanup(_ => cleanup())

  /** The names of tests to explicitly exclude from execution. */
  final case class Exclude(tests: Iterable[String]) extends TestOption

  /** Registers listeners that are notified of test events during the run. */
  final case class Listeners(listeners: Iterable[TestReportListener]) extends TestOption

  /** Selects tests by name to run. Only tests for which `filterTest` returns true will be run. */
  final case class Filter(filterTest: String => Boolean) extends TestOption

  /** Test execution will be ordered by the position of the matching filter. */
  final case class Filters(filterTest: Seq[String => Boolean]) extends TestOption

  /** Defines a TestOption that passes arguments `args` to all test frameworks. */
  def Argument(args: String*): Argument = Argument(None, args.toList)

  /** Defines a TestOption that passes arguments `args` to only the test framework `tf`. */
  def Argument(tf: TestFramework, args: String*): Argument = Argument(Some(tf), args.toList)

  /**
   * Defines arguments to pass to test frameworks.
   *
   * @param framework The test framework the arguments apply to if one is specified in Some.
   * If None, the arguments will apply to all test frameworks.
   * @param args The list of arguments to pass to the selected framework(s).
   */
  final case class Argument(framework: Option[TestFramework], args: List[String]) extends TestOption

  /**
   * Configures test execution.
   *
   * @param options The options to apply to this execution, including test framework arguments, filters,
   * and setup and cleanup work.
   * @param parallel If true, execute each unit of work returned by the test frameworks in separate sbt.Tasks.
   * If false, execute all work in a single sbt.Task.
   * @param tags The tags that should be added to each test task. These can be used to apply restrictions on
   * concurrent execution.
   */
  final case class Execution(options: Seq[TestOption], parallel: Boolean, tags: Seq[(Tag, Int)])

  /** Configures whether a group of tests runs in the same JVM or are forked. */
  sealed trait TestRunPolicy

  /** Configures a group of tests to run in the same JVM. */
  case object InProcess extends TestRunPolicy

  /** Configures a group of tests to be forked in a new JVM with forking options specified by `config`. */
  final case class SubProcess(config: ForkOptions) extends TestRunPolicy

  object SubProcess {
    @deprecated("Construct SubProcess with a ForkOptions argument.", "0.13.0")
    def apply(javaOptions: Seq[String]): SubProcess = SubProcess(ForkOptions(runJVMOptions = javaOptions))
  }

  /** A named group of tests configured to run in the same JVM or be forked. */
  final case class Group(name: String, tests: Seq[TestDefinition], runPolicy: TestRunPolicy)

  /**
   * The result of applying the [[TestOption]]s of an [[Execution]]: the filtered,
   * de-duplicated tests plus the collected setup/cleanup functions and listeners.
   */
  private[sbt] final class ProcessedOptions(
    val tests: Seq[TestDefinition],
    val setup: Seq[ClassLoader => Unit],
    val cleanup: Seq[ClassLoader => Unit],
    val testListeners: Seq[TestReportListener])

  /**
   * Applies the options in `config` to the `discovered` test definitions:
   * collects setup/cleanup functions and listeners, drops excluded or
   * unmatched tests, and orders tests according to any ordered [[Filters]].
   */
  private[sbt] def processOptions(config: Execution, discovered: Seq[TestDefinition], log: Logger): ProcessedOptions =
    {
      import collection.mutable.{ HashSet, ListBuffer, Map, Set }
      val testFilters = new ListBuffer[String => Boolean]
      var orderedFilters = Seq[String => Boolean]()
      val excludeTestsSet = new HashSet[String]
      val setup, cleanup = new ListBuffer[ClassLoader => Unit]
      val testListeners = new ListBuffer[TestReportListener]
      val undefinedFrameworks = new ListBuffer[String]
      for (option <- config.options) {
        option match {
          case Filter(include) => testFilters += include
          case Filters(includes) => if (orderedFilters.nonEmpty) sys.error("Cannot define multiple ordered test filters.") else orderedFilters = includes
          case Exclude(exclude) => excludeTestsSet ++= exclude
          case Listeners(listeners) => testListeners ++= listeners
          case Setup(setupFunction) => setup += setupFunction
          case Cleanup(cleanupFunction) => cleanup += cleanupFunction
          case a: Argument => // now handled by whatever constructs `runners`
        }
      }

      if (excludeTestsSet.nonEmpty)
        log.debug(excludeTestsSet.mkString("Excluding tests: \\n\\t", "\\n\\t", ""))
      if (undefinedFrameworks.nonEmpty)
        log.warn("Arguments defined for test frameworks that are not present:\\n\\t" + undefinedFrameworks.mkString("\\n\\t"))

      // A test is kept when it is not excluded and matches every unordered filter.
      def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
      val filtered0 = discovered.filter(includeTest).toList.distinct
      // With ordered filters, tests run in the order of the first filter they match.
      val tests = if (orderedFilters.isEmpty) filtered0 else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct
      val uniqueTests = distinctBy(tests)(_.name)
      new ProcessedOptions(uniqueTests, setup.toList, cleanup.toList, testListeners.toList)
    }

  /** Returns the elements of `in`, keeping only the first occurrence of each key `f(t)`. */
  private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] =
    {
      val seen = new collection.mutable.HashSet[K]
      in.filter(t => seen.add(f(t)))
    }

  /** Processes `config` against the `discovered` tests and constructs the task that runs them. */
  def apply(frameworks: Map[TestFramework, Framework], testLoader: ClassLoader, runners: Map[TestFramework, Runner], discovered: Seq[TestDefinition], config: Execution, log: Logger): Task[Output] =
    {
      val o = processOptions(config, discovered, log)
      testTask(testLoader, frameworks, runners, o.tests, o.setup, o.cleanup, log, o.testListeners, config)
    }

  /**
   * Constructs the task that performs user and framework setup, runs the given
   * tests (in parallel or serially according to `config`), and then performs
   * cleanup regardless of the overall result.
   */
  def testTask(loader: ClassLoader, frameworks: Map[TestFramework, Framework], runners: Map[TestFramework, Runner], tests: Seq[TestDefinition],
    userSetup: Iterable[ClassLoader => Unit], userCleanup: Iterable[ClassLoader => Unit],
    log: Logger, testListeners: Seq[TestReportListener], config: Execution): Task[Output] =
    {
      // fj: runs the given actions as forked tasks joined into one Unit task.
      def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
      // partApp: partially applies each action to the test class loader.
      def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () => a(loader) }

      val (frameworkSetup, runnables, frameworkCleanup) =
        TestFramework.testTasks(frameworks, runners, loader, tests, log, testListeners)

      val setupTasks = fj(partApp(userSetup) :+ frameworkSetup)
      val mainTasks =
        if (config.parallel)
          makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join
        else
          makeSerial(loader, runnables, setupTasks, config.tags)
      val taggedMainTasks = mainTasks.tagw(config.tags: _*)
      taggedMainTasks map processResults flatMap { results =>
        val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall))
        cleanupTasks map { _ => results }
      }
    }

  /** A named unit of test work from a framework: (suite name, function that runs it). */
  type TestRunnable = (String, TestFunction)

  // Wraps the nested tasks returned by a framework task as runnables with unique,
  // index-suffixed task names so their results can be tracked individually.
  private def createNestedRunnables(loader: ClassLoader, testFun: TestFunction, nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] =
    nestedTasks.view.zipWithIndex map {
      case (nt, idx) =>
        val testFunDef = testFun.taskDef
        (testFunDef.fullyQualifiedName, TestFramework.createTestFunction(loader, new TaskDef(testFunDef.fullyQualifiedName + "-" + idx, testFunDef.fingerprint, testFunDef.explicitlySpecified, testFunDef.selectors), testFun.runner, nt))
    }

  /** Runs each runnable in its own task, all depending on `setupTasks`. */
  def makeParallel(loader: ClassLoader, runnables: Iterable[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] =
    toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks)

  def toTasks(loader: ClassLoader, runnables: Seq[TestRunnable], tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
    val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
    // Merge the per-task result maps, combining the results of suites that
    // appear in more than one map.
    tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
      case (sum, e) =>
        val merged = sum.toSeq ++ e.toSeq
        val grouped = merged.groupBy(_._1)
        grouped.mapValues(_.map(_._2).foldLeft(SuiteResult.Empty) {
          case (resultSum, result) => resultSum + result
        })
    })
  }

  // Runs a single test function as a tagged task, then runs any nested tasks it
  // returns and merges their results into the suite's entry in the result map.
  def toTask(loader: ClassLoader, name: String, fun: TestFunction, tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
    val base = task { (name, fun.apply()) }
    val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
    taggedBase flatMap {
      case (name, (result, nested)) =>
        val nestedRunnables = createNestedRunnables(loader, fun, nested)
        toTasks(loader, nestedRunnables, tags).map { currentResultMap =>
          val newResult =
            currentResultMap.get(name) match {
              case Some(currentResult) => currentResult + result
              case None => result
            }
          currentResultMap.updated(name, newResult)
        }
    }
  }

  /** Runs all runnables (and any nested tasks they produce) sequentially within a single task. */
  def makeSerial(loader: ClassLoader, runnables: Seq[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] =
    {
      @tailrec
      def processRunnable(runnableList: List[TestRunnable], acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] =
        runnableList match {
          case hd :: rst =>
            val testFun = hd._2
            val (result, nestedTasks) = testFun.apply()
            // Nested tasks are prepended so they run before the remaining suites.
            val nestedRunnables = createNestedRunnables(loader, testFun, nestedTasks)
            processRunnable(nestedRunnables.toList ::: rst, (hd._1, result) :: acc)
          case Nil => acc
        }

      task { processRunnable(runnables.toList, List.empty) } dependsOn (setupTasks)
    }

  /** Aggregates per-suite results into an [[Output]] with the combined overall result. */
  def processResults(results: Iterable[(String, SuiteResult)]): Output =
    Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty)

  /** Combines the outputs of several test-group tasks, in parallel or sequentially. */
  def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] =
    if (results.isEmpty)
      task { Output(TestResult.Passed, Map.empty, Nil) }
    else if (parallel)
      reduced(results.toIndexedSeq, {
        case (Output(v1, m1, _), Output(v2, m2, _)) => Output(if (v1.id < v2.id) v2 else v1, m1 ++ m2, Iterable.empty)
      })
    else {
      def sequence(tasks: List[Task[Output]], acc: List[Output]): Task[List[Output]] = tasks match {
        case Nil => task(acc.reverse)
        case hd :: tl => hd flatMap { out => sequence(tl, out :: acc) }
      }
      sequence(results.toList, List()) map { ress =>
        val (rs, ms) = ress.unzip { e => (e.overall, e.events) }
        Output(overall(rs), ms reduce (_ ++ _), Iterable.empty)
      }
    }

  // Folds `results` to the value with the largest id, which is taken to be the
  // most significant result (assumes TestResult ids order severity).
  def overall(results: Iterable[TestResult.Value]): TestResult.Value =
    (TestResult.Passed /: results) { (acc, result) => if (acc.id < result.id) result else acc }

  /** Discovers tests (and names of main classes) in the compiled sources recorded in `analysis`. */
  def discover(frameworks: Seq[Framework], analysis: Analysis, log: Logger): (Seq[TestDefinition], Set[String]) =
    discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)

  /** All top-level definitions recorded in the API information of `analysis`. */
  def allDefs(analysis: Analysis) = analysis.apis.internal.values.flatMap(_.api.definitions).toSeq

  /**
   * Matches `definitions` against the subclass/annotation `fingerprints` of the
   * test frameworks, returning the discovered test definitions and the names of
   * objects that have a main method.
   */
  def discover(fingerprints: Seq[Fingerprint], definitions: Seq[Definition], log: Logger): (Seq[TestDefinition], Set[String]) =
    {
      val subclasses = fingerprints collect { case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub) };
      val annotations = fingerprints collect { case ann: AnnotatedFingerprint => (ann.annotationName, ann.isModule, ann) };
      log.debug("Subclass fingerprints: " + subclasses)
      log.debug("Annotation fingerprints: " + annotations)

      def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet
      // Fingerprints whose name matched and whose module-ness agrees (note the
      // capitalized IsModule parameter is used as a stable identifier in the pattern).
      def defined(in: Seq[(String, Boolean, Fingerprint)], names: Set[String], IsModule: Boolean): Seq[Fingerprint] =
        in collect { case (name, IsModule, print) if names(name) => print }

      def toFingerprints(d: Discovered): Seq[Fingerprint] =
        defined(subclasses, d.baseClasses, d.isModule) ++
          defined(annotations, d.annotations, d.isModule)

      val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions)
      // TODO: To pass in correct explicitlySpecified and selectors
      val tests = for ((df, di) <- discovered; fingerprint <- toFingerprints(di)) yield new TestDefinition(df.name, fingerprint, false, Array(new SuiteSelector))
      val mains = discovered collect { case (df, di) if di.hasMain => df.name }
      (tests, mains.toSet)
    }

  @deprecated("Tests.showResults() has been superseded with TestResultLogger and setting 'testResultLogger'.", "0.13.5")
  def showResults(log: Logger, results: Output, noTestsMessage: => String): Unit =
    TestResultLogger.Default.copy(printNoTests = TestResultLogger.const(_ info noTestsMessage))
      .run(log, results, "")
}
/** Thrown to signal that the test run did not succeed; feedback has already been reported to the user. */
final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException
| mdedetrich/sbt | main/actions/src/main/scala/sbt/Tests.scala | Scala | bsd-3-clause | 15,102 |
package com.eptcomputing.neo4j.rest
import org.scalatest.{BeforeAndAfterAll, FlatSpec}
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import com.eptcomputing.neo4j.JerseyConverters
import org.codehaus.jettison.json.{JSONArray, JSONException, JSONObject}
import com.sun.jersey.test.framework.JerseyTest
import com.sun.jersey.api.client.UniformInterfaceException
/**
 * Integration spec for the Neo4j REST resource: starts an embedded Jersey server
 * (see beforeAll/afterAll) and exercises POST/GET/PUT/DELETE through real HTTP calls.
 */
@RunWith(classOf[JUnitRunner])
class Neo4jResourceSpec extends JerseyTest("com.eptcomputing.neo4j.rest.test")
  with FlatSpec with ShouldMatchers with BeforeAndAfterAll with JerseyConverters {

  // Run Jersey server while running tests
  override def beforeAll { setUp }
  override def afterAll { tearDown }

  // Helper which creates a new entity via the API, and returns its ID.
  def createEntity(entity: JSONObject) = {
    val created = resource.path("/neo_resource").contentType("application/json").postResponse(entity)
    created.getStatus should equal(201)
    // The new entity's ID is the last path segment of the Location header.
    created.getLocation.getPath.replaceAll(".*/", "").toLong
  }

  // Helper which creates a JSON object from a list of key-value pairs
  def json(pairs: Tuple2[String, Any]*) = {
    val obj = new JSONObject
    for ((key, value) <- pairs.toList) obj.put(key, value.asInstanceOf[AnyRef])
    obj
  }

  "A Neo4jResource" should "return the new resource URL on POST" in {
    val id = createEntity(json(("key", "value")))
    val read = resource.path("/neo_resource/%d".format(id)).getJSON
    read.get("key") should equal("value")
  }

  it should "update resource properties on PUT" in {
    // Create new entity
    val id = createEntity(json(("one", 1), ("two", 2), ("three", 3)))

    // Delete one, update two, leave three unchanged, add four
    val updated = json(("two", 22), ("three", 3), ("four", 4))
    val readBack = resource.path("/neo_resource/%d".format(id)).contentType("application/json").putJSON(updated)

    // Also do a separate read, and make sure both have the right contents
    val readSeparate = resource.path("/neo_resource/%d".format(id)).getJSON
    for (read <- Array(readBack, readSeparate)) {
      evaluating { read.getInt("one") } should produce [JSONException]
      read.getInt("two") should equal(22)
      read.getInt("three") should equal(3)
      read.getInt("four") should equal(4)
    }
  }

  it should "update relationships on PUT" in {
    // Initial graph: 1 <-- 2 <--> 3 and 1 <-- 3
    val one = createEntity(json())
    val two = createEntity(json(("_out", json(("ONE_TWO", one)))))
    val three = createEntity(json(
      ("_in", json(("TWO_THREE", two))),
      ("_out", json(("TWO_THREE", two), ("ONE_TWO", json(("_end", one), ("foo", "bar")))))
    ))
    val four = createEntity(json())

    // Update to: 1 <--> 2 --> 3 and 1 <-- 3 and 2 --> 4
    val twoUpdate = json(
      ("_in", json(("ONE_TWO", json(("_start", one), ("foo", "bar"))))),
      ("_out", json(("ONE_TWO", one), ("TWO_THREE", (new JSONArray).put(three).put(four))))
    )
    val readBack = resource.path("/neo_resource/%d".format(two)).contentType("application/json").putJSON(twoUpdate)

    // Also do a separate read, and make sure both have the right contents
    val readSeparate = resource.path("/neo_resource/%d".format(two)).getJSON
    for (read <- Array(readBack, readSeparate)) {
      val in = read.getJSONObject("_in")
      val out = read.getJSONObject("_out")
      in.getJSONObject("ONE_TWO").getInt("_start") should equal(one)
      in.getJSONObject("ONE_TWO").getString("foo") should equal("bar")
      out.getJSONObject("ONE_TWO").getInt("_end") should equal(one)
      // The old incoming TWO_THREE and the outgoing ONE_TWO property must be gone.
      evaluating { in.getJSONObject("TWO_THREE") } should produce [JSONException]
      evaluating { out.getJSONObject("ONE_TWO").getString("foo") } should produce [JSONException]
      // Multiple relationships of the same type are returned as an array (order unspecified).
      Set(
        out.getJSONArray("TWO_THREE").getJSONObject(0).getInt("_end"),
        out.getJSONArray("TWO_THREE").getJSONObject(1).getInt("_end")
      ) should equal(Set(three, four))
    }
  }

  it should "delete a resource on DELETE" in {
    // Create two new entities with a relationship
    val id = createEntity(json(("key", "value")))
    createEntity(json(("something", "else"), ("_out", json(("KNOWS", id)))))

    // Delete the first and check that it has gone
    val response = resource.path("/neo_resource/%d".format(id)).deleteJSON
    response.get("key") should equal("value")
    val thrown = evaluating {
      resource.path("/neo_resource/%d".format(id)).getJSON
    } should produce [UniformInterfaceException]
    thrown.getResponse.getStatus should equal(404)
  }
}
| ept/neo4j-resources | src/test/scala/com/eptcomputing/neo4j/rest/Neo4jResourceSpec.scala | Scala | mit | 4,613 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.