code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright (c) 2015 by PROJECT Challenger
* All rights reserved.
*/
package com.autodesk.tct.challenger.data.repositories
/**
* Factory Interface - generates different repositories
*/
trait IRepositoryFactory {
  /**
   * Generate a repo for activity interacting
   * @return the activity repository
   */
  def activityRepository: IActivityRepository
  /**
   * Generate a repo for comment interacting
   * @return the comment repository
   */
  def commentRepository: ICommentRepository
  /**
   * Generate a repo for event interacting
   * @return the event repository
   */
  def eventRepository: IEventRepository
  /**
   * Generate a repo for event extension interacting
   * @return the event extension repository
   */
  def eventExtRepository: IEventExtensionRepository
  /**
   * Generate a repo for user interacting
   * @return the user repository
   */
  def userRepository: IUserRepository
  /**
   * Generate a repo for idea interacting
   * @return the idea repository
   */
  def ideaRepository: IIdeaRepository
}
/**
* Factory implementation - generates real repositories
*/
object RepositoryFactory {
  /** Fully qualified class name of the IRepositoryFactory implementation to load. */
  var driverClassName: String = ""
  /** Cached factory instance; None until the first call to `factory`. */
  var factoryInstance: Option[IRepositoryFactory] = None

  /**
   * Set the driver
   * @param driverClass the fully qualified class name
   */
  def driver(driverClass: String): Unit = {
    driverClassName = driverClass
  }

  /**
   * Get the actual factory instance, instantiating and caching it on first use.
   *
   * Synchronized: the original check-then-act on the shared `factoryInstance`
   * var was not thread-safe and could instantiate the driver class more than
   * once under concurrent access.
   *
   * @return the evaluated factory
   */
  def factory: IRepositoryFactory = synchronized {
    factoryInstance match {
      case Some(instance) => instance
      case None =>
        // getDeclaredConstructor().newInstance() replaces the deprecated
        // Class.newInstance(), which bypasses compiler checking of checked
        // exceptions thrown by the constructor.
        val newInstance = Class.forName(driverClassName)
          .getDeclaredConstructor()
          .newInstance()
          .asInstanceOf[IRepositoryFactory]
        factoryInstance = Some(newInstance)
        newInstance
    }
  }
}
| adsk-cp-tct/challenger-backend | challenger-common/src/main/scala/com/autodesk/tct/challenger/data/repositories/RepositoryFactory.scala | Scala | gpl-3.0 | 1,779 |
package org.beaucatcher.mongo
import com.typesafe.config._
/** Settings derived from the "beaucatcher.mongo" section of the supplied config. */
private[mongo] class ContextSettings(config: Config) {
  import scala.collection.JavaConverters._

  // Fail fast if the supplied config is missing required "beaucatcher" keys.
  config.checkValid(ConfigFactory.defaultReference(), "beaucatcher")

  private val mongoConfig = config.getConfig("beaucatcher.mongo")

  /** Fully qualified class name of the mongo driver implementation. */
  val driverClassName = mongoConfig.getString("driver")

  /** Hosts listed under connection.hosts; empty when the path is absent. */
  val hosts: Seq[String] =
    if (!mongoConfig.hasPath("connection.hosts"))
      Seq.empty // FIXME from URI
    else
      mongoConfig.getStringList("connection.hosts").asScala

  /** Database name under connection.default-database-name; "" when absent. */
  val defaultDatabaseName =
    if (!mongoConfig.hasPath("connection.default-database-name"))
      "" // FIXME from URI
    else
      mongoConfig.getString("connection.default-database-name")

  // Assembles a mongodb:// URI from the individual host/database settings.
  private def uriFromParts: String = {
    // FIXME not quite there yet ;-)
    s"mongodb://${hosts.mkString(",")}/$defaultDatabaseName"
  }

  // this will have to get more complex to use the other stuff
  // in the config file
  /** The explicit "uri" setting when present, otherwise built from parts. */
  val uri: String =
    if (mongoConfig.hasPath("uri")) mongoConfig.getString("uri")
    else uriFromParts
}
| havocp/beaucatcher | mongo/src/main/scala/org/beaucatcher/mongo/Settings.scala | Scala | apache-2.0 | 1,068 |
package exceptions
/**
 * Thrown when a coordinate lies outside the supported bounds.
 *
 * Both parameters default to `null`, so the original no-argument usage
 * (`new CoordinateOutOfBoundsException`) keeps working unchanged, while
 * callers can now attach a descriptive message and/or an underlying cause.
 *
 * @param message detail about the offending coordinate, or null
 * @param cause   underlying exception, or null
 */
class CoordinateOutOfBoundsException(message: String = null, cause: Throwable = null)
  extends Exception(message, cause) {
}
| shawes/zissou | src/main/scala/exceptions/CoordinateOutOfBoundsException.scala | Scala | mit | 80 |
/***
* Copyright 2017 Andrea Lorenzani
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***/
package name.lorenzani.andrea.homeaway.services
import com.twitter.finagle.http.{Request, Response}
import com.twitter.util.Future
import name.lorenzani.andrea.homeaway.datastore.DataStore
/** A handler that turns an incoming HTTP request into an eventual response. */
trait RequestHandler {
  /**
   * Process the given request.
   * @param request the incoming HTTP request
   * @return a future completing with the HTTP response
   */
  def handle(request: Request): Future[Response]
}
/**
 * Factory methods producing one RequestHandler per HTTP verb, each wired to
 * the shared DataStore instance.
 */
object RequestHandler {
  /** Handler serving GET requests. */
  def getGetHandler: RequestHandler = new GetRequestHandler(DataStore.getDataStore)
  /** Handler serving POST requests. */
  def getPostHandler: RequestHandler = new PostRequestHandler(DataStore.getDataStore)
  /** Handler serving DELETE requests. */
  def getDelHandler: RequestHandler = new DelRequestHandler(DataStore.getDataStore)
  /** Handler serving PUT requests. */
  def getPutHandler: RequestHandler = new PutRequestHandler(DataStore.getDataStore)
}
| andrealorenzani/HAVacationRental | src/main/scala/name/lorenzani/andrea/homeaway/services/RequestHandler.scala | Scala | apache-2.0 | 1,245 |
package xyz.hyperreal
import slick.driver.H2Driver.api._
import akka.event.Logging
import spray.http.{StatusCodes, HttpResponse, HttpHeaders, HttpEntity}
import spray.json.DefaultJsonProtocol._
import spray.json._
import spray.httpx.unmarshalling.MalformedContent
import org.joda.time.Instant
import concurrent._
import concurrent.duration._
package object cramsite {
  // User role levels, in increasing order of privilege.
  val GUEST = 0
  val USER = 1
  val ADMIN = 2
  val SUADMIN = 3
  // Blocks the current thread until the awaitable completes (no timeout).
  def await[T]( a: Awaitable[T] ) = Await.result( a, Duration.Inf )
  val log = Logging( Main.akka, getClass )
  // spray-json (de)serialization of a Joda Instant as {"millis": <epoch millis>}.
  implicit object InstantJsonFormat extends JsonFormat[Instant] {
    def write(x: Instant) = JsObject(Map("millis" -> JsNumber(x.getMillis)))
    def read(value: JsValue) = value match {
      case JsObject(x) => new Instant(x("millis").asInstanceOf[JsNumber].value.longValue)
      case x => sys.error("Expected Instant as JsObject, but got " + x)
    }
  }
  // Well-known file rows, resolved lazily on first access (each blocks on the DB).
  // NOTE(review): .head/.get assume the root plus "Users"/"Private" rows always
  // exist with ids — verify seeding guarantees this.
  lazy val rootid = dao.Files.findRoot.head.id.get
  lazy val usersid = await( dao.Files.find(rootid, "Users") ).head.id.get
  lazy val privateid = await( dao.Files.find(rootid, "Private") ).head.id.get
  // Default ids; presumably assigned during application startup elsewhere — confirm.
  var defaultFolderid: Int = _
  var defaultFileid: Int = _
  var defaultUserid: Int = _
  // Small helpers for building common HTTP responses.
  def ok( message: String = "" ) = HttpResponse( status = StatusCodes.OK, message )
  def conflict( message: String ) = HttpResponse( status = StatusCodes.Conflict, message )
  def badRequest( message: String ) = HttpResponse( status = StatusCodes.BadRequest, message )
} | edadma/cram-site | src/main/scala/cramsite.scala | Scala | mit | 1,460 |
/*
* Copyright 2014 Kevin Herron
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpetri.ethernetip.encapsulation.cpf.items
import com.digitalpetri.ethernetip.encapsulation.cpf.CpfItem
import com.digitalpetri.ethernetip.util.Buffers
import io.netty.buffer.ByteBuf
/**
* This address item shall be used when the encapsulated protocol is connection-oriented. The data shall contain a
* connection identifier.
*
* @param connectionId The connection identifier, exchanged in the Forward Open service of the Connection Manager.
*/
case class ConnectedAddressItem(connectionId: Int) extends CpfItem(ConnectedAddressItem.TypeId)
object ConnectedAddressItem {
  /** CPF item type id identifying a connected address item. */
  val TypeId = 0xA1
  /** Encoded data length in bytes: one 32-bit connection id. */
  val Length = 4

  /**
   * Encode the item as typeId (u16), length (u16), connectionId (32-bit).
   *
   * @param item   the item to encode
   * @param buffer destination buffer; a fresh unpooled buffer by default
   * @return the buffer the item was written to
   */
  def encode(item: ConnectedAddressItem, buffer: ByteBuf = Buffers.unpooled()): ByteBuf = {
    buffer.writeShort(item.typeId)
    buffer.writeShort(Length)
    buffer.writeInt(item.connectionId)
    buffer
  }

  /**
   * Decode a ConnectedAddressItem from the buffer.
   *
   * @throws IllegalArgumentException if the type id or length field read from
   *         the buffer does not match this item type
   */
  def decode(buffer: ByteBuf): ConnectedAddressItem = {
    val typeId = buffer.readUnsignedShort()
    val length = buffer.readUnsignedShort()
    // readInt() is signed: connection ids above Int.MaxValue appear negative
    // here but round-trip correctly through encode()'s writeInt.
    val connectionId = buffer.readInt()

    // Wire data is untrusted input, so validate with require (always enabled)
    // instead of assert, which is elided under -Xdisable-assertions and would
    // silently accept corrupt frames.
    require(typeId == TypeId, s"expected type id 0x${TypeId.toHexString} but got 0x${typeId.toHexString}")
    require(length == Length, s"expected length $Length but got $length")

    ConnectedAddressItem(connectionId)
  }
}
| digitalpetri/scala-ethernet-ip | enip-core/src/main/scala/com/digitalpetri/ethernetip/encapsulation/cpf/items/ConnectedAddressItem.scala | Scala | apache-2.0 | 1,749 |
package com.codahale.jerkson.tests
import com.codahale.jerkson.Json._
import com.codahale.simplespec.Spec
import com.codahale.jerkson.ParsingException
import java.io.ByteArrayInputStream
import org.junit.Test
// Regression tests for Jerkson edge cases: builder reuse, JSON nulls inside
// arrays, and the error messages produced for malformed/invalid/empty input.
class EdgeCaseSpec extends Spec {
  class `Deserializing lists` {
    // Parsing the same input twice must yield equal results; guards against
    // a Seq builder being cached and reused between parses (see test name).
    @Test def `doesn't cache Seq builders` = {
      parse[List[Int]]("[1,2,3,4]").must(be(List(1, 2, 3, 4)))
      parse[List[Int]]("[1,2,3,4]").must(be(List(1, 2, 3, 4)))
    }
  }
  class `Parsing a JSON array of ints with nulls` {
    // JSON null maps to None when the element type is Option[Int].
    @Test def `should be readable as a List[Option[Int]]` = {
      parse[List[Option[Int]]]("[1,2,null,4]").must(be(List(Some(1), Some(2), None, Some(4))))
    }
  }
  class `Deserializing maps` {
    // Same builder-reuse guard as above, for Map builders.
    @Test def `doesn't cache Map builders` = {
      parse[Map[String, Int]](""" {"one":1, "two": 2} """).must(be(Map("one" -> 1, "two" -> 2)))
      parse[Map[String, Int]](""" {"one":1, "two": 2} """).must(be(Map("one" -> 1, "two" -> 2)))
    }
  }
  class `Parsing malformed JSON` {
    // Syntactically broken input: both the bad-token and truncated-object
    // cases must surface a ParsingException with the exact message below.
    @Test def `should throw a ParsingException with an informative message` = {
      evaluating {
        parse[Boolean]("jjf8;09")
      }.must(throwA[ParsingException](
        "Malformed JSON. Unexpected character ('j' (code 106)): expected a " +
        "valid value (number, String, array, object, 'true', 'false' " +
        "or 'null') at character offset 0."))
      evaluating {
        parse[CaseClass]("{\"ye\":1")
      }.must(throwA[ParsingException](
        "Malformed JSON. Unexpected end-of-input: expected close marker for " +
        "OBJECT at character offset 20."))
    }
  }
  class `Parsing invalid JSON` {
    // Well-formed JSON that cannot be bound to CaseClass: wrong token type,
    // or an object missing the required fields.
    @Test def `should throw a ParsingException with an informative message` = {
      evaluating {
        parse[CaseClass]("900")
      }.must(throwA[ParsingException](
        ("""Can not deserialize instance of com.codahale.jerkson.tests.CaseClass out of VALUE_NUMBER_INT token\n""" +
        """ at \[Source: java.io.StringReader@[0-9a-f]+; line: 1, column: 1\]""").r))
      evaluating {
        parse[CaseClass]("{\"woo\": 1}")
      }.must(throwA[ParsingException]("Invalid JSON. Needed [id, name], but found [woo]."))
    }
  }
  class `Parsing an empty document` {
    // Zero-byte input stream must be rejected, not treated as null/default.
    @Test def `should throw a ParsingException with an informative message` = {
      val input = new ByteArrayInputStream(Array.empty)
      evaluating {
        parse[CaseClass](input)
      }.must(throwA[ParsingException]("""No content to map due to end\-of\-input""".r))
    }
  }
}
| codahale/jerkson | src/test/scala/com/codahale/jerkson/tests/EdgeCaseSpec.scala | Scala | mit | 2,542 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactiveTests
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable
import org.reactivestreams.Publisher
import org.reactivestreams.tck.PublisherVerification
import org.scalatestplus.testng.TestNGSuiteLike
import scala.util.Random
// Reactive Streams TCK verification of Observable.toReactivePublisher:
// PublisherVerification supplies the test cases; the factory methods below
// provide the publishers under test.
class ObservableToPublisherTest extends PublisherVerification[Long](env())
  with TestNGSuiteLike {
  // Wraps a value in an Observable; roughly 1 in 10 calls (when the random
  // int is divisible by 10) forces async execution so both synchronous and
  // asynchronous emission paths get exercised by the TCK.
  def eval[A](x: A): Observable[A] = {
    val n = Random.nextInt()
    if (math.abs(n % 10) == 0)
      Observable.now(x).executeAsync
    else
      Observable.now(x)
  }
  // Long.MaxValue is the TCK's marker for an effectively unbounded publisher,
  // hence the infinite repeat; otherwise emit exactly `elements` values.
  def createPublisher(elements: Long): Publisher[Long] = {
    if (elements == Long.MaxValue)
      Observable.repeat(1L).flatMap(eval)
        .toReactivePublisher
    else
      Observable.range(0, elements).flatMap(eval)
        .toReactivePublisher
  }
  // A publisher that terminates with onError immediately.
  def createFailedPublisher(): Publisher[Long] = {
    Observable.raiseError(new RuntimeException("dummy"))
      .asInstanceOf[Observable[Long]]
      .toReactivePublisher
  }
} | monixio/monix | reactiveTests/src/test/scala/monix/reactiveTests/ObservableToPublisherTest.scala | Scala | apache-2.0 | 1,662 |
package com.acrussell.lambda
import org.scalatest.FlatSpec
import com.acrussell.lambda.Semantics._
// Checks that prettyPrint drops parentheses made redundant by application's
// left associativity, while keeping those that change the parse.
class PrettyPrintSpec extends FlatSpec {
  behavior of "A lambda calculus pretty printer"
  it should "remove unneeded parentheses" in {
    // Left-nested applications need no parentheses at all.
    assert(prettyPrint(LambdaParser("((((uv)x)y)z)").get) === "u v x y z")
  }
  it should "keep needed parenthesis" in {
    // The right-hand application (y x) must stay parenthesized.
    assert(prettyPrint(LambdaParser("(((xy)z)(yx))").get) === "x y z (y x)")
  }
}
| euclio/scala-lambda | src/test/scala/PrettyPrintTest.scala | Scala | mit | 446 |
/**
* Copyright 2012-2013 StackMob
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stackmob.newman
import scala.concurrent.duration._
import scala.concurrent._
import scalaz.Validation
import com.stackmob.newman.request.HttpRequest
import com.stackmob.newman.response.HttpResponse
package object test {
  /** Default timeout used by the blocking helpers below. */
  private[test] implicit val duration = 5.seconds

  /** Test-only blocking helpers over Future. */
  private[test] implicit class RichFuture[T](fut: Future[T]) {
    /** Blocks for the result, capturing any thrown exception as a Left. */
    def toEither(dur: Duration = duration): Either[Throwable, T] = {
      Validation.fromTryCatch(block(dur)).toEither
    }
    /** Blocks for up to `dur` and returns the result (or rethrows). */
    def block(dur: Duration = duration): T = {
      Await.result(fut, dur)
    }
  }

  private[test] implicit class RichHttpRequest(req: HttpRequest) {
    /** Executes the request and blocks for its response. */
    def block(duration: Duration = 500.milliseconds): HttpResponse = {
      Await.result(req.apply, duration)
    }
  }

  private[test] implicit class RichList[T](l: List[T]) {
    /**
     * Safe positional lookup: Some(element) when `i` is in range, else None.
     * The standard library's `lift` implements exactly the bounds check the
     * previous hand-rolled `if (i >= 0 && i < l.length)` duplicated.
     */
    def get(i: Int): Option[T] = l.lift(i)
  }
}
| megamsys/newman | src/test/scala/com/stackmob/newman/test/package.scala | Scala | apache-2.0 | 1,547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.controller
import _root_.org.apache.predictionio.annotation.DeveloperApi
import org.apache.predictionio.core.BaseAlgorithm
import org.apache.predictionio.workflow.PersistentModelManifest
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import scala.reflect._
/** Base class of a local algorithm.
*
* A local algorithm runs locally within a single machine and produces a model
* that can fit within a single machine.
*
* If your input query class requires custom JSON4S serialization, the most
* idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
* and mix that into your algorithm class, instead of overriding
* [[querySerializer]] directly.
*
* @tparam PD Prepared data class.
* @tparam M Trained model class.
* @tparam Q Input query class.
* @tparam P Output prediction class.
* @group Algorithm
*/
abstract class LAlgorithm[PD, M : ClassTag, Q, P]
  extends BaseAlgorithm[RDD[PD], RDD[M], Q, P] {

  // Applies the local train() to every element of the prepared-data RDD.
  override def trainBase(sc: SparkContext, pd: RDD[PD]): RDD[M] = pd.map(train)

  /** Implement this method to produce a model from prepared data.
    *
    * @param pd Prepared data for model training.
    * @return Trained model.
    */
  def train(pd: PD): M

  override def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
    : RDD[(Long, P)] = {
    // For local algorithms the base model is known to be an RDD[M]
    // (produced by trainBase above), hence the cast.
    val mRDD = bm.asInstanceOf[RDD[M]]
    batchPredict(mRDD, qs)
  }

  /** This is a default implementation to perform batch prediction. Override
    * this method for a custom implementation.
    *
    * @param mRDD A single model wrapped inside an RDD
    * @param qs An RDD of index-query tuples. The index is used to keep track of
    *           predicted results with corresponding queries.
    * @return Batch of predicted results
    */
  def batchPredict(mRDD: RDD[M], qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
    // glom() packs each query partition into a single array so the cartesian
    // with the (single-element) model RDD pairs the model with whole
    // partitions rather than with individual queries.
    val glomQs: RDD[Array[(Long, Q)]] = qs.glom()
    val cartesian: RDD[(M, Array[(Long, Q)])] = mRDD.cartesian(glomQs)
    cartesian.flatMap { case (m, qArray) =>
      qArray.map { case (qx, q) => (qx, predict(m, q)) }
    }
  }

  override def predictBase(localBaseModel: Any, q: Q): P = {
    // At serving time the model arrives untyped; cast back to M.
    predict(localBaseModel.asInstanceOf[M], q)
  }

  /** Implement this method to produce a prediction from a query and trained
    * model.
    *
    * @param m Trained model produced by [[train]].
    * @param q An input query.
    * @return A prediction.
    */
  def predict(m: M, q: Q): P

  /** :: DeveloperApi ::
    * Engine developers should not use this directly (read on to see how local
    * algorithm models are persisted).
    *
    * Local algorithms produce local models. By default, models will be
    * serialized and stored automatically. Engine developers can override this behavior by
    * mixing the [[PersistentModel]] trait into the model class, and
    * PredictionIO will call [[PersistentModel.save]] instead. If it returns
    * true, a [[org.apache.predictionio.workflow.PersistentModelManifest]] will be
    * returned so that during deployment, PredictionIO will use
    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
    * returned and the model will be re-trained on-the-fly.
    *
    * @param sc Spark context
    * @param modelId Model ID
    * @param algoParams Algorithm parameters that trained this model
    * @param bm Model
    * @return The model itself for automatic persistence, an instance of
    *         [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
    *         persistence, or Unit for re-training on deployment
    */
  @DeveloperApi
  override
  def makePersistentModel(
    sc: SparkContext,
    modelId: String,
    algoParams: Params,
    bm: Any): Any = {
    // Check RDD[M].count == 1
    // NOTE(review): first() assumes the model RDD holds exactly one model —
    // consistent with batchPredict's "single model wrapped inside an RDD".
    val m = bm.asInstanceOf[RDD[M]].first()
    m match {
      case m: PersistentModel[Params] @unchecked =>
        if(m.save(modelId, algoParams, sc)){
          PersistentModelManifest(className = m.getClass.getName)
        } else ()
      case _ => m
    }
  }
}
| PredictionIO/PredictionIO | core/src/main/scala/org/apache/predictionio/controller/LAlgorithm.scala | Scala | apache-2.0 | 4,867 |
package com.scalakata
/** Server-side HTML page templates for ScalaKata, built with scalatags. */
object Template {
  import scalatags.Text.all._
  import scalatags.Text.tags2.{title, noscript}

  // Wraps pre-rendered code HTML in a minimal page intended for iframe
  // embedding (loads the iframe-resizer content-window script).
  def echo(code: String) = {
    "<!DOCTYPE html>" +
    html(
      head(
        meta(charset:="utf-8"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/dialog/dialog.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/fold/foldgutter.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/hint/show-hint.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/scroll/simplescrollbars.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/lib/codemirror.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/theme/mdn-like.css"),
        link(rel:="stylesheet", href:="/assets/lib/open-iconic/font/css/open-iconic.css"),
        link(rel:="stylesheet", href:="/assets/main.css")
      ),
      body(style := "margin:0")(
        raw(code),
        script(src := "/assets/lib/iframe-resizer/js/iframeResizer.contentWindow.min.js")
      )
    )
  }

  // Builds the main editor page. `prod` selects the optimized Scala.js
  // artifact; otherwise the fast-optimized development build is served.
  def txt(prod: Boolean) = {
    val client = if(prod) "client-opt.js" else "client-fastopt.js"
    "<!DOCTYPE html>" +
    html(
      head(
        title("Scala Kata"),
        base(href:="/"),
        meta(charset:="utf-8"),
        meta(name:="description", content:= "Interactive Playground for the Scala Programming Language"),
        link(rel:="icon", `type`:="image/png", href:="/assets/favicon.ico"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/dialog/dialog.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/fold/foldgutter.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/hint/show-hint.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/addon/scroll/simplescrollbars.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/lib/codemirror.css"),
        link(rel:="stylesheet", href:="/assets/lib/codemirror/theme/mdn-like.css"),
        link(rel:="stylesheet", href:="/assets/lib/open-iconic/font/css/open-iconic.css"),
        link(rel:="stylesheet", href:="/assets/main.css")
      ),
      body(`class` := "cm-s-solarized cm-s-dark")(
        // Editor container: CodeMirror replaces the hidden textarea; the menu
        // exposes run state, theme toggle, help, and share actions.
        div(`id` := "code")(
          noscript("No Javscript, No Scala!"),
          textarea(id := "scalakata", style := "display: none;"),
          ul(`class` := "menu")(
            li(id := "state", `class` := "oi", "data-glyph".attr := "media-play"),
            li(id := "theme", "title".attr := "toggle theme (F2)", `class` := "oi", "data-glyph".attr := "sun"),
            li(id := "help", "title".attr := "help (F1)", `class` := "oi", "data-glyph".attr := "question-mark"),
            li(id := "share", "title".attr := "share (F7)", `class` := "oi", "data-glyph".attr := "share-boxed")
          ),
          div(id := "shared")
        ),
        // CodeMirror core, addons, Markdown rendering, iframe-resizer, then
        // the Scala.js client selected above.
        script(src:="/assets/lib/codemirror/lib/codemirror.js"),
        script(src:="/assets/lib/codemirror/addon/comment/comment.js"),
        script(src:="/assets/lib/codemirror/addon/dialog/dialog.js"),
        script(src:="/assets/lib/codemirror/addon/edit/closebrackets.js"),
        script(src:="/assets/lib/codemirror/addon/edit/matchbrackets.js"),
        script(src:="/assets/lib/codemirror/addon/fold/brace-fold.js"),
        script(src:="/assets/lib/codemirror/addon/fold/foldcode.js"),
        script(src:="/assets/lib/codemirror/addon/hint/show-hint.js"),
        script(src:="/assets/lib/codemirror/addon/runmode/runmode.js"),
        script(src:="/assets/lib/codemirror/addon/scroll/scrollpastend.js"),
        script(src:="/assets/lib/codemirror/addon/scroll/simplescrollbars.js"),
        script(src:="/assets/lib/codemirror/addon/search/match-highlighter.js"),
        script(src:="/assets/lib/codemirror/addon/search/search.js"),
        script(src:="/assets/lib/codemirror/addon/search/searchcursor.js"),
        script(src:="/assets/lib/codemirror/keymap/sublime.js"),
        script(src:="/assets/lib/codemirror/mode/clike/clike.js"),
        script(src:="/assets/lib/pagedown/Markdown.Converter.js"),
        script(src:="/assets/lib/pagedown/Markdown.Sanitizer.js"),
        script(src:="/assets/lib/pagedown/Markdown.Extra.js"),
        script(src:="/assets/lib/iframe-resizer/js/iframeResizer.min.js"),
        script(src:=s"/assets/$client"),
        raw("""<script>var codeReg = /<code>([\\s\\S]*?)<\\/code>/;</script>"""),
        script("com.scalakata.Main().main()"),
        // Google Analytics, skipped when running on localhost.
        script("""
          if(window.location.hostname !== 'localhost') {
            (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
            (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
            m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
            })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
            ga('create', 'UA-42764457-1', 'auto');
            ga('send', 'pageview');
          }
          """)
      )
    )
  }
}
| Jorginho211/TFG | ScalaKata2/webapp/jvm/src/main/scala/com.scalakata/Template.scala | Scala | gpl-3.0 | 5,067 |
import scala.quoted._
import scala.quoted.autolift
import scala.language.implicitConversions
case class Xml(parts: String, args: List[Any])
// Macro-based xml string interpolator (Dotty/Scala 3 experimental syntax):
// `ctx.xml(args*)` expands at compile time into an Xml value.
object XmlQuote {
  // Encoding for
  //
  // implicit class SCOps(s: StringContext) {
  //   object xml {
  //     def apply(exprs: Any*) = ...
  //     def unapplySeq(...) = ...
  //   }
  // }
  object SCOps {
    // Opaque wrapper so the extension methods below only apply to contexts
    // that went through SCOps.apply.
    opaque type StringContext = scala.StringContext
    def apply(sc: scala.StringContext): StringContext = sc
  }
  inline def (inline ctx: StringContext).xml <: SCOps.StringContext = SCOps(ctx)
  inline def (inline ctx: SCOps.StringContext).apply(inline args: Any*): Xml =
    ${XmlQuote.impl('ctx, 'args)}
  // inline def (inline ctx: SCOps.StringContext).unapplySeq(...): Xml = ...
  // Macro implementation: unlifts the StringContext from the receiver
  // expression and joins its literal parts with "??" (placeholder marker).
  def impl(receiver: Expr[SCOps.StringContext], args: Expr[Seq[Any]])(using QuoteContext): Expr[Xml] = {
    val string = receiver match {
      case '{ SCOps(${Unlifted(sc)}) } => sc.parts.mkString("??")
    }
    '{new Xml(${string}, $args.toList)}
  }
}
| som-snytt/dotty | tests/run-macros/xml-interpolation-5/Macros_1.scala | Scala | apache-2.0 | 1,002 |
package com.sksamuel.elastic4s
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import org.scalatest.Matchers
import org.scalatest.matchers.{ Matcher, MatchResult }
/**
 * Mix-in providing a `matchJsonResource` ScalaTest matcher that compares a
 * JSON string against a JSON document loaded from the classpath.
 */
trait JsonSugar extends Matchers {

  protected val mapper = new ObjectMapper with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  /** Builds a matcher asserting structural equality with the resource's JSON. */
  def matchJsonResource(resourceName: String) = new JsonResourceMatcher(resourceName)

  class JsonResourceMatcher(resourceName: String) extends Matcher[String] {
    override def apply(left: String): MatchResult = {
      // The resource must exist on the classpath before any comparison.
      val resourceUrl = getClass.getResource(resourceName)
      withClue(s"expected JSON resource [$resourceName] ") { resourceUrl should not be null }

      // Compare parsed trees, so formatting/whitespace differences are ignored.
      val expected = mapper.readTree(resourceUrl)
      val actual = mapper.readTree(left)
      val matched = expected == actual

      MatchResult(
        matched,
        s"$actual did not match resource [$resourceName]: $expected",
        s"$actual did match resource [$resourceName]: $expected"
      )
    }
  }
}
| ExNexu/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/JsonSugar.scala | Scala | apache-2.0 | 1,149 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
package batches
import scala.reflect.ClassTag
import monix.execution.internal.Platform.recommendedBatchSize
/** The `Batch` is a [[BatchCursor]] factory, similar in spirit
* with Scala's [[scala.collection.Iterable Iterable]].
*
* Its [[Batch#cursor cursor()]] method can be called
* repeatedly to yield the same sequence.
*
* This class is provided as an alternative to Scala's
* [[scala.collection.Iterable Iterable]] because:
*
* - the list of supported operations is smaller
* - implementations specialized for primitives are provided
* to avoid boxing
* - it's a factory of [[BatchCursor]], which provides hints
* for `recommendedBatchSize`, meaning how many batch can
* be processed in a batches
*
* Used in the [[Iterant]] implementation.
*/
abstract class Batch[+A] extends Serializable {
  // Factory method: each call must yield a fresh cursor over the same sequence.
  def cursor(): BatchCursor[A]
  /** Creates a new generator that will only return the first `n`
    * elements of the source.
    */
  def take(n: Int): Batch[A]
  /** Creates a new generator from the source, with the first
    * `n` elements dropped, of if `n` is higher than the length
    * of the underlying collection, the it mirrors the source,
    * whichever applies.
    */
  def drop(n: Int): Batch[A]
  /** Creates a new generator emitting an interval of the values
    * produced by the source.
    *
    * @param from the index of the first generated element
    *        which forms part of the slice.
    * @param until the index of the first element
    *        following the slice.
    * @return a generator which emits the element of the source
    *         past the first `from` elements using `drop`,
    *         and then takes `until - from` elements,
    *         using `take`
    */
  def slice(from: Int, until: Int): Batch[A]
  /** Creates a new generator that maps all values produced by the source
    * to new values using a transformation function.
    *
    * @param f is the transformation function
    * @return a new generator which transforms every value produced by
    *         the source by applying the function `f` to it.
    */
  def map[B](f: A => B): Batch[B]
  /** Returns a generator over all the elements of the source
    * that satisfy the predicate `p`. The order of the elements
    * is preserved.
    *
    * @param p the predicate used to test values.
    * @return a generator which produces those values of the
    *         source which satisfy the predicate `p`.
    */
  def filter(p: A => Boolean): Batch[A]
  /** Creates a generator by transforming values produced by the source
    * with a partial function, dropping those values for which the partial
    * function is not defined.
    *
    * @param pf the partial function which filters and maps the generator.
    * @return a new generator which yields each value `x` produced by this
    *         generator for which `pf` is defined
    */
  def collect[B](pf: PartialFunction[A,B]): Batch[B]
  /** Applies a binary operator to a start value and all elements
    * of this generator, going left to right.
    *
    * @param initial is the start value.
    * @param op the binary operator to apply
    * @tparam R is the result type of the binary operator.
    *
    * @return the result of inserting `op` between consecutive elements
    *         of this generator, going left to right with the start value
    *         `initial` on the left. Returns `initial` if the generator
    *         is empty.
    */
  def foldLeft[R](initial: R)(op: (R,A) => R): R
  /** Converts this generator into a Scala immutable `List`. */
  def toList: List[A] = cursor().toList
  /** Converts this generator into a standard `Array`. */
  def toArray[B >: A : ClassTag]: Array[B] = cursor().toArray
  /** Converts this generator into a Scala `Iterable`. */
  def toIterable: Iterable[A] =
    // A fresh cursor is created per iterator() call, so the resulting
    // Iterable can be traversed repeatedly.
    new Iterable[A] { def iterator: Iterator[A] = cursor().toIterator }
}
/** [[Batch]] builders.
*
* @define paramArray is the underlying reference to use for traversing
* and transformations
*
* @define paramArrayOffset is the offset to start from, which would have
* been zero by default
*
* @define paramArrayLength is the length of created cursor, which would
* have been `array.length` by default
*/
object Batch {
/** Given a list of elements, builds an array-backed [[Batch]] out of it. */
def apply[A](elems: A*): Batch[A] = {
val array = elems.asInstanceOf[Seq[AnyRef]].toArray
fromArray(array).asInstanceOf[Batch[A]]
}
/** Builds a [[Batch]] from a standard `Array`, with strict
* semantics on transformations.
*
* @param array $paramArray
*/
def fromArray[A](array: Array[A]): ArrayBatch[A] =
fromArray(array, 0, array.length)
/** Builds a [[Batch]] from a standard `Array`, with strict
* semantics on transformations.
*
* @param array $paramArray
* @param offset $paramArrayOffset
* @param length $paramArrayLength
*/
def fromArray[A](array: Array[A], offset: Int, length: Int): ArrayBatch[A] = {
val tp = ClassTag[A](array.getClass.getComponentType)
new ArrayBatch[A](array, offset, length)(tp)
}
/** Converts a Scala [[scala.collection.Iterable Iterable]] into a [[Batch]]. */
def fromIterable[A](iter: Iterable[A]): Batch[A] =
fromIterable(iter, recommendedBatchSize)
/** Converts a Scala [[scala.collection.Iterable Iterable]]
* into a [[Batch]].
*
* @param recommendedBatchSize specifies the
* [[BatchCursor.recommendedBatchSize]] for the generated `BatchCursor` instances
* of this `Batch`, specifying the batch size when doing eager processing.
*/
def fromIterable[A](iter: Iterable[A], recommendedBatchSize: Int): Batch[A] =
new GenericBatch[A] {
def cursor(): BatchCursor[A] =
BatchCursor.fromIterator(iter.iterator, recommendedBatchSize)
}
/** Builds a [[Batch]] from a Scala `Seq`, with lazy
* semantics on transformations.
*/
def fromSeq[A](seq: Seq[A]): Batch[A] = {
val bs = if (seq.hasDefiniteSize) recommendedBatchSize else 1
fromSeq(seq, bs)
}
/** Builds a [[Batch]] from a Scala `Seq`, with lazy
* semantics on transformations.
*/
def fromSeq[A](seq: Seq[A], recommendedBatchSize: Int): Batch[A] =
new SeqBatch(seq, recommendedBatchSize)
  /** Builds a [[Batch]] from a Scala `IndexedSeq`, with strict
    * semantics on transformations.
    */
  def fromIndexedSeq[A](seq: IndexedSeq[A]): Batch[A] = {
    // Copy into an AnyRef array and reuse the strict array-backed
    // implementation; the final cast is safe because the element type
    // is unchanged, only boxed/erased.
    val ref = seq.asInstanceOf[IndexedSeq[AnyRef]].toArray
    fromArray(ref).asInstanceOf[Batch[A]]
  }
  /** Returns an empty generator instance. */
  def empty[A]: Batch[A] = EmptyBatch
  /** Returns a [[Batch]] specialized for `Boolean`.
    *
    * @param array $paramArray
    */
  def booleans(array: Array[Boolean]): BooleansBatch =
    booleans(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Boolean`.
    *
    * @param array $paramArray
    * @param offset $paramArrayOffset
    * @param length $paramArrayLength
    */
  def booleans(array: Array[Boolean], offset: Int, length: Int): BooleansBatch =
    // Wraps without copying: the batch is a window over the given array.
    new BooleansBatch(new ArrayBatch(array, offset, length))
  /** Returns a [[Batch]] specialized for `Byte`.
    *
    * @param array $paramArray
    */
  def bytes(array: Array[Byte]): BytesBatch =
    bytes(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Byte`.
    *
    * @param array $paramArray
    * @param offset $paramArrayOffset
    * @param length $paramArrayLength
    */
  def bytes(array: Array[Byte], offset: Int, length: Int): BytesBatch =
    // Wraps without copying: the batch is a window over the given array.
    new BytesBatch(new ArrayBatch(array, offset, length))
  /** Returns a [[Batch]] specialized for `Char`.
    *
    * @param array $paramArray
    */
  def chars(array: Array[Char]): CharsBatch =
    chars(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Char`.
    *
    * @param array $paramArray
    * @param offset $paramArrayOffset
    * @param length $paramArrayLength
    */
  def chars(array: Array[Char], offset: Int, length: Int): CharsBatch =
    // Wraps without copying: the batch is a window over the given array.
    new CharsBatch(new ArrayBatch(array, offset, length))
/** Returns a [[Batch]] specialized for `Int`.
*
* @param array $paramArray
* @param offset $paramArrayOffset
* @param length $paramArrayLength
*/
def integers(array: Array[Int], offset: Int, length: Int): IntegersBatch =
new IntegersBatch(new ArrayBatch(array, offset, length))
/** Returns a [[Batch]] specialized for `Int`.
*
* @param array $paramArray
*/
def integers(array: Array[Int]): IntegersBatch =
integers(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Long`.
    *
    * @param array $paramArray
    */
  def longs(array: Array[Long]): LongsBatch =
    longs(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Long`.
    *
    * @param array $paramArray
    * @param offset $paramArrayOffset
    * @param length $paramArrayLength
    */
  def longs(array: Array[Long], offset: Int, length: Int): LongsBatch =
    // Wraps without copying: the batch is a window over the given array.
    new LongsBatch(new ArrayBatch(array, offset, length))
  /** Returns a [[Batch]] specialized for `Double`.
    *
    * @param array $paramArray
    */
  def doubles(array: Array[Double]): DoublesBatch =
    doubles(array, 0, array.length)
  /** Returns a [[Batch]] specialized for `Double`.
    *
    * @param array $paramArray
    * @param offset $paramArrayOffset
    * @param length $paramArrayLength
    */
  def doubles(array: Array[Double], offset: Int, length: Int): DoublesBatch =
    // Wraps without copying: the batch is a window over the given array.
    new DoublesBatch(new ArrayBatch(array, offset, length))
  /** A generator producing equally spaced values in some integer interval.
    *
    * @param from the start value of the generator
    * @param until the end value of the generator (the first value NOT returned)
    * @param step the increment value of the generator (must be positive or negative)
    * @return the generator producing values `from, from + step, ...` up to, but excluding `until`
    */
  def range(from: Int, until: Int, step: Int = 1): Batch[Int] =
    new GenericBatch[Int] {
      // Each call builds a fresh cursor, so the range can be traversed
      // any number of times.
      def cursor(): BatchCursor[Int] = BatchCursor.range(from, until, step)
    }
} | Wogan/monix | monix-tail/shared/src/main/scala/monix/tail/batches/Batch.scala | Scala | apache-2.0 | 10,917 |
package com.github.mdr.mash.assist
import com.github.mdr.mash.functions.{ MashFunction, MashMethod }
import com.github.mdr.mash.inference.Type
import com.github.mdr.mash.inference.Type.UserClass
/** A language element for which invocation assistance can be shown.
  * Sealed so that matches over the variants are checked for exhaustiveness. */
sealed trait Assistable
object Assistable {
  /** A concrete function value. */
  case class Function(f: MashFunction) extends Assistable
  /** A user-defined function known only by its inferred type. */
  case class FunctionType(f: Type.UserDefinedFunction) extends Assistable
  /** A concrete method value. */
  case class Method(method: MashMethod) extends Assistable
  /** A user-defined method known only by its inferred type. */
  case class MethodType(method: Type.UserDefinedFunction) extends Assistable
  /** A user-defined class's constructor. */
  case class ConstructorType(userClass: UserClass) extends Assistable
}
/** State of the assistance UI: the element currently being assisted. */
case class AssistanceState(assistable: Assistable)
| mdr/mash | src/main/scala/com/github/mdr/mash/assist/AssistanceState.scala | Scala | mit | 640 |
package edu.umd.mith.hathi
/** A single page of a HathiTrust volume: its metadata plus the page text. */
case class Page(
  metadata: PageMetadata,
  contents: String
)
/** A HathiTrust volume: its metadata plus all of its pages. */
case class Volume(
  metadata: VolumeMetadata,
  pages: List[Page]
)
| umd-mith/hathi | core/src/main/scala/hathi/data.scala | Scala | apache-2.0 | 162 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml
import scala.collection.mutable
import scala.concurrent.duration._
import scala.language.postfixOps
import org.apache.hadoop.fs.Path
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.when
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually
import org.scalatest.mockito.MockitoSugar.mock
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.util.{DefaultParamsReader, DefaultParamsWriter, MLWriter}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
import org.apache.spark.sql._
import org.apache.spark.util.JsonProtocol
/**
 * Verifies that ML pipeline operations (fit, transform, save, load) post the
 * expected [[MLEvent]]s to the Spark listener bus, and that every posted
 * event survives a round trip through the JSON event protocol.
 */
class MLEventsSuite
  extends SparkFunSuite with BeforeAndAfterEach with MLlibTestSparkContext with Eventually {
  // Events captured from the listener bus. Delivery is asynchronous, so
  // assertions over this buffer are wrapped in `eventually`.
  private val events = mutable.ArrayBuffer.empty[MLEvent]
  private val listener: SparkListener = new SparkListener {
    override def onOtherEvent(event: SparkListenerEvent): Unit = event match {
      case e: MLEvent => events.append(e)
      case _ =>
    }
  }
  override def beforeAll(): Unit = {
    super.beforeAll()
    spark.sparkContext.addSparkListener(listener)
  }
  override def afterEach(): Unit = {
    try {
      // Reset captured events so each test starts from a clean slate.
      events.clear()
    } finally {
      super.afterEach()
    }
  }
  override def afterAll(): Unit = {
    try {
      if (spark != null) {
        spark.sparkContext.removeSparkListener(listener)
      }
    } finally {
      super.afterAll()
    }
  }
  // Minimal Model subtype used as the mock target in the tests below.
  abstract class MyModel extends Model[MyModel]
  test("pipeline fit events") {
    // Stub a three-stage pipeline: estimator1 -> transformer1 -> estimator2,
    // wiring each stage's output dataset to the next stage's input.
    val estimator1 = mock[Estimator[MyModel]]
    val model1 = mock[MyModel]
    val transformer1 = mock[Transformer]
    val estimator2 = mock[Estimator[MyModel]]
    val model2 = mock[MyModel]
    when(estimator1.copy(any[ParamMap])).thenReturn(estimator1)
    when(model1.copy(any[ParamMap])).thenReturn(model1)
    when(transformer1.copy(any[ParamMap])).thenReturn(transformer1)
    when(estimator2.copy(any[ParamMap])).thenReturn(estimator2)
    when(model2.copy(any[ParamMap])).thenReturn(model2)
    val dataset1 = mock[DataFrame]
    val dataset2 = mock[DataFrame]
    val dataset3 = mock[DataFrame]
    val dataset4 = mock[DataFrame]
    val dataset5 = mock[DataFrame]
    when(dataset1.toDF).thenReturn(dataset1)
    when(dataset2.toDF).thenReturn(dataset2)
    when(dataset3.toDF).thenReturn(dataset3)
    when(dataset4.toDF).thenReturn(dataset4)
    when(dataset5.toDF).thenReturn(dataset5)
    when(estimator1.fit(meq(dataset1))).thenReturn(model1)
    when(model1.transform(meq(dataset1))).thenReturn(dataset2)
    when(model1.parent).thenReturn(estimator1)
    when(transformer1.transform(meq(dataset2))).thenReturn(dataset3)
    when(estimator2.fit(meq(dataset3))).thenReturn(model2)
    val pipeline = new Pipeline()
      .setStages(Array(estimator1, transformer1, estimator2))
    assert(events.isEmpty)
    val pipelineModel = pipeline.fit(dataset1)
    // Expected event sequence: outer fit start, then per-stage
    // fit/transform start/end pairs in stage order, then outer fit end.
    val event0 = FitStart[PipelineModel]()
    event0.estimator = pipeline
    event0.dataset = dataset1
    val event1 = FitStart[MyModel]()
    event1.estimator = estimator1
    event1.dataset = dataset1
    val event2 = FitEnd[MyModel]()
    event2.estimator = estimator1
    event2.model = model1
    val event3 = TransformStart()
    event3.transformer = model1
    event3.input = dataset1
    val event4 = TransformEnd()
    event4.transformer = model1
    event4.output = dataset2
    val event5 = TransformStart()
    event5.transformer = transformer1
    event5.input = dataset2
    val event6 = TransformEnd()
    event6.transformer = transformer1
    event6.output = dataset3
    val event7 = FitStart[MyModel]()
    event7.estimator = estimator2
    event7.dataset = dataset3
    val event8 = FitEnd[MyModel]()
    event8.estimator = estimator2
    event8.model = model2
    val event9 = FitEnd[PipelineModel]()
    event9.estimator = pipeline
    event9.model = pipelineModel
    val expected = Seq(
      event0, event1, event2, event3, event4, event5, event6, event7, event8, event9)
    eventually(timeout(10 seconds), interval(1 second)) {
      assert(events === expected)
    }
    // Test if they can be ser/de via JSON protocol.
    assert(events.nonEmpty)
    events.map(JsonProtocol.sparkEventToJson).foreach { event =>
      assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
    }
  }
  test("pipeline model transform events") {
    // Stub a fitted pipeline model with three transform stages and check
    // the transform start/end events fire in stage order, bracketed by
    // the pipeline model's own start/end events.
    val dataset1 = mock[DataFrame]
    val dataset2 = mock[DataFrame]
    val dataset3 = mock[DataFrame]
    val dataset4 = mock[DataFrame]
    when(dataset1.toDF).thenReturn(dataset1)
    when(dataset2.toDF).thenReturn(dataset2)
    when(dataset3.toDF).thenReturn(dataset3)
    when(dataset4.toDF).thenReturn(dataset4)
    val transformer1 = mock[Transformer]
    val model = mock[MyModel]
    val transformer2 = mock[Transformer]
    when(transformer1.transform(meq(dataset1))).thenReturn(dataset2)
    when(model.transform(meq(dataset2))).thenReturn(dataset3)
    when(transformer2.transform(meq(dataset3))).thenReturn(dataset4)
    val newPipelineModel = new PipelineModel(
      "pipeline0", Array(transformer1, model, transformer2))
    assert(events.isEmpty)
    val output = newPipelineModel.transform(dataset1)
    val event0 = TransformStart()
    event0.transformer = newPipelineModel
    event0.input = dataset1
    val event1 = TransformStart()
    event1.transformer = transformer1
    event1.input = dataset1
    val event2 = TransformEnd()
    event2.transformer = transformer1
    event2.output = dataset2
    val event3 = TransformStart()
    event3.transformer = model
    event3.input = dataset2
    val event4 = TransformEnd()
    event4.transformer = model
    event4.output = dataset3
    val event5 = TransformStart()
    event5.transformer = transformer2
    event5.input = dataset3
    val event6 = TransformEnd()
    event6.transformer = transformer2
    event6.output = dataset4
    val event7 = TransformEnd()
    event7.transformer = newPipelineModel
    event7.output = output
    val expected = Seq(event0, event1, event2, event3, event4, event5, event6, event7)
    eventually(timeout(10 seconds), interval(1 second)) {
      assert(events === expected)
    }
    // Test if they can be ser/de via JSON protocol.
    assert(events.nonEmpty)
    events.map(JsonProtocol.sparkEventToJson).foreach { event =>
      assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
    }
  }
  test("pipeline read/write events") {
    // Reflectively pull the `instance` field out of an MLWriter to identify
    // which object a save event belongs to.
    def getInstance(w: MLWriter): AnyRef =
      w.getClass.getDeclaredMethod("instance").invoke(w)
    withTempDir { dir =>
      val path = new Path(dir.getCanonicalPath, "pipeline").toUri.toString
      val writableStage = new WritableStage("writableStage")
      val newPipeline = new Pipeline().setStages(Array(writableStage))
      val pipelineWriter = newPipeline.write
      assert(events.isEmpty)
      pipelineWriter.save(path)
      // Save should emit start/end events both for the stage and for the
      // pipeline itself; anything else is a failure.
      eventually(timeout(10 seconds), interval(1 second)) {
        events.foreach {
          case e: SaveInstanceStart if e.writer.isInstanceOf[DefaultParamsWriter] =>
            assert(e.path.endsWith("writableStage"))
          case e: SaveInstanceEnd if e.writer.isInstanceOf[DefaultParamsWriter] =>
            assert(e.path.endsWith("writableStage"))
          case e: SaveInstanceStart if getInstance(e.writer).isInstanceOf[Pipeline] =>
            assert(getInstance(e.writer).asInstanceOf[Pipeline].uid === newPipeline.uid)
          case e: SaveInstanceEnd if getInstance(e.writer).isInstanceOf[Pipeline] =>
            assert(getInstance(e.writer).asInstanceOf[Pipeline].uid === newPipeline.uid)
          case e => fail(s"Unexpected event thrown: $e")
        }
      }
      // Test if they can be ser/de via JSON protocol.
      assert(events.nonEmpty)
      events.map(JsonProtocol.sparkEventToJson).foreach { event =>
        assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
      }
      // Drain the bus before clearing so stale save events cannot leak
      // into the load-event assertions below.
      sc.listenerBus.waitUntilEmpty(timeoutMillis = 10000)
      events.clear()
      val pipelineReader = Pipeline.read
      assert(events.isEmpty)
      pipelineReader.load(path)
      eventually(timeout(10 seconds), interval(1 second)) {
        events.foreach {
          case e: LoadInstanceStart[PipelineStage]
            if e.reader.isInstanceOf[DefaultParamsReader[PipelineStage]] =>
            assert(e.path.endsWith("writableStage"))
          case e: LoadInstanceEnd[PipelineStage]
            if e.reader.isInstanceOf[DefaultParamsReader[PipelineStage]] =>
            assert(e.instance.isInstanceOf[PipelineStage])
          case e: LoadInstanceStart[Pipeline] =>
            assert(e.reader === pipelineReader)
          case e: LoadInstanceEnd[Pipeline] =>
            assert(e.instance.uid === newPipeline.uid)
          case e => fail(s"Unexpected event thrown: $e")
        }
      }
      // Test if they can be ser/de via JSON protocol.
      assert(events.nonEmpty)
      events.map(JsonProtocol.sparkEventToJson).foreach { event =>
        assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
      }
    }
  }
  test("pipeline model read/write events") {
    // Same as the previous test, but for a fitted PipelineModel.
    def getInstance(w: MLWriter): AnyRef =
      w.getClass.getDeclaredMethod("instance").invoke(w)
    withTempDir { dir =>
      val path = new Path(dir.getCanonicalPath, "pipeline").toUri.toString
      val writableStage = new WritableStage("writableStage")
      val pipelineModel =
        new PipelineModel("pipeline_89329329", Array(writableStage.asInstanceOf[Transformer]))
      val pipelineWriter = pipelineModel.write
      assert(events.isEmpty)
      pipelineWriter.save(path)
      eventually(timeout(10 seconds), interval(1 second)) {
        events.foreach {
          case e: SaveInstanceStart if e.writer.isInstanceOf[DefaultParamsWriter] =>
            assert(e.path.endsWith("writableStage"))
          case e: SaveInstanceEnd if e.writer.isInstanceOf[DefaultParamsWriter] =>
            assert(e.path.endsWith("writableStage"))
          case e: SaveInstanceStart if getInstance(e.writer).isInstanceOf[PipelineModel] =>
            assert(getInstance(e.writer).asInstanceOf[PipelineModel].uid === pipelineModel.uid)
          case e: SaveInstanceEnd if getInstance(e.writer).isInstanceOf[PipelineModel] =>
            assert(getInstance(e.writer).asInstanceOf[PipelineModel].uid === pipelineModel.uid)
          case e => fail(s"Unexpected event thrown: $e")
        }
      }
      // Test if they can be ser/de via JSON protocol.
      assert(events.nonEmpty)
      events.map(JsonProtocol.sparkEventToJson).foreach { event =>
        assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
      }
      sc.listenerBus.waitUntilEmpty(timeoutMillis = 10000)
      events.clear()
      val pipelineModelReader = PipelineModel.read
      assert(events.isEmpty)
      pipelineModelReader.load(path)
      eventually(timeout(10 seconds), interval(1 second)) {
        events.foreach {
          case e: LoadInstanceStart[PipelineStage]
            if e.reader.isInstanceOf[DefaultParamsReader[PipelineStage]] =>
            assert(e.path.endsWith("writableStage"))
          case e: LoadInstanceEnd[PipelineStage]
            if e.reader.isInstanceOf[DefaultParamsReader[PipelineStage]] =>
            assert(e.instance.isInstanceOf[PipelineStage])
          case e: LoadInstanceStart[PipelineModel] =>
            assert(e.reader === pipelineModelReader)
          case e: LoadInstanceEnd[PipelineModel] =>
            assert(e.instance.uid === pipelineModel.uid)
          case e => fail(s"Unexpected event thrown: $e")
        }
      }
      // Test if they can be ser/de via JSON protocol.
      assert(events.nonEmpty)
      events.map(JsonProtocol.sparkEventToJson).foreach { event =>
        assert(JsonProtocol.sparkEventFromJson(event).isInstanceOf[MLEvent])
      }
    }
  }
}
| yanboliang/spark | mllib/src/test/scala/org/apache/spark/ml/MLEventsSuite.scala | Scala | apache-2.0 | 12,733 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.helper
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.mockito.Mockito.mock
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.i18n.{I18nSupport, Lang, MessagesApi}
import play.api.mvc.AnyContentAsEmpty
import play.api.test.FakeRequest
import play.twirl.api.Html
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.Future
/**
 * Shared, reusable view assertions for PBIK page specs. Each `pageWith*`
 * method registers a named ScalaTest example against the parsed `view`.
 */
trait PBIKViewBehaviours extends PlaySpec with JsoupMatchers {
  /** The rendered view under test; implementing suites must supply it. */
  def view: Html
  /** Parses the default view into a JSoup document. */
  def doc: Document = Jsoup.parse(view.toString())
  /** Parses an arbitrary rendered view into a JSoup document. */
  def doc(view: Html): Document = Jsoup.parse(view.toString())
  def pageWithTitle(titleText: String): Unit =
    "have a static title" in {
      doc.title must include(titleText)
    }
  def pageWithHeader(headerText: String): Unit =
    "have a static h1 header" in {
      doc must haveHeadingWithText(headerText)
    }
  def pageWithHeaderH2(headerText: String): Unit =
    "have a static h2 header" in {
      doc must haveHeadingH2WithText(headerText)
    }
  def pageWithBackLink(): Unit =
    "have a back link" in {
      doc must haveBackLink
    }
  def pageWithIdAndText(pageText: String, id: String): Unit =
    s"have a static text ($pageText) with id ($id)" in {
      doc must haveElementWithIdAndText(pageText, id)
    }
  // Fixed: replaced deprecated procedure syntax (`def f(...) { ... }`) with
  // an explicit `: Unit =`, matching every other behaviour in this trait.
  def pageWithYesNoRadioButton(idYes: String, idNo: String): Unit =
    "have a yes/no radio button" in {
      doc.getElementById(idYes) must not be null
      doc.getElementById(idNo) must not be null
    }
  def pageWithTextBox(id: String, label: String): Unit =
    s"have a text box with label $label" in {
      doc must haveInputLabelWithText(id, label)
    }
  def pageWithLink(text: String, href: String): Unit =
    s"have a link with url $href and text $text" in {
      val a = doc.select(s"a[href=$href]").first()
      a must not be null
      a.text.trim mustBe text.trim
    }
  def pageWithContinueButtonForm(submitUrl: String, buttonText: String): Unit =
    pageWithButtonForm(submitUrl, buttonText)
  /** Replaces ordinary spaces with non-breaking spaces, as rendered by views. */
  def nonBreakable(string: String): String = string.replace(" ", "\\u00A0")
  def pageWithButtonForm(submitUrl: String, buttonText: String): Unit = {
    "have a form with a submit button or input labelled as buttonText" in {
      doc must haveSubmitButton(buttonText)
    }
    "have a form with the correct submit url" in {
      doc must haveFormWithSubmitUrl(submitUrl)
    }
  }
}
/** Base spec wiring up the implicit language, request and messages API
  * that Play view templates require when rendered in tests. */
trait PBIKBaseViewSpec extends PlaySpec with GuiceOneAppPerSuite with I18nSupport {
  implicit val lang: Lang = Lang("en-GB")
  implicit val request: FakeRequest[AnyContentAsEmpty.type] = FakeRequest()
  implicit val messages: MessagesApi = app.injector.instanceOf[MessagesApi]
}
/** Convenience trait combining the base spec with the shared view behaviours. */
trait PBIKViewSpec extends PBIKBaseViewSpec with PBIKViewBehaviours
| hmrc/pbik-frontend | test/views/helper/PBIKViewSpec.scala | Scala | apache-2.0 | 3,372 |
package edu.holycross.shot.ohco2
import edu.holycross.shot.cite._
import scala.scalajs.js
import scala.scalajs.js.annotation._
/** A human-readable label for a version's citation scheme.
 *
 * @param urn URN for the version.
 * @param citationScheme Label for the citation scheme, with levels
 * separated by "/", e.g., "book/chapter".
 */
@JSExportAll case class CitationLabel(urn: CtsUrn, citationScheme: String) {
}
| cite-architecture/ohco2 | shared/src/main/scala/edu/holycross/shot/ohco2/CitationLabel.scala | Scala | gpl-3.0 | 347 |
package tuner
import scala.collection.Iterable
import scala.collection.immutable.HashMap
import scala.collection.immutable.HashSet
import scala.collection.immutable.SortedSet
import scala.collection.immutable.TreeSet
import scala.collection.mutable.ArrayBuffer
import com.typesafe.scalalogging.slf4j.LazyLogging
import tuner.util.FileReader
object Table extends LazyLogging {
  // A row: map from column name to value.
  type Tuple = Map[String,Float]
  // A row transformer used by Table.filter. By convention, returning null
  // drops the row from the filtered table (see Table.filter).
  type Filter = Tuple => Tuple
  /** Reads a table from a delimited text file. The delimiter is sniffed
    * from the header line: comma if splitting on "," yields more than one
    * column, otherwise tab. All data cells must parse as Float.
    */
  def fromCsv(filename:String) = {
    logger.info("reading " + filename + "...")
    val tbl = new Table
    val file = FileReader.read(filename)
    // First line is the header
    // Try and detect the separation character
    try {
      var header:List[String] = Nil
      var delim = ","
      // Skip leading lines until one splits into at least two columns;
      // that line is taken as the header. Surrounding quotes are stripped.
      while(header.length <= 1 && file.hasNext) {
        val rawHeader = file.next
        delim =
          if(rawHeader.split(",").toList.length > 1) ","
          else "\\t"
        header = rawHeader.split(delim).toList map {
          _.replaceAll("(^\\"|\\"$)", "")
        }
      }
      file.foreach((line) => {
        val splitLine = line.split(delim) map {_.toFloat}
        tbl.addRow(header.zip(splitLine))
      })
    } catch {
      case nse:java.util.NoSuchElementException =>
        logger.warn("can't find header")
    }
    tbl
  }
  /** Builds a table from a list of field names and rows of values
    * (each inner list in the same order as fieldNames). */
  def fromLists(fieldNames:List[String], data:List[List[Float]]) = {
    val tbl = new Table
    data.foreach {dr =>
      tbl.addRow(fieldNames.zip(dr))
    }
    tbl
  }
  // Some fun filters
  /** Filter that removes the given fields from every row. */
  def fieldFilter(rmFields:List[String]) : Filter = {
    {tpl => tpl.filterKeys {k => rmFields.indexOf(k) == -1}}
  }
  /** Filter that keeps a row only if every listed field falls inside its
    * (inclusive) [min, max] range. Fields absent from the row pass
    * trivially (the getOrElse defaults are chosen to satisfy each bound). */
  def rangeFilter(ranges:List[(String, (Float, Float))]) : Filter = {
    val mins:Map[String,Float] = ranges.map({r=>(r._1,r._2._1)}).toMap
    val maxes:Map[String,Float] = ranges.map({r=>(r._1,r._2._2)}).toMap
    {tpl =>
      val minOk = mins.forall(mn => {tpl.getOrElse(mn._1, mn._2 + 1) >= mn._2})
      val maxOk = maxes.forall(mx => {tpl.getOrElse(mx._1, mx._2 - 1) <= mx._2})
      logger.debug("mn: " + minOk + " mx: " + maxOk)
      if(minOk && maxOk)
        tpl
      else
        null
    }
  }
  // NOTE(review): despite the name, this filter KEEPS a row when its
  // fields all agree with (i.e. it is a sub-tuple of) some row of tbl,
  // and drops it otherwise — confirm against callers before renaming.
  def notSubsetFilter(tbl:Table) : Filter = {
    {tpl =>
      var out:Table.Tuple = null
      for(r <- 0 until tbl.numRows) {
        val tpl2 = tbl.tuple(r)
        if(out == null &&
           (tpl.forall {case (fld,v) => tpl2.get(fld).forall(x=>x == v)})) {
          out = tpl
        }
      }
      out
    }
  }
}
/**
 * A simple in-memory table of named Float columns. Rows are stored as
 * maps from column name to value, so rows may have differing columns.
 * Mutable: rows can be added, removed and updated in place.
 */
class Table extends LazyLogging {
  // Row storage; each entry is one row (column name -> value).
  val data:ArrayBuffer[Table.Tuple] = new ArrayBuffer
  /** Appends a row given as (column, value) pairs. */
  def addRow(values:List[(String, Float)]) = {
    data += values.toMap
  }
  def removeRow(row:Int) = data.remove(row)
  def clear = data.clear
  def isEmpty = data.isEmpty
  /** Minimum value of a column, over rows that have it.
    * Throws if no row has the column. */
  def min(col:String) : Float = {
    data.map({_.get(col)}).flatten.min
  }
  /** Maximum value of a column, over rows that have it.
    * Throws if no row has the column. */
  def max(col:String) : Float = {
    data.map({_.get(col)}).flatten.max
  }
  /** Union of the column names appearing in any row. */
  def fieldNames : List[String] = {
    data.foldLeft(Set[String]())({(st:Set[String],tpl:Table.Tuple) => st ++ tpl.keys.toSet}).toList
  }
  def numRows : Int = data.size
  def numFields : Int = fieldNames.size
  // NOTE(review): this is an overload, not an override of Any.equals,
  // so `==` on Tables still uses reference equality.
  def equals(other:Table) = {
    // we finish quick if tables aren't same dimensions
    if(numRows != other.numRows || numFields != other.numFields) {
      false
    } else {
      data == other.data
    }
  }
  /** Sorted values of a column (duplicates retained), over rows that have it. */
  def values(col:String) : Seq[Float] = {
    data.map({_.get(col)}).flatten.sorted
  }
  /** Builds a 2D grid keyed by the sorted values of rowField/colField,
    * populated with valField wherever a row provides all three columns. */
  def to2dMatrix(rowField:String, colField:String, valField:String) : Grid2D = {
    // First collect all the columns from the datastore
    val rowVals = values(rowField)
    val colVals = values(colField)
    val m = new Grid2D(rowVals toList, colVals toList)
    // Now populate the matrix
    data.foreach(v => {
      // Only set the value if we have information at that point
      (v.get(rowField), v.get(colField), v.get(valField)) match {
        case (Some(rowV), Some(colV), Some(value)) =>
          m.set(rowVals.takeWhile({_ < rowV}).size,
                colVals.takeWhile({_ < colV}).size,
                value)
        case _ =>
      }
    })
    m
  }
  def iterator = data.iterator
  def columnValue(colName:String, row:Int) : Option[Float] = {
    data(row).get(colName)
  }
  def setColumnValue(colName:String, row:Int, value:Float) = {
    data(row) = data(row) + (colName -> value)
  }
  /** The row as a tuple, augmented with its row index under Config.rowField
    * (the Int index is widened to Float). */
  def tuple(row:Int) : Table.Tuple = {
    data(row) + (Config.rowField -> row)
  }
  def map[A](f:Table.Tuple=>A) = data.map(f)
  // Adds all rows of t to this table
  def merge(t:Table) = {
    for(r <- 0 until t.numRows)
      addRow(t.tuple(r).toList)
  }
  /** Returns a new table with only the rows for which f does not return
    * null (the null-sentinel convention of Table.Filter). */
  def filter(f:Table.Filter) : Table = {
    val outTbl = new Table
    for(r <- 0 until numRows) {
      val tpl = f(tuple(r))
      if(tpl != null)
        outTbl.addRow(tpl.toList)
    }
    outTbl
  }
  /** Returns a new table with up to numRows rows starting at startRow,
    * silently returning fewer rows if the table ends first. */
  def subsample(startRow:Int, numRows:Int) : Table = {
    val t = new Table
    // Clamp to the rows actually available after startRow. The previous
    // clamp (min(numRows, this.numRows)) ignored startRow, so a non-zero
    // startRow could index past the end of `data` and throw.
    val extractRows = math.max(0, math.min(numRows, this.numRows - startRow))
    for(r <- startRow until (startRow+extractRows)) {
      t.addRow(tuple(r).toList)
    }
    t
  }
  def toRanges : DimRanges = toRanges(Nil)
  /** (min, max) ranges of every column, excluding filterFields. */
  def toRanges(filterFields:List[String]) : DimRanges = {
    val fns = if(filterFields == Nil) {
      fieldNames
    } else {
      fieldNames.filterNot({fn => filterFields.contains(fn)})
    }
    val ranges = fns.map({fn => (fn, (min(fn), max(fn)))}).toMap
    new DimRanges(ranges)
  }
  override def toString : String = if(numRows == 0) {
    "(empty table)"
  } else {
    val header = fieldNames.reduceLeft(_ + " " + _)
    val rows = data.map {row =>
      fieldNames.map {fn => row(fn).toString} reduceLeft(_ + " " + _)
    }
    header + "\\n" + rows.reduceLeft(_ + "\\n" + _)
  }
  /** Writes the table as CSV using the first row's columns as the header.
    * NOTE(review): the header filter hard-codes "rowNum" — presumably the
    * value of Config.rowField; confirm and use the constant if so. */
  def toCsv(filename:String) = {
    logger.info("writing " + filename + "...")
    val file = new java.io.FileWriter(filename)
    val header = if(numRows > 0) {
      val row0 = tuple(0)
      //val (hdr, _) = row0.unzip
      val hdr = row0.keys.filter({x => x != "rowNum"}).toList
      // write out the header
      file.write(hdr.mkString(",") + "\\n")
      hdr
    } else {
      Nil
    }
    for(r <- 0 until numRows) {
      val tpl = tuple(r)
      val vals = header.map(tpl(_)).map(_.toString)
      file.write(vals.mkString(",") + "\\n")
    }
    file.close
  }
}
| gabysbrain/tuner | src/main/scala/tuner/Table.scala | Scala | mit | 6,320 |
package dispatch.foursquare
import dispatch._
import dispatch.oauth._
import dispatch.oauth.OAuth._
import dispatch.liftjson.Js._
import net.liftweb.json._
import net.liftweb.json.JsonAST._
/** Client is a function to wrap API operations: given a request-building
  * block it produces an authenticated request against the v1 API host. */
abstract class Client extends ((Request => Request) => Request) {
  import Http.builder2product
  // Identifies this library to the Foursquare API.
  val agent = Map("User-Agent" -> "Dispatch Foursquare:0.0.1")
  // Base request for every v1 API call.
  val host = :/("api.foursquare.com") / "v1" <:< agent
  /** Executes a Method with this client's authentication, using the
    * method's default response handler. */
  def call[T](method: Method[T])(implicit http: Http): T = {
    http(method.defaultHandler(apply(method)))
  }
}
/** Client authenticating requests with an OAuth consumer + access token. */
case class OAuthClient(consumer: Consumer, accessToken: Token) extends Client {
  def apply(block: Request => Request): Request =
    block(host) <@ (consumer, accessToken)
}
/** Client authenticating requests with HTTP basic auth. */
case class BasicAuthClient(usernameOrEmail: String, password: String) extends Client {
  def apply(block: Request => Request): Request =
    block(host) as_!(usernameOrEmail, password)
}
/** OAuth handshake endpoints (hosted on foursquare.com, not the API host). */
object Auth {
  val host = :/("foursquare.com")
  val svc = host / "oauth"
  /** Requests an unauthorized request token. */
  def requestToken(consumer: Consumer) =
    svc.POST / "request_token" <@ consumer as_token
  /** URL the user visits to authorize the request token. */
  def authorizeUrl(reqToken: Token) = svc / "authorize" <<? reqToken
  /** Exchanges an authorized request token for an access token. */
  def accessToken(consumer: Consumer, token: Token) =
    svc.POST / "access_token" <@ (consumer, token) as_token
}
/** An API operation: a request builder plus a default response handler. */
trait Method[T] extends Builder[Request => Request] {
  /** default handler used by Client#call. You can also apply the client
      to a Method and define your own request handler. */
  def defaultHandler: Request => Handler[T]
}
/** Entry points for the cities API. */
object Cities extends CityBuilder(Map()) {
  /** get a list of recently active cities */
  def recentlyActive = new CitiesBuilder
  /** switch the auth user's default city */
  def switch(cityid: Long) = new CitySwitchBuilder(Map(
    "cityid" -> cityid
  ))
}
private [foursquare] class CitiesBuilder extends Method[List[JValue]] {
  def product = (_: Request) / "cities.json"
  def defaultHandler = _ ># ('cities ? ary)
}
// Immutable builder: each call returns a new builder with the added param.
private [foursquare] class CityBuilder(val params: Map[String, Any]) extends Method[List[JField]] {
  private def param(k: String)(v: Any) = new CityBuilder(params + (k -> v))
  /** get a City at or nearest geolat and geolong */
  def at(geolat: Double, geolong: Double) =
    param("geolat")(geolat).param("geolong")(geolong)
  def at(geoLatLong: (Double,Double)) =
    param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
  def product = (_: Request) / "checkcity.json" <<? params
  def defaultHandler = _ ># ('city ? obj)
}
private [foursquare] class CitySwitchBuilder(val params: Map[String, Any]) extends Method[List[JField]] {
  // POST (<<) rather than query params: this mutates server-side state.
  def product = (_: Request) / "switchcity.json" << params
  def defaultHandler = _ ># 'data ? obj
}
/** JSON field extractors for a city object. */
object City {
  val id = 'id ? int
  val timezone = 'timezone ? str
  val name = 'name ? str
  val shortName = 'shortname ? str
  val geolat = 'geolat ? obj // TODO impl double in dispatch.liftjson.Js
  val geolong = 'geolong ? obj // ^ ^
}
/** JSON field extractors for a city-switch response. */
object CitySwitch {
  // BUG?: parsed field comes back as JString rather than JInt List(JField(status,JString(1))
  val status = 'status ? str
  val message = 'message ? str
}
/** Entry points for the checkins API. */
object Checkins extends CheckinsBuilder(Map()) {
  /** provides a means to checkin, shout, and post details */
  def checkIn = new CheckinBuilder(Map())
  /** get a list of checkins for the authed user */
  def history = new HistoryBuilder(Map())
}
// Immutable builder: each call returns a new builder with the added param.
private [foursquare] class CheckinsBuilder(val params: Map[String, Any]) extends Method[List[JValue]] {
  private def param(k: String)(v: Any) = new CheckinsBuilder(params + (k -> v))
  /** get a list friends of checkin's @ near a geolat + geolong */
  def at(geolat: Double, geolong: Double) =
    param("geolat")(geolat).param("geolong")(geolong)
  def at(geoLatLong: (Double,Double)) =
    param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
  def product = (_: Request) / "checkins.json" <<? params
  def defaultHandler = _ ># ('checkins ? ary)
}
// Builds a POST that performs a checkin with optional venue/shout/privacy
// and cross-posting flags (0/1 valued per the v1 API).
private [foursquare] class CheckinBuilder(val params: Map[String, Any]) extends Method[List[JField]] {
  private def param(k: String)(v: Any) = new CheckinBuilder(params + (k -> v))
  def at(vid: Long) = param("vid")(vid)
  def at(venue: String) = param("venue")(venue)
  def at(geolat: Double, geolong: Double) =
    param("geolat")(geolat).param("geolong")(geolong)
  def at(geoLatLong: (Double,Double)) =
    param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
  val shouting = param("shout")_
  def privately = param("private")(1)
  def publicly = param("private")(0)
  def withoutTweet = param("twitter")(0)
  def withTweet = param("twitter")(1)
  def notPostingToFacebook = param("facebook")(0)
  def postingToFacebook = param("facebook")(1)
  def product = (_: Request) / "checkin.json" << params
  def defaultHandler = _ ># ('checkin ? obj)
}
private [foursquare] class HistoryBuilder(val params: Map[String, Any]) extends Method[List[JValue]] {
  private def param(k: String)(v: Any) = new HistoryBuilder(params + (k -> v))
  val limit = param("l")_
  val since = param("sinceid")_
  def product = (_: Request) / "history.json" <<? params
  def defaultHandler = _ ># ('checkins ? ary)
}
/** JSON field extractors for a checkin object. */
object Checkin {
  val id = 'id ? int
  val user = 'user ? obj
  val venue = 'venue ? obj
  val display = 'display ? str
  val message = 'message ? str
  val shout = 'shout ? str
  val created = 'created ? date
  val timezone = 'timezone ? str
  // extended
  val mayor = 'mayor ? obj
  object Mayor {
    val mtype = 'type ? str
    val checkins = 'checkins ? int
    val user = 'user ? obj
    val message = 'message ? str
  }
  val badges = 'badges ? ary
  val scoring = 'scoring ? ary
  object Score {
    val points = 'points ? int
    val icon = 'icon ? str
    val message = 'message ? str
  }
  val specials = 'specials ? ary
  object Special {
    val id = 'id ? int
    val stype = 'type ? str
    val message = 'message ? str
  }
}
/** Entry point for the users API. */
object Users extends UserBuilder(Map())
// Immutable builder: each call returns a new builder with the added param.
private [foursquare] class UserBuilder(params: Map[String, Any]) extends Method[List[JField]] {
  private def param(k: String)(v: Any) = new UserBuilder(params + (k -> v))
  /** gets the authed user's info */
  def current = new UserBuilder(Map())
  /** gets a user's info by user id */
  val get = param("uid")_
  def withoutBadges = param("badges")(0)
  def withBadges = param("badges")(1)
  def withoutMayorships = param("mayor")(0)
  def withMayorships = param("mayor")(1)
  def product = (_: Request) / "user.json" <<? params
  def defaultHandler = _ ># ('user ? obj)
}
/** Entry points for the friends API: list friends or find friends by
  * name, phone number, or Twitter screen name. */
object Friends extends FriendsBuilder(Map()) {
  def ofMe = this
  def named(name: String) = new FriendsByNameBuilder(Map(
    "q" -> name
  ))
  def withPhone(number: String) = new FriendsByPhoneBuilder(Map(
    "q" -> number
  ))
  def withTwitterName(screenName: String) = new FriendsByTwitterBuilder(Map(
    "q" -> screenName
  ))
}
private [foursquare] class FriendsBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
  private def param(k: String)(v: Any) = new FriendsBuilder(params + (k -> v))
  val of = param("uid")_
  def product = (_: Request) / "friends.json" <<? params
  def defaultHandler = _ ># ('friends ? ary)
}
private [foursquare] class FriendsByNameBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
  def product = (_: Request) / "findfriends" / "byname.json" <<? params
  def defaultHandler = _ ># ('friends ? ary)
}
private [foursquare] class FriendsByPhoneBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
  def product = (_: Request) / "findfriends" / "byphone.json" <<? params
  def defaultHandler = _ ># ('friends ? ary)
}
private [foursquare] class FriendsByTwitterBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
  def product = (_: Request) / "findfriends" / "bytwitter.json" <<? params
  def defaultHandler = _ ># ('friends ? ary)
}
  /** Entry point for venue search, detail, creation and flagging requests. */
  object Venues extends VenuesBuilder(Map()) {
    /** fetch details for a single venue */
    def get(vid: Long) = new VenueBuilder(Map(
      "vid" -> vid
    ))
    /** start building a new-venue submission */
    def add = new VenueMakerBuilder(Map())
    /** start building an edit proposal for an existing venue */
    def proposeEditTo(vid: Long) = new EditVenueBuilder(Map(
      "vid" -> vid
    ))
    /** flag a venue as closed */
    def close(vid: Long) = new CloseVenueBuilder(Map(
      "vid" -> vid
    ))
  }
  /** Request builder for `venues.json` (venue search near a point). */
  private [foursquare] class VenuesBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
    private def param(k: String)(v: Any) = new VenuesBuilder(params + (k -> v))
    def at(geolat: Double, geolong: Double) =
      param("geolat")(geolat).param("geolong")(geolong)
    def at(geoLatLong: (Double,Double)) =
      param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
    /** maximum number of results ("l" parameter) */
    val limit = param("l")_
    /** keyword search ("q" parameter) */
    val named = param("q")_
    def product = (_: Request) / "venues.json" <<? params
    def defaultHandler = _ ># ('venues ? ary)
  }
  /** Request builder for `venue.json` (single venue detail). */
  private [foursquare] class VenueBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
    def product = (_: Request) / "venue.json" <<? params
    def defaultHandler = _ ># ('venue ? obj)
  }
  /** POST builder for `addvenue.json` (create a new venue). */
  private [foursquare] class VenueMakerBuilder(params: Map[String, Any]) extends Method[List[JField]] {
    // NOTE(review): returns VenueMakerBuilder, not the dynamic subtype, so
    // chaining a setter on an EditVenueBuilder loses the overridden `product`
    // (the request would go to addvenue.json instead of proposeedit.json).
    private def param(k: String)(v: Any) = new VenueMakerBuilder(params + (k -> v))
    val name = param("name")_
    val address = param("address")_
    val crossStreet = param("crossstreet")_
    val city = param("city")_
    val zip = param("zip")_
    val cityId = param("cityid")_
    val phone = param("phone")_
    def at(geolat: Double, geolong: Double) =
      param("geolat")(geolat).param("geolong")(geolong)
    def at(geoLatLong: (Double,Double)) =
      param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
    def product = (_: Request) / "addvenue.json" << params
    def defaultHandler = _ ># ('venue ? obj)
  }
  /** POST builder for `venue/proposeedit.json`; reuses VenueMakerBuilder setters. */
  private [foursquare] class EditVenueBuilder(params: Map[String, Any]) extends VenueMakerBuilder(params) {
    override def product = (_: Request) / "venue" / "proposeedit.json" << params
  }
  /** POST builder for `venues/flagclosed.json`. */
  private [foursquare] class CloseVenueBuilder(params: Map[String, Any]) extends Method[List[String]] {
    def product = (_: Request) / "venues" / "flagclosed.json" << params
    def defaultHandler = _ ># ('response ? str)
  }
  /** Entry point for tip listing, creation and state-change requests. */
  object Tips extends TipsBuilder(Map()) {
    /** adds a new Tip */
    def add = new TipBuilder(Map())
    /** changes a Tip to a Todo */
    def toTodo(tid: Long) = new TipChangeBuilder(Map(
      "tid" -> tid
    ))
    /** flags a Todo as done */
    def markDone(tid: Long) = new TipDoneBuilder(Map(
      "tid" -> tid
    ))
  }
  /** Request builder for `tips.json` (tips near a point). */
  private [foursquare] class TipsBuilder(params: Map[String, Any]) extends Method[List[JValue]] {
    private def param(k: String)(v: Any) = new TipsBuilder(params + (k -> v))
    def at(geolat: Double, geolong: Double) =
      param("geolat")(geolat).param("geolong")(geolong)
    def at(geoLatLong: (Double,Double)) =
      param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
    /** maximum number of results ("l" parameter) */
    val limit = param("l")_
    def product = (_: Request) / "tips.json" <<? params
    def defaultHandler = _ ># ('group ? ary)
  }
  /** POST builder for `addtip.json` (create a tip or todo at a venue/point). */
  private [foursquare] class TipBuilder(params: Map[String, Any]) extends Method[List[JField]] {
    private def param(k: String)(v: Any) = new TipBuilder(params + (k -> v))
    def at(vid: Long) = param("vid")(vid)
    def at(geolat: Double, geolong: Double) =
      param("geolat")(geolat).param("geolong")(geolong)
    def at(geoLatLong: (Double,Double)) =
      param("geolat")(geoLatLong._1).param("geolong")(geoLatLong._2)
    val text = param("text")_
    // vals: each is a builder already carrying type=tip / type=todo
    val asTip = param("type")("tip")
    val asTodo = param("type")("todo")
    def product = (_: Request) / "addtip.json" << params
    def defaultHandler = _ ># ('tip ? obj)
  }
  /** POST builder for `tip/marktodo.json`. */
  private [foursquare] class TipChangeBuilder(params: Map[String, Any]) extends Method[List[JField]] {
    def product = (_: Request) / "tip" / "marktodo.json" << params
    def defaultHandler = _ ># ('tip ? obj)
  }
  /** POST builder for `tip/markdone.json`. */
  private [foursquare] class TipDoneBuilder(params: Map[String, Any]) extends Method[List[JField]] {
    override def product = (_: Request) / "tip" / "markdone.json" << params
    def defaultHandler = _ ># ('tip ? obj)
  }
  /** JSON extractors for a badge object. */
  object Badge {
    val id = 'id ? int
    val name = 'name ? str
    val icon = 'icon ? str
    val description = 'description ? str
  }
object User {
val id = 'id ? int
val firstname = 'firstname ? str
val lastname = 'lastname ? str
val gender = 'gender ? str
val photo = 'photo ? str
// extended
val city = 'city ? obj
val phone = 'phone ? str
val email = 'email ? str
val twitter = 'twitter ? str
val facebook = 'facebook ? str
val friendstatus = 'friendstatus ? str
val checkin = 'checking ? obj
val badges = 'badges ? ary
// if self
val settings = 'settings ? ary
}
  /** JSON extractors for a venue object and its nested stats/tags structures. */
  object Venue {
    val id = 'id ? int
    val name = 'name ? str
    val address = 'address ? str
    // NOTE(review): latitude/longitude extracted as int — the API returns
    // decimal coordinates; verify the `int` extractor handles them as intended
    val geolat = 'geolat ? int
    val geolong = 'geolong ? int
    // extended
    val crossStreet = 'crossstreet ? str
    val city = 'city ? str
    val state = 'state ? str
    val zip = 'zip ? str
    val phone = 'phone ? str
    val twitter = 'twitter ? str
    val stats = 'stats ? obj
    /** Extractors for the nested "stats" object. */
    object Stats {
      val checkins = 'checkins ? int
      val beenHere = 'beenhere ? obj
      /** Extractors for the nested "beenhere" object. */
      object Been {
        def me = 'me ? str // bool
        def myFriends = 'friends ? str // bool
      }
    }
    // top-level "checkins" array (distinct from Stats.checkins count)
    val checkins = 'checkins ? ary
    val tips = 'tips ? ary
    val tags = 'tags ? ary
    object Tag { def txt = 'tag ? str }
    // on edit
    val response = 'response ? str
  }
  /** JSON extractors for a tip object. */
  object Tip {
    val id = 'id ? int
    // extracts the "text" field
    val txt = 'text ? str
    val url = 'url ? str
    val created = 'created ? date
    val user = 'user ? obj
  }
| softprops/dispatch-foursquare | src/main/scala/dispatch/Foursquare.scala | Scala | mit | 13,472 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.hibench.sparkbench.ml
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.Matrix
import org.apache.spark.mllib.linalg.SingularValueDecomposition
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.rdd.RDD
import scopt.OptionParser
/**
 * HiBench workload: computes a truncated SVD of a row matrix loaded from an
 * RDD object file. Command-line options are parsed with scopt into [[SVDExample.Params]].
 */
object SVDExample {
  /**
   * Workload parameters.
   * @param numFeatures        declared feature count (parsed but not used by `run`)
   * @param numSingularValues  number of singular values k to compute
   * @param computeU           whether to also compute the U factor
   * @param maxResultSize      value for spark.driver.maxResultSize
   * @param dataPath           input path of the serialized RDD[Vector]
   */
  case class Params(
    numFeatures: Int = 0,
    numSingularValues: Int = 0,
    computeU: Boolean = true,
    maxResultSize: String = "1g",
    dataPath: String = null
  )
  /** Parses arguments and runs the workload; exits with status 1 on a parse error. */
  def main(args: Array[String]): Unit = {
    val defaultParams = Params()
    val parser = new OptionParser[Params]("SVD") {
      head("SVD: an example of SVD for matrix decomposition.")
      opt[Int]("numFeatures")
        .text(s"numFeatures, default: ${defaultParams.numFeatures}")
        .action((x,c) => c.copy(numFeatures = x))
      opt[Int]("numSingularValues")
        .text(s"numSingularValues, default: ${defaultParams.numSingularValues}")
        .action((x,c) => c.copy(numSingularValues = x))
      opt[Boolean]("computeU")
        .text(s"computeU, default: ${defaultParams.computeU}")
        .action((x,c) => c.copy(computeU = x))
      opt[String]("maxResultSize")
        .text(s"maxResultSize, default: ${defaultParams.maxResultSize}")
        .action((x,c) => c.copy(maxResultSize = x))
      arg[String]("<dataPath>")
        .required()
        .text("data path of SVD")
        .action((x,c) => c.copy(dataPath = x))
    }
    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }
  }
  /** Loads the data, computes the SVD, and stops the SparkContext. */
  def run(params: Params): Unit = {
    val conf = new SparkConf()
      .setAppName(s"SVD with $params")
      .set("spark.driver.maxResultSize", params.maxResultSize)
    val sc = new SparkContext(conf)
    val dataPath = params.dataPath
    // NOTE(review): numFeatures is bound here but never used below
    val numFeatures = params.numFeatures
    val numSingularValues = params.numSingularValues
    val computeU = params.computeU
    val data: RDD[Vector] = sc.objectFile(dataPath)
    val mat: RowMatrix = new RowMatrix(data)
    val svd: SingularValueDecomposition[RowMatrix, Matrix] = mat.computeSVD(numSingularValues, computeU)
    // Factors are bound only to force evaluation; this is a benchmark, the
    // results are intentionally discarded.
    val U: RowMatrix = svd.U // The U factor is a RowMatrix.
    val s: Vector = svd.s // The singular values are stored in a local dense vector.
    val V: Matrix = svd.V // The V factor is a local dense matrix.
    sc.stop()
  }
}
| maismail/HiBench | sparkbench/ml/src/main/scala/com/intel/sparkbench/ml/SVDExample.scala | Scala | apache-2.0 | 3,278 |
import play.api._
import play.api.libs.concurrent.Akka
import akka.actor._
import actors.WeatherRetriever
import actors.RepoRetriever
/**
 * Play application lifecycle hook: on startup, registers the two background
 * retriever actors ("weather" and "repo") on the application's Akka system.
 */
object Global extends GlobalSettings {
  override def onStart(app: Application) {
    Logger.info("Application has started")
    Akka.system(app).actorOf(Props[WeatherRetriever], name = "weather")
    Akka.system(app).actorOf(Props[RepoRetriever], name = "repo")
  }
}
| jeffusan/atWaredaShboArd | app/Global.scala | Scala | mit | 405 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa
import java.io.Flushable
import org.geotools.data.simple.SimpleFeatureWriter
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
package object index {

  // a SimpleFeatureWriter that can also be flushed mid-write
  type FlushableFeatureWriter = SimpleFeatureWriter with Flushable

  // tunable system properties (name, default value)
  val FilterCacheSize = SystemProperty("geomesa.cache.filters.size", "1000")
  val ZFilterCacheSize = SystemProperty("geomesa.cache.z-filters.size", "1000")
  val PartitionParallelScan = SystemProperty("geomesa.partition.scan.parallel", "false")
  val DistributedLockTimeout = SystemProperty("geomesa.distributed.lock.timeout", "2 minutes")
}
| locationtech/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/package.scala | Scala | apache-2.0 | 1,098 |
package sorra.lanka.core
import scala.collection._
import scala.collection.JavaConversions._
import java.nio.file.Files
import java.nio.file.Paths
import org.eclipse.jdt.core.dom._
import org.eclipse.jdt.core.JavaCore
import scala.reflect.ClassTag
/**
 * Demo entry point: walks all .java files under the current directory,
 * renames every method declaration to "hi" in the parsed AST, and prints
 * the modified compilation unit. Returns false from both callbacks
 * (no file write-back / stop traversal flags, per Walker/Selector contract —
 * TODO confirm against their definitions).
 */
object Run extends App {
  Walker.launch(Seq("."),
    {file => file.getPath().endsWith(".java") },
    {context =>
      val AstContext(cu, source, file) = context
      // selector that renames each visited MethodDeclaration to "hi"
      val sel = {md: MethodDeclaration =>
        md.setName(md.getAST().newSimpleName("hi"))
        true
      }
      Selector(sel).start(cu)
      println(cu.toString)
      false
    })
}
| sorra/Lanka | src/sorra/lanka/core/Run.scala | Scala | apache-2.0 | 608 |
package icfpc2013
/**
 * Static table of the 256 ICFP 2013 problem identifiers, provided as hex
 * strings plus two derived views: "0x"-prefixed strings and parsed Longs.
 */
object StaticInput {
  // 64-bit problem ids as bare hex strings (5 per line, 256 total)
  val hex = List(
    "0000000000000000", "FFFFFFFFFFFFFFFF", "2A58520FB74CF334", "0A72EA451EAC43F3", "23D2B85622BBD9F0",
    "A234920B6A68E52B", "2F84E29EAEA83D34", "7CFC5FA16EF19AFC", "181309BF80F87ADC", "04AF6A254D210E5B",
    "41C851490795046F", "916C6A6ABB288059", "D03C4F0C1D53AA4C", "F33B78211D81AA73", "3A4B06DBE76FE622",
    "0C19F5328BADD029", "85AA5E95E618D8A5", "684F4B6FD9DBD5CC", "7E944E6505F4EE7E", "A29D606D57D8709C",
    "79AD51406A066082", "EDA9E3A93048C412", "6AC2D9B6D45B0326", "722BDAF092610079", "4275CEEF6BA51932",
    "143B1106494DD6C2", "C3A155EB73821DB5", "47D54A7C60A0D599", "078E6B0BF00FB938", "AF8764E3917D03B3",
    "2A090FDA1ABE5636", "7E66C42CA2424B69", "8C741103B95408CB", "D269B246A74B8B8A", "8544A8A57809BDAA",
    "2BF39508483C616C", "681F80C837D7E454", "A5541E533F990044", "9E64DA2E21A70886", "D746DA773B33A9B7",
    "1091DB4E416C0EF2", "5072C0D3E8A09BB0", "B2B94B9C687F0932", "F3BFAAC33A9A8774", "71B63F0E47F6D4F9",
    "5E51D4B7104EE5F2", "97DF75DAB9C2FA8B", "F059CD9F2F2ABD5F", "9F2A74855CE07E6E", "2DE9788E9835AC02",
    "2819981A459D9A61", "BCE5049E64924C4C", "D0C0AAB509AB918D", "4530FF108E98462B", "11DFBC4ACFFD13FA",
    "46ACE3B7FF6DA19F", "41CC43A2E962E544", "979BCF98594A2E8C", "D1DCCBA667BFBA8D", "941339C9CDE915D2",
    "BB8BB0D7F9C783B5", "1E21D6A1BC59E79A", "92C9B3263FCD93C8", "D4DF091242A3BAF2", "8B415FE0CE685CB8",
    "70B5864FABC2329C", "EAD8980A0CE8B24B", "F3420FB602AD7B23", "64CDB6D02B0DC856", "6B4966E9C49B962E",
    "F2A21C561A3E602B", "EFD6E03DF0BA8718", "3ADC20B80E22D94F", "4AC737589F8834BF", "FDC6B34EBE031970",
    "99198A1608E6149B", "65F52A33DF067F95", "189A9DABEB53F313", "51B60602F6A35697", "FD1CD20DB19E6D0B",
    "8C07ADE1476E7ACF", "ADB167936402EEC1", "934E67BEC0532710", "4A7DFAD95F944BD9", "9A3B10DF3A461CD7",
    "10135AE747C844B5", "48465A9622069BC1", "B3D91D908BC78D0A", "CB1C8F53CDB2DC3C", "7073961FE09F27AF",
    "B59223E1081D0181", "4F401231040B2BB8", "237998E8D928B6C1", "975D80DD168046C3", "983770B9C248EB9E",
    "D975E8198F9A9091", "08793D4ABBB863DC", "23DF4C5FC36D83D8", "B2410A7F617F0E58", "BAF0EE8EBA9A3056",
    "91C6A8AB4A015738", "97D4040EAFCDFA29", "E9C49B5541F5E963", "AAE54D385C9D7A5D", "02DEB03EA6719333",
    "D02F2DF3B2D454C7", "C42C82A2C4804269", "D2DA2CA3178C1710", "66D26EBEB1BD4B61", "31EBFB3E6FE97514",
    "5C37B99299CC6F86", "638B6EBD9D80766C", "A210F0E7D4B85707", "868208BD8F556949", "8D61C794095271CA",
    "2219F75F0579AE83", "6E09F1FC1FFB73A0", "3D01AF6B1DA0B2F1", "F3A4C6DA4CFADFBA", "92CC84BBD7FAA9FA",
    "9ABBA84020FF575C", "8739E4DDF267DD6C", "8AC778DD71C89FD0", "0CE317F2C5F0A68C", "A27DA17D5797939C",
    "9489ABD69C9B646D", "2CA96AD62D0F72F6", "313EED842D7B0385", "149AAF7FE9F74778", "FE3F14B65765EA7C",
    "EA3F2A44BD014F0F", "F3E105D0A96E6047", "9B8BE49B10A9A239", "53A5CE01F832BF75", "3F30896D3A9C896F",
    "A6813DA4E9AD0AC9", "6BE6A020B72835DF", "962744E59B8EB708", "6C4AF3D5BA44D15D", "38BFF6A9FBAEBE12",
    "B94FAAECF8F9199B", "601E55AF13DD4F37", "D939D2CB3790FA98", "30B44FF0FBD1B0C3", "670563DC767EDC56",
    "63408C4EC21C26DA", "3A3701D9901797CF", "B4D8EE6DE14EF38B", "89D2FC4DF7E44729", "E5BEF4DC8E2EC41E",
    "5AA55356C0446528", "5E995579B7EC1FB0", "797EC4A977CDEFE3", "E7807C13296CAB02", "D9C724D82C8082BA",
    "7301CF5023C4558D", "200588D67ABB9B87", "A317DFCEA36BC3D8", "DB0303BA00377F12", "8D53E5907CC28B0E",
    "ED34FCC18B4370CA", "30572F919E4111CA", "A768619A938331B2", "BD8169BE28A72A35", "557738DECE90FA99",
    "28A2DD041CB1B953", "826658DE5788658E", "9AD00DC31BB2FF99", "0AE1469FE7D1FA91", "4F665C6DFA0E56C2",
    "7FE6812A00CE538D", "D1278C6A915178E2", "2FD6BCFFCC7B0B85", "6B4C553D98B2A4BB", "FE3DD3EA861AED0E",
    "3DCC61E61F5C334E", "9AD46CFBBE8E13FD", "AF1C6287396372B5", "82A5A0D0119DF660", "C68914483FD1DA2F",
    "E3341E145D4C88B1", "4B60637DBB04EB7C", "1B2E924B3174F148", "B4880C0A4E1A556B", "56FB5ACD9FFF7F11",
    "04335BFE37632BB0", "155FC2146BD1D77F", "825FFC85BE3247FB", "2A074D3299BF8FC0", "F0DBE5050F072845",
    "CCF3DDA5DA4EA532", "BB14DC7E0557DE6C", "2727AB5018668914", "DD142D357B51E502", "92665AC09737B380",
    "BE0F6E954512ED61", "EBFAB3B68C14F199", "1E218953AB57FB02", "2D42BBE596D6E300", "FA4D59D44AB2600E",
    "409C39FB27F55A8B", "101D94CBC5376F35", "4A67E4DDFE099638", "794BD226DE98F6BF", "6C79C336D827825F",
    "DA12E22BE2E1D8E0", "3AB17F3426AB7C27", "606ED0DDAFF3554F", "81DDB2887BFD473A", "89CF3BA54694D819",
    "C2B96EB649A02ABC", "65E6DD2D62B161B1", "7A03858DCEF36978", "128D738C907ADC4C", "60A355F83585895C",
    "145764AC66DCF26B", "A3C720EF8E101E27", "3BD8C4DEB42E3AB9", "59242ACF90590B60", "1EBFA3D749A7CD16",
    "57412CD4B2917189", "184BC3B9CA1894F3", "1137DADAE199BA94", "F4681E8E21CAF4E0", "18E2816B0E157996",
    "2E0CDDDD9DE88F62", "B636FFF709B111BA", "3AD6A89692114909", "56E0A3632FA65877", "152A6B320897D7C2",
    "FA3EA4C6DAACA705", "D20AFE169E277369", "B40550733C540F0C", "F2384E3FB9BB7BA2", "B71C19333D92A69E",
    "8D85EC1D370B26E9", "63331BD351461F67", "DDD124B711188EE6", "9E2ED6E4AEDB1D23", "DADD0916F5B3AE05",
    "6F311AC1186EFF4F", "FBDD07E80EB72246", "B7F275CBE3D81223", "24DCFD5AF3CDC951", "EA8FA0FD744B5ABE",
    "BFBFDD4A2CED89CD", "5A22B09D1A1672A3", "E07C5BA013A4DDF1", "EBB80D4C8A572664", "9CD136F72D1DF35A",
    "67AB794CE901CB9C", "D72109825193741F", "E979A5F9FF228221", "2CBFC42C7D204D22", "21D71D986D54D47A",
    "6FFA3800EA9505A8")
  // same ids with a "0x" prefix
  val hex0x = hex.map("0x" + _)
  // ids parsed to Long via the project's HexString helper
  val long = hex.map(HexString.toLong(_))
}
| ShiftForward/icfpc2013 | src/main/scala/icfpc2013/static_input.scala | Scala | mit | 5,464 |
package wlml.ml
import breeze.linalg._
import breeze.linalg.qr.QR
import breeze.optimize._
/**
 * Numerical solvers for linear models backed by Breeze: gradient descent and
 * L-BFGS for sparse data, and a ridge-regularized QR solve for dense data.
 */
trait Optimizer extends wlml.ml.Opts {
  // Start Gradient Descent for Sparse Types
  /**
   * Recursive gradient descent. Each step evaluates `costFunc`, takes a step
   * of size `params.stepSize` against the gradient, and recurses until the
   * relative cost improvement drops below `params.tolerance` or
   * `params.maxiter` is reached.
   *
   * NOTE(review): on termination this returns the weights from *before* the
   * last gradient step (`weights`, not `nextWeights`), and the per-iteration
   * println looks like leftover debug output.
   */
  def gdSolver(weights: SparseVector[Double], features: CSCMatrix[Double],
    outputs: SparseVector[Double], params: Parameters, iter: Int)
    (costFunc: (CSCMatrix[Double], SparseVector[Double],SparseVector[Double], Parameters) => (Double,SparseVector[Double])): SparseVector[Double] = {
    // continue while improvement >= tolerance (ignoring iteration 0) and under maxiter
    def checkNecessity(chVal: Double, iter: Int, ps: Parameters): Boolean = {
      if (((chVal < ps.tolerance) && (iter != 0)) || (iter >= ps.maxiter)) false
      else true
    }
    // relative improvement of the current cost over the previous one
    def descentAmountChecker(cost: Double, pa: Parameters): Double = {
      (pa.earlierCost - cost) / pa.earlierCost
    }
    // Calculate cost
    val (cost:Double, gradient:SparseVector[Double]) = costFunc(features, outputs, weights, params)
    val checkValue = descentAmountChecker(cost, params)
    // take one step: w <- w - stepSize * gradient (on a copy; input untouched)
    val nextWeights = weights.copy
    nextWeights :-= gradient * params.stepSize
    println(iter, cost, checkValue)
    if (checkNecessity(checkValue, iter, params)) {
      val paramsNext = params.copy(earlierCost = cost)
      gdSolver(nextWeights, features, outputs, paramsNext, iter + 1)(costFunc)
    } else weights
  }
  // End of Gradient Descent for Sparse Types
  // Start of QR Decomposition for Dense Types
  /**
   * Closed-form ridge regression: augments the design matrix with
   * l2_penalty * I (and the outputs with zeros), then solves via a reduced
   * QR decomposition: w = R^-1 (Q^T y).
   */
  def qrSolver(features: DenseMatrix[Double], outputs: DenseVector[Double],
    params: Parameters): DenseVector[Double] = {
    val n: Int = features.cols
    val feats = DenseMatrix.vertcat(features, DenseMatrix.eye[Double](n) * params.l2_penalty)
    val outs = DenseVector.vertcat(outputs, DenseVector.zeros[Double](n))
    val QR(qValue, rValue) = qr.reduced(feats)
    inv(rValue) * (qValue.t * outs)
  }
  /**
   * Minimizes `costFunc` with Breeze's L-BFGS starting from `initialWeights`,
   * using `params.tolerance` as the convergence tolerance.
   */
  def lbfgsSolver(initialWeights: SparseVector[Double], features: CSCMatrix[Double], outputs: SparseVector[Double], params: Parameters)
  (costFunc: (CSCMatrix[Double], SparseVector[Double],SparseVector[Double], Parameters) => (Double,SparseVector[Double])) = {
    // adapt costFunc to Breeze's DiffFunction interface
    val obj = new DiffFunction[SparseVector[Double]] {
      override def calculate(weights: SparseVector[Double]): (Double, SparseVector[Double]) = {
        costFunc(features, outputs, weights, params)
      }
    }
    //val initWeights = SparseVector(Array.fill(features.cols)(1.0))
    new LBFGS[SparseVector[Double]](tolerance = params.tolerance).minimize(obj, initialWeights)
  }
}
| Karfroth/WLML_Scala | src/main/scala/wlml/ml/Optimizer.scala | Scala | mit | 2,480 |
import org.apache.spark.mllib.linalg.Matrix
object CsvWriter {

  /**
   * Writes a Spark MLlib [[Matrix]] to `filename` as comma-separated rows,
   * one matrix row per line.
   *
   * The matrix is transposed before flattening because `Matrix.toArray`
   * returns values in column-major order; grouping the transposed array by
   * `numCols` therefore recovers the original rows.
   *
   * @param matrix   matrix to serialise
   * @param filename destination file path (overwritten if it already exists)
   */
  def writeMatrixToFile(matrix: Matrix, filename : String): Unit = {
    import java.io._
    val localMatrix: List[Array[Double]] = matrix
      .transpose // Transpose since .toArray is column major
      .toArray
      .grouped(matrix.numCols)
      .toList
    val lines: List[String] = localMatrix
      .map(line => line.mkString(","))
      .map(_ + "\\n")
    val writer = new PrintWriter(new File(filename))
    // close in a finally block so the file handle is released even if a
    // write fails (the previous version leaked the writer on exception)
    try {
      lines.foreach(writer.write)
    } finally {
      writer.close()
    }
  }
}
| DanteLore/mot-data-in-spark | src/main/scala/CsvWriter.scala | Scala | mit | 540 |
package com.twitter.zk
import org.apache.zookeeper.KeeperException
import com.twitter.conversions.time._
import com.twitter.util.{Duration, Future, Timer}
/** Pluggable retry strategy. */
trait RetryPolicy {
  /** Runs `op` (re-evaluated by-name on each attempt) under this policy. */
  def apply[T](op: => Future[T]): Future[T]
}
/** Matcher for connection-related KeeperExceptions. */
object KeeperConnectionException {
  /**
   * Extracts `e` when it is one of the transient, connection-level ZooKeeper
   * failures (connection loss, session expiry/move, operation timeout) that a
   * [[RetryPolicy]] may retry; any other KeeperException does not match.
   */
  def unapply(e: KeeperException) =
    e match {
      case _: KeeperException.ConnectionLossException
         | _: KeeperException.SessionExpiredException
         | _: KeeperException.SessionMovedException
         | _: KeeperException.OperationTimeoutException =>
        Some(e)
      case _ =>
        None
    }
}
/** Built-in [[RetryPolicy]] implementations. */
object RetryPolicy {
  /** Retries an operation a fixed number of times without back-off. */
  case class Basic(retries: Int) extends RetryPolicy {
    def apply[T](op: => Future[T]): Future[T] = {
      // retries only on connection-related KeeperExceptions; other failures
      // propagate immediately
      def retry(tries: Int): Future[T] = {
        op rescue { case KeeperConnectionException(_) if (tries > 0) =>
          retry(tries - 1)
        }
      }
      retry(retries)
    }
  }
  /**
   * Retries an operation indefinitely until success, with a delay that increases exponentially.
   *
   * @param base initial value that is multiplied by factor every time; should be > 0
   * @param factor should be >= 1 so the retries do not become more aggressive
   */
  case class Exponential(
    base: Duration,
    factor: Double = 2.0,
    maximum: Duration = 30.seconds
  )(implicit timer: Timer) extends RetryPolicy {
    require(base > 0.seconds)
    require(factor >= 1)
    def apply[T](op: => Future[T]): Future[T] = {
      def retry(delay: Duration): Future[T] = {
        op rescue { case KeeperConnectionException(_) =>
          // wait `delay`, then retry with the delay scaled by `factor`,
          // capped at `maximum`
          timer.doLater(delay) {
            retry((delay.inNanoseconds * factor).toLong.nanoseconds min maximum)
          }.flatten
        }
      }
      retry(base)
    }
  }
  /** A single try */
  object None extends RetryPolicy {
    def apply[T](op: => Future[T]) = op
  }
}
| BuoyantIO/twitter-util | util-zk/src/main/scala/com/twitter/zk/RetryPolicy.scala | Scala | apache-2.0 | 1,999 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.spark
import java.util.ServiceLoader
/** Locates a [[SpatialRDDProvider]] able to handle a given parameter map. */
object GeoMesaSpark {

  // NOTE(review): JavaConversions is deprecated since Scala 2.12; consider
  // JavaConverters/CollectionConverters with explicit .asScala
  import scala.collection.JavaConversions._

  // providers discovered once via the JDK ServiceLoader mechanism
  lazy val providers: ServiceLoader[SpatialRDDProvider] = ServiceLoader.load(classOf[SpatialRDDProvider])

  /**
   * Returns the first provider whose `canProcess` accepts `params`.
   * @throws RuntimeException if no registered provider can process the params
   */
  def apply(params: java.util.Map[String, _ <: java.io.Serializable]): SpatialRDDProvider =
    providers.find(_.canProcess(params)).getOrElse(throw new RuntimeException("Could not find a SpatialRDDProvider"))
}
| elahrvivaz/geomesa | geomesa-spark/geomesa-spark-core/src/main/scala/org/locationtech/geomesa/spark/GeoMesaSpark.scala | Scala | apache-2.0 | 925 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.driver.client
import scalaj.http.{Http, HttpOptions}
import com.actorbase.driver.client.api.RestMethods._
/**
 * This class has the responsibility of the communication
 * with the HTTP interface of the server side of Actorbase
 *
 */
class ActorbaseClient extends Client {

  // NOTE(review): never referenced below — the scalaj `Http` object is used
  // directly in send(); candidate for removal
  private val client = Http
  private val options = createClientOptions

  /**
   * Add connection options to the scalaj-http client Object
   *
   * @return a sequence of HttpOption representing options to be applied to the
   * connection object (currently a 60s read timeout)
   */
  override def createClientOptions: Seq[HttpOptions.HttpOption] = Seq(HttpOptions.readTimeout(60000))

  /**
   * Send method, send a Request object to the Actorbase server listening
   * and return a Response object
   *
   * @param request a Request reference, contains all HTTP request details
   * @return an object of type Response, containing the status of the response
   * and the body as Option[String]
   */
  override def send(request: Request): Response = {
    // dispatch on the HTTP verb; all variants use basic auth, the request
    // headers, and the shared options; non-GET verbs post the body
    // ("None" placeholder when absent)
    val response = request.method match {
      case GET => Http(request.uri).auth(request.user, request.password).headers(request.headers).options(options).asString
      case POST => Http(request.uri).auth(request.user, request.password).headers(request.headers).postData(request.body.getOrElse("None")).options(options).asString
      case PUT => Http(request.uri).auth(request.user, request.password).headers(request.headers).postData(request.body.getOrElse("None")).method("PUT").options(options).asString
      case DELETE => Http(request.uri).auth(request.user, request.password).headers(request.headers).postData(request.body.getOrElse("None")).method("DELETE").options(options).asString
    }
    Response(response.code, Some(response.body.asInstanceOf[String]))
  }
}
| ScalateKids/Actorbase-Client | src/main/scala/com/actorbase/driver/client/ActorbaseClient.scala | Scala | mit | 3,079 |
package io.buoyant.telemetry.prometheus
import com.twitter.finagle.Service
import com.twitter.finagle.http.{MediaType, Response, Request}
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.tracing.NullTracer
import com.twitter.util.{Awaitable, Closable, Future}
import io.buoyant.admin.Admin
import io.buoyant.telemetry.{Metric, MetricsTree, Telemeter}
/**
 * This telemeter exposes metrics data in the Prometheus format, served on
 * an admin endpoint. It does not provide a StatsReceiver or Tracer. It reads
 * histogram summaries directly off of the MetricsTree and assumes that stats
 * are being snapshotted at some appropriate interval.
 */
class PrometheusTelemeter(metrics: MetricsTree) extends Telemeter with Admin.WithHandlers {

  // HTTP handler: renders the whole metrics tree as a text/plain body
  private[prometheus] val handler = Service.mk { request: Request =>
    val response = Response()
    response.version = request.version
    response.mediaType = MediaType.Txt
    val sb = new StringBuilder()
    writeMetrics(metrics, sb)
    response.contentString = sb.toString
    Future.value(response)
  }

  val adminHandlers: Seq[Admin.Handler] = Seq(
    Admin.Handler("/admin/metrics/prometheus", handler)
  )

  // this telemeter contributes no stats receiver, tracer, or background task
  val stats = NullStatsReceiver
  def tracer = NullTracer
  def run(): Closable with Awaitable[Unit] = Telemeter.nopRun

  // Prometheus metric names allow only [a-zA-Z0-9:] here; others become '_'
  private[this] val metricNameDisallowedChars = "[^a-zA-Z0-9:]".r
  private[this] def escapeKey(key: String) = metricNameDisallowedChars.replaceAllIn(key, "_")

  // label keys allow [a-zA-Z0-9_]; others become '_'
  private[this] val labelKeyDisallowedChars = "[^a-zA-Z0-9_]".r
  private[this] def escapeLabelKey(key: String) = labelKeyDisallowedChars.replaceAllIn(key, "_")

  // https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-details
  private[this] val labelValDisallowedChars = """(\\\\|\\"|\\n)""".r
  private[this] def escapeLabelVal(key: String) = labelValDisallowedChars.replaceAllIn(key, """\\\\\\\\""")

  // renders labels as {k1="v1", k2="v2"}, or "" when there are none
  private[this] def formatLabels(labels: Seq[(String, String)]): String =
    if (labels.nonEmpty) {
      labels.map {
        case (k, v) =>
          s"""${escapeLabelKey(k)}="${escapeLabelVal(v)}""""
      }.mkString("{", ", ", "}")
    } else {
      ""
    }

  private[this] val first: ((String, String)) => String = _._1
  // true when a label with this key has already been emitted
  private[this] def labelExists(labels: Seq[(String, String)], name: String) =
    labels.toIterator.map(first).contains(name)

  /**
   * Depth-first walk of the metrics tree, appending one exposition line per
   * counter/gauge and a set of summary lines per stat. Router/service/client/
   * server path segments are rewritten from the metric name prefix into
   * Prometheus labels.
   */
  private[this] def writeMetrics(
    tree: MetricsTree,
    sb: StringBuilder,
    prefix0: Seq[String] = Nil,
    labels0: Seq[(String, String)] = Nil
  ): Unit = {

    // Re-write elements out of the prefix into labels
    val (prefix1, labels1) = prefix0 match {
      case Seq("rt", router) if !labelExists(labels0, "rt") =>
        (Seq("rt"), labels0 :+ ("rt" -> router))
      case Seq("rt", "service", path) if !labelExists(labels0, "service") =>
        (Seq("rt", "service"), labels0 :+ ("service" -> path))
      case Seq("rt", "client", id) if !labelExists(labels0, "client") =>
        (Seq("rt", "client"), labels0 :+ ("client" -> id))
      case Seq("rt", "client", "service", path) if !labelExists(labels0, "service") =>
        (Seq("rt", "client", "service"), labels0 :+ ("service" -> path))
      case Seq("rt", "server", srv) if !labelExists(labels0, "server") =>
        (Seq("rt", "server"), labels0 :+ ("server" -> srv))
      case _ => (prefix0, labels0)
    }

    // metric name is the remaining prefix joined with ':'
    val key = escapeKey(prefix1.mkString(":"))

    tree.metric match {
      case c: Metric.Counter =>
        sb.append(key)
        sb.append(formatLabels(labels1))
        sb.append(" ")
        sb.append(c.get)
        sb.append("\\n")
      case g: Metric.Gauge =>
        sb.append(key)
        sb.append(formatLabels(labels1))
        sb.append(" ")
        sb.append(g.get)
        sb.append("\\n")
      case s: Metric.Stat =>
        // summaries are read from the last snapshot; may be null before the
        // first snapshot, in which case the stat is skipped
        val summary = s.snapshottedSummary
        if (summary != null) {
          for (
            (stat, value) <- Seq(
              "count" -> summary.count, "sum" -> summary.sum, "avg" -> summary.avg
            )
          ) {
            sb.append(key)
            sb.append("_")
            sb.append(stat)
            sb.append(formatLabels(labels1))
            sb.append(" ")
            sb.append(value)
            sb.append("\\n")
          }
          // percentiles are emitted as a "quantile" label per Prometheus
          // summary conventions
          for (
            (percentile, value) <- Seq(
              "0" -> summary.min, "0.5" -> summary.p50,
              "0.9" -> summary.p90, "0.95" -> summary.p95, "0.99" -> summary.p99,
              "0.999" -> summary.p9990, "0.9999" -> summary.p9999,
              "1" -> summary.max
            )
          ) {
            sb.append(key)
            sb.append(formatLabels(labels1 :+ ("quantile" -> percentile)))
            sb.append(" ")
            sb.append(value)
            sb.append("\\n")
          }
        }
      case _ =>
    }

    // recurse into child scopes with the (possibly rewritten) prefix/labels
    for ((name, child) <- tree.children) {
      writeMetrics(child, sb, prefix1 :+ name, labels1)
    }
  }
}
| pawelprazak/linkerd | telemetry/prometheus/src/main/scala/io/buoyant/telemetry/prometheus/PrometheusTelemeter.scala | Scala | apache-2.0 | 4,894 |
package cook.actor.impl.util
import cook.actor.StatusManager
import cook.actor.TaskType
/**
 * Builds Runnables that wrap a unit of work with task lifecycle reporting:
 * the [[StatusManager]] is notified at start and end, and debug lines are
 * logged around the work.
 */
class TaskBuilder(taskType: TaskType.Value) {
  import cook.util.LogSourceProvider._
  import akka.event.Logging
  private val log = Logging(cook.app.Global.system, this)

  /**
   * Wraps `runBlock` (evaluated lazily, once per run) in a Runnable that
   * reports `taskName` under this builder's task type.
   */
  def apply(taskName: String)
      (runBlock: => Unit)
      (implicit statusManager: StatusManager): Runnable = new Runnable {
    override def run() {
      log.debug("Task {} '{}': Start", taskType, taskName)
      statusManager.startTask(taskType, taskName)
      runBlock
      // NOTE(review): if runBlock throws, endTask is never called
      log.debug("Task {} '{}': End", taskType, taskName)
      statusManager.endTask(taskType, taskName)
    }
  }
}
| timgreen/cook | src/cook/actor/impl/util/TaskBuilder.scala | Scala | apache-2.0 | 669 |
/*
* Copyright 2018 CJWW Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package helpers.other
import akka.util.Timeout
import org.scalatest.Assertion
import org.scalatestplus.play.PlaySpec
import play.api.mvc.{Action, Request, Result}
import play.api.test.{DefaultAwaitTimeout, FutureAwaits}
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.reflect.ClassTag
/**
 * Test helpers for asserting on Future results and Play actions with a
 * shared 5-second await timeout. Mixes into a PlaySpec.
 */
trait FutureAsserts
  extends FutureAwaits
    with DefaultAwaitTimeout {
  self: PlaySpec =>

  // overrides Play's default await timeout for all helpers below
  override implicit def defaultAwaitTimeout: Timeout = 5.seconds

  /** Awaits the future and runs `assertions` on its value. */
  def awaitAndAssert[T](methodUnderTest: => Future[T])(assertions: T => Assertion): Assertion = {
    assertions(await(methodUnderTest))
  }

  /** Runs `assertions` directly on the (unawaited) Future result. */
  def assertResult(methodUnderTest: => Future[Result])(assertions: Future[Result] => Assertion): Assertion = {
    assertions(methodUnderTest)
  }

  /** Runs `assertions` on a synchronously computed value. */
  def assertReturn[T](methodUnderTest: => T)(assertions: T => Assertion): Assertion = {
    assertions(methodUnderTest)
  }

  /** Awaits the future and expects it to fail with exception type X. */
  def awaitAndIntercept[X <: AnyRef](methodUnderTest: => Future[Any])(implicit classTag: ClassTag[X]): X = {
    intercept[X](await(methodUnderTest))
  }

  /** Invokes the action with the request and runs `test` on its result. */
  def runActionWithoutAuth[A](action: Action[A], request: Request[A])(test: Future[Result] => Assertion): Assertion = {
    test(action(request))
  }
}
| cjww-development/auth-microservice | test/helpers/other/FutureAsserts.scala | Scala | apache-2.0 | 1,803 |
package reactivemongo.api.commands
import scala.util.Success
import reactivemongo.api.{
ReadConcern,
Session,
SerializationPack,
WriteConcern => WC
}
// Shared readers/writers for MongoDB command requests and responses,
// parameterised over the serialization pack in use (BSON, etc.).
private[reactivemongo] object CommandCodecs {
  /**
   * Helper to read a command result, with error handling.
   */
  def dealingWithGenericCommandErrorsReader[P <: SerializationPack, A](pack: P)(readResult: pack.Document => A): pack.Reader[A] = {
    val decoder = pack.newDecoder
    pack.reader[A] { doc: pack.Document =>
      // A response whose "ok" field is not truthy is turned into a CommandError
      decoder.booleanLike(doc, "ok") match {
        case Some(true) => {
          // Surface any server-provided "note" in the logs before decoding
          decoder.string(doc, "note").foreach { note =>
            Command.logger.info(s"${note}: ${pack pretty doc}")
          }
          readResult(doc)
        }
        case _ => throw CommandError(pack)(
          code = decoder.int(doc, "code"),
          errmsg = decoder.string(doc, "errmsg"),
          originalDocument = doc)
      }
    }
  }

  /** Reader producing the default write result representation. */
  @inline def defaultWriteResultReader[P <: SerializationPack with Singleton](pack: P): pack.Reader[DefaultWriteResult] = writeResultReader[DefaultWriteResult, pack.type](pack)

  /**
   * Reader for a write command response: decodes per-document write errors
   * ("writeErrors") and any write concern error ("writeConcernError").
   */
  def writeResultReader[WR >: DefaultWriteResult, P <: SerializationPack with Singleton](pack: P): pack.Reader[WR] = {
    val decoder = pack.newDecoder
    val readWriteError = CommandCodecs.readWriteError(decoder)
    val readWriteConcernError = CommandCodecs.readWriteConcernError(decoder)
    dealingWithGenericCommandErrorsReader[pack.type, WR](pack) { doc =>
      val werrors = decoder.children(doc, "writeErrors").map(readWriteError)
      val wcError = decoder.child(doc, "writeConcernError").
        map(readWriteConcernError)
      DefaultWriteResult(
        ok = decoder.booleanLike(doc, "ok").getOrElse(true),
        n = decoder.int(doc, "n").getOrElse(0),
        writeErrors = werrors,
        writeConcernError = wcError,
        code = decoder.int(doc, "code"),
        errmsg = decoder.string(doc, "errmsg"))
    }
  }

  /** Reader for commands whose result carries no payload (only ok/error). */
  def unitBoxReader[P <: SerializationPack](pack: P): pack.Reader[UnitBox.type] = dealingWithGenericCommandErrorsReader[pack.type, UnitBox.type](pack) { _ => UnitBox }
  //def writeReadConcern[P <: SerializationPack with Singleton](pack: P): ReadConcern => pack.Document = writeReadConcern[pack.type](pack.newBuilder)

  /** Serializes a read concern as a single `level` element. */
  def writeReadConcern[P <: SerializationPack with Singleton](builder: SerializationPack.Builder[P]): ReadConcern => Seq[builder.pack.ElementProducer] = { c: ReadConcern => Seq(builder.elementProducer("level", builder.string(c.level))) }

  /**
   * Serializes the read concern together with session/transaction fields,
   * honouring the transaction state (only the first command in a transaction
   * may carry a readConcern; `afterClusterTime` is added when an operation
   * time is known).
   *
   * NOTE(review): the function returned for a given session closes over a
   * shared `Seq.newBuilder`; it appears intended to be applied exactly once
   * per session value — applying it twice would accumulate duplicate
   * elements. TODO confirm against call sites.
   */
  def writeSessionReadConcern[P <: SerializationPack with Singleton](builder: SerializationPack.Builder[P]): Option[Session] => ReadConcern => Seq[builder.pack.ElementProducer] = {
    import builder.{ document, elementProducer => element, pack }
    val simpleReadConcern = writeReadConcern(builder)
    def simpleRead(c: ReadConcern): pack.ElementProducer =
      element("readConcern", document(simpleReadConcern(c)))
    val writeSession = CommandCodecs.writeSession(builder)
    { session =>
      session match {
        case Some(s) => {
          val elements = Seq.newBuilder[pack.ElementProducer]
          elements ++= writeSession(s)
          if (!s.transaction.filter(_.flagSent).isSuccess) {
            // No transaction, or flag not yet send (first tx command)
            s.operationTime match {
              case Some(opTime) => { c: ReadConcern =>
                elements += element("readConcern", document(
                  simpleReadConcern(c) :+ element(
                    "afterClusterTime", builder.timestamp(opTime))))
                elements.result()
              }
              case _ => { c: ReadConcern =>
                elements += simpleRead(c)
                elements.result()
              }
            }
          } else { _: ReadConcern =>
            // Ignore: Only the first command in a transaction
            // may specify a readConcern (code=72)
            elements.result()
          }
        }
        case _ => { c: ReadConcern => Seq(simpleRead(c)) }
      }
    }
  }

  /** Serializes a write concern using the pack's default builder. */
  @inline def writeWriteConcern[P <: SerializationPack with Singleton](pack: P): WC => pack.Document = writeWriteConcern[pack.type](pack.newBuilder)

  /** Serializes the `w` component of a write concern (tag set, count or "majority"). */
  def writeGetLastErrorWWriter[P <: SerializationPack with Singleton](
    builder: SerializationPack.Builder[P]): WC.W => builder.pack.Value = {
    case GetLastError.TagSet(tagSet) => builder.string(tagSet)
    case GetLastError.WaitForAcknowledgments(n) => builder.int(n)
    case GetLastError.WaitForAknowledgments(n) => builder.int(n)
    case _ => builder.string("majority")
  }

  /** Serializes a write concern document: `w`, `j` and optional `wtimeout`. */
  def writeWriteConcern[P <: SerializationPack with Singleton](
    builder: SerializationPack.Builder[P]): WC => builder.pack.Document = {
    val writeGLEW = writeGetLastErrorWWriter(builder)
    { writeConcern: WC =>
      import builder.{ elementProducer => element, int }
      val elements = Seq.newBuilder[builder.pack.ElementProducer]
      elements += element("w", writeGLEW(writeConcern.w))
      elements += element("j", builder.boolean(writeConcern.j))
      writeConcern.wtimeout.foreach { t =>
        elements += element("wtimeout", int(t))
      }
      builder.document(elements.result())
    }
  }

  /**
   * Serializes session fields: always `lsid`; when a transaction is active,
   * also `txnNumber`, `autocommit=false` and — for the first transaction
   * command — `startTransaction=true`.
   */
  def writeSession[P <: SerializationPack with Singleton](builder: SerializationPack.Builder[P]): Session => Seq[builder.pack.ElementProducer] = {
    import builder.{ elementProducer => element }
    { session: Session =>
      val idElmt = builder.document(Seq(
        element("id", builder.uuid(session.lsid))))
      session.transaction match {
        case Success(transaction) => {
          val elms = Seq.newBuilder[builder.pack.ElementProducer]
          elms ++= Seq(
            element("lsid", idElmt),
            element("txnNumber", builder.long(transaction.txnNumber)))
          if (!session.transactionToFlag()) {
            elms += element("startTransaction", builder.boolean(true))
          }
          elms += element("autocommit", builder.boolean(false))
          elms.result()
        }
        case _ => Seq(element("lsid", idElmt))
      }
    }
  }

  // Decodes one entry of "writeErrors"; throws (via Option.get) when the
  // mandatory index/code/errmsg fields are missing.
  def readWriteError[P <: SerializationPack with Singleton](decoder: SerializationPack.Decoder[P]): decoder.pack.Document => WriteError = { doc =>
    (for {
      index <- decoder.int(doc, "index")
      code <- decoder.int(doc, "code")
      err <- decoder.string(doc, "errmsg")
    } yield WriteError(index, code, err)).get
  }

  // Decodes the "writeConcernError" sub-document (code + errmsg, both required).
  def readWriteConcernError[P <: SerializationPack with Singleton](decoder: SerializationPack.Decoder[P]): decoder.pack.Document => WriteConcernError = { doc =>
    (for {
      code <- decoder.int(doc, "code")
      err <- decoder.string(doc, "errmsg")
    } yield WriteConcernError(code, err)).get
  }

  // Decodes an upserted-document descriptor (index + generated "_id").
  def readUpserted[P <: SerializationPack with Singleton](decoder: SerializationPack.Decoder[P]): decoder.pack.Document => Upserted.Aux[P] = { document =>
    (for {
      index <- decoder.int(document, "index")
      id <- decoder.get(document, "_id")
    } yield Upserted.init[P](index, id)).get
  }
}
| ornicar/ReactiveMongo | driver/src/main/scala/api/commands/CommandCodecs.scala | Scala | apache-2.0 | 7,080 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package genc
import CAST.Tree
/* Printer helpers adapted to C code generation */
// Immutable state threaded through CAST pretty-printing: the indentation
// level, the printer receiving the output, and the tree being printed
// together with (optionally) the tree printed just before it.
case class PrinterContext(
  indent: Int,           // current indentation level applied after newlines (see the `c` interpolator)
  printer: CPrinter,     // printer whose buffer receives the emitted C code
  previous: Option[Tree],// previously printed / enclosing tree, if any
  current: Tree          // tree currently being printed
)
/**
 * String-interpolation helpers used by [[CPrinter]] to emit C code while
 * tracking the indentation of interpolated sub-trees.
 */
object CPrinterHelpers {
  // Anything that can be printed given a PrinterContext (lazy fragment).
  implicit class Printable(val f: PrinterContext => Any) extends AnyVal {
    def print(ctx: PrinterContext) = f(ctx)
  }

  implicit class PrinterHelper(val sc: StringContext) extends AnyVal {
    /**
     * The `c"..."` interpolator: appends the literal parts to the printer's
     * buffer and pretty-prints each interpolated argument (a tree, a sequence
     * of trees, a WrapperTree, a nested Printable, or anything else via
     * `toString`). The extra indentation for each argument is derived from
     * the spaces following the last newline of the preceding literal part.
     */
    def c(args: Any*)(implicit ctx: PrinterContext): Unit = {
      val printer = ctx.printer
      val sb = printer.sb
      import printer.WrapperTree
      val strings = sc.parts.iterator
      val expressions = args.iterator
      var extraInd = 0
      var firstElem = true
      while(strings.hasNext) {
        val s = strings.next.stripMargin
        // Compute indentation
        val start = s.lastIndexOf('\\n')
        if(start >= 0 || firstElem) {
          // Count the spaces after the last newline; each indentation step
          // is assumed to be two spaces wide.
          var i = start + 1
          while(i < s.length && s(i) == ' ') {
            i += 1
          }
          extraInd = (i - start - 1) / 2
        }
        firstElem = false
        // Make sure new lines are also indented
        sb.append(s.replaceAll("\\n", "\\n" + (" " * ctx.indent)))
        val nctx = ctx.copy(indent = ctx.indent + extraInd)
        if (expressions.hasNext) {
          val e = expressions.next
          e match {
            case ts: Seq[Any] =>
              // sequences are printed comma-separated via nary()
              nary(ts).print(nctx)
            case t: Tree =>
              // record the current tree as "previous" for the nested print
              val nctx2 = nctx.copy(current = t, previous = Some(nctx.current))
              printer.pp(t)(nctx2)
            case wt: WrapperTree =>
              printer.pp(wt)(nctx)
            case p: Printable =>
              p.print(nctx)
            case e =>
              // fallback: raw textual rendering
              sb.append(e.toString)
          }
        }
      }
    }
  }

  /**
   * Printable emitting `ls` separated by `sep`, wrapped in `opening` and
   * `closing` — both omitted when the sequence is empty.
   */
  def nary(ls: Seq[Any], sep: String = ", ", opening: String = "", closing: String = ""): Printable = {
    val (o, c) = if (ls.isEmpty) ("", "") else (opening, closing)
    val strs = o +: List.fill(ls.size-1)(sep) :+ c
    implicit pctx: PrinterContext =>
      new StringContext(strs: _*).c(ls: _*)
  }
}
| regb/leon | src/main/scala/leon/genc/CPrinterHelper.scala | Scala | gpl-3.0 | 2,151 |
import concurrent.Future
import java.io.{PrintStream, PrintWriter}
import java.util.logging.{Level, Logger => JavaLogger}
package object monitoring {
  // Adapters exposing a common `error(msg, ex)` logging interface over
  // various output/log types via anonymous structural types.
  // NOTE(review): `new { ... }` yields structural types, whose calls go
  // through runtime reflection — a dedicated trait would be cheaper.
  implicit def PrintStream2Log(out : PrintStream) = new {
    def error(msg: String, ex: Throwable) {
      out.println(msg)
      ex.printStackTrace(out)
    }
  }
  implicit def PrintWriter2Log(out : PrintWriter) = new {
    def error(msg: String, ex: Throwable) {
      out.println(msg)
      ex.printStackTrace(out)
    }
  }
  implicit def java_util_logging_Logger2Log(out : JavaLogger) = new {
    def error(msg: String, ex: Throwable) {
      out.log(Level.SEVERE, msg, ex)
    }
  }
  // Enables `someFuture as "description"`: journals the future under the
  // given description.
  implicit def JournaledFuture[T](f: Future[T])(implicit journal : Journal) = new {
    def as(msg : String) = journal(msg, f)
  }
  // Enables `someBlock as "description"` and `someBlock onErrorLog log`
  // on arbitrary by-name blocks.
  implicit def JournaledBlock[T](f: => T)(implicit journal : Journal) = new {
    def as(msg : String) = journal(msg)(f)
    // NOTE(review): the type parameter T below shadows the outer T, and the
    // view bound `<%` is deprecated — candidates for cleanup.
    def onErrorLog[T, LOG <% { def error(msg: String, ex: Throwable) : Unit}](log : LOG, description:String = "operation") = journal.reportIfFails(log, description)(f)
    // def reportIfFails[T](description:String = "operation")(implicit log : { def error(msg: String, ex: Throwable) : Unit}) = journal.reportIfFails(log, description)(f)
  }
  // Journal that silently discards everything (null-object implementation).
  object DevNull extends Journal{
    override def info(message: => String) { }
    override def apply[T](description: String, future: => Future[T]) = future
    override def apply[T](description: String)(f: => T) = f
  }
}
| piotrga/monitoring | src/main/scala/monitoring/package.scala | Scala | apache-2.0 | 1,468 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import kafka.utils.{IteratorTemplate, Logging, Utils}
import java.util.concurrent.{TimeUnit, BlockingQueue}
import kafka.serializer.Decoder
import java.util.concurrent.atomic.AtomicReference
import kafka.message.{MessageAndOffset, MessageAndMetadata}
import kafka.common.{KafkaException, MessageSizeTooLargeException}
/**
* An iterator that blocks until a value can be read from the supplied queue.
* The iterator takes a shutdownCommand object which can be added to the queue to trigger a shutdown
*
*/
/**
 * An iterator that blocks until a value can be read from the supplied queue.
 * The iterator takes a shutdownCommand object which can be added to the queue to trigger a shutdown
 *
 * @param channel queue of fetched data chunks, filled by the fetcher threads
 * @param consumerTimeoutMs how long poll() blocks waiting for data; a
 *                          negative value means block indefinitely
 * @param keyDecoder decoder applied to each raw message key
 * @param valueDecoder decoder applied to each raw message payload
 * @param clientId consumer client id used to look up per-topic statistics
 */
class ConsumerIterator[K, V](private val channel: BlockingQueue[FetchedDataChunk],
                             consumerTimeoutMs: Int,
                             private val keyDecoder: Decoder[K],
                             private val valueDecoder: Decoder[V],
                             val clientId: String)
  extends IteratorTemplate[MessageAndMetadata[K, V]] with Logging {

  // Iterator over the messages of the chunk being consumed; null until the
  // first chunk is loaded and after clearCurrentChunk().
  // NOTE(review): a `var` holding an AtomicReference is redundant — only the
  // referenced value is ever swapped, so a `val` would suffice.
  private var current: AtomicReference[Iterator[MessageAndOffset]] = new AtomicReference(null)
  private var currentTopicInfo: PartitionTopicInfo = null
  // Offset of the message following the one most recently returned; -1 until
  // the first message has been produced.
  private var consumedOffset: Long = -1L
  private val consumerTopicStats = ConsumerTopicStatsRegistry.getConsumerTopicStat(clientId)

  override def next(): MessageAndMetadata[K, V] = {
    val item = super.next()
    if(consumedOffset < 0)
      throw new KafkaException("Offset returned by the message set is invalid %d".format(consumedOffset))
    // Record the consumed position so offset commits can pick it up.
    currentTopicInfo.resetConsumeOffset(consumedOffset)
    val topic = currentTopicInfo.topic
    trace("Setting %s consumed offset to %d".format(topic, consumedOffset))
    //    consumerTopicStats.getConsumerTopicStats(topic).messageRate.mark()
    //    consumerTopicStats.getConsumerAllTopicStats().messageRate.mark()
    item
  }

  // Fetches (or continues) a chunk from the queue and produces the next
  // decoded message; signals end-of-iteration on the shutdown command and
  // throws ConsumerTimeoutException when polling times out.
  protected def makeNext(): MessageAndMetadata[K, V] = {
    var currentDataChunk: FetchedDataChunk = null
    // if we don't have an iterator, get one
    var localCurrent = current.get()
    if(localCurrent == null || !localCurrent.hasNext) {
      if (consumerTimeoutMs < 0)
        currentDataChunk = channel.take
      else {
        currentDataChunk = channel.poll(consumerTimeoutMs, TimeUnit.MILLISECONDS)
        if (currentDataChunk == null) {
          // reset state to make the iterator re-iterable
          resetState()
          throw new ConsumerTimeoutException
        }
      }
      if(currentDataChunk eq ZookeeperConsumerConnector.shutdownCommand) {
        debug("Received the shutdown command")
        // Put the command back so sibling iterators also see it.
        channel.offer(currentDataChunk)
        return allDone
      } else {
        currentTopicInfo = currentDataChunk.topicInfo
        val cdcFetchOffset = currentDataChunk.fetchOffset
        val ctiConsumeOffset = currentTopicInfo.getConsumeOffset
        if (ctiConsumeOffset < cdcFetchOffset) {
          error("consumed offset: %d doesn't match fetch offset: %d for %s;\\n Consumer may lose data"
            .format(ctiConsumeOffset, cdcFetchOffset, currentTopicInfo))
          currentTopicInfo.resetConsumeOffset(cdcFetchOffset)
        }
        localCurrent = currentDataChunk.messages.iterator
        current.set(localCurrent)
      }
      // if we just updated the current chunk and it is empty that means the fetch size is too small!
      if(currentDataChunk.messages.validBytes == 0)
        throw new MessageSizeTooLargeException("Found a message larger than the maximum fetch size of this consumer on topic " +
          "%s partition %d at fetch offset %d. Increase the fetch size, or decrease the maximum message size the broker will allow."
          .format(currentDataChunk.topicInfo.topic, currentDataChunk.topicInfo.partitionId, currentDataChunk.fetchOffset))
    }
    var item = localCurrent.next()
    // reject the messages that have already been consumed
    while (item.offset < currentTopicInfo.getConsumeOffset && localCurrent.hasNext) {
      item = localCurrent.next()
    }
    consumedOffset = item.nextOffset
    item.message.ensureValid() // validate checksum of message to ensure it is valid
    val keyBuffer = item.message.key
    val key = if(keyBuffer == null) null.asInstanceOf[K] else keyDecoder.fromBytes(Utils.readBytes(keyBuffer))
    val value = valueDecoder.fromBytes(Utils.readBytes(item.message.payload))
    new MessageAndMetadata(key, value, currentTopicInfo.topic, currentTopicInfo.partitionId, item.offset)
  }

  def clearCurrentChunk() {
    // NOTE(review): a `try` with no catch/finally clause is a no-op wrapper —
    // exceptions still propagate; presumably left over from removed handling.
    try {
      debug("Clearing the current data chunk for this consumer iterator")
      current.set(null)
    }
  }
}
// Thrown by ConsumerIterator.makeNext when no data arrives within consumerTimeoutMs.
class ConsumerTimeoutException() extends RuntimeException()
| kavink92/kafka-0.8.0-beta1-src | core/src/main/scala/kafka/consumer/ConsumerIterator.scala | Scala | apache-2.0 | 5,437 |
package lila.round
import org.joda.time.DateTime
import play.api.libs.json._
import chess.format.Uci
import chess.{ Pos, Move }
import lila.game.Game
/**
 * Conditional premoves a player has saved for an ongoing game: a list of
 * candidate lines, each line being the successive steps the player expects.
 *
 * @param _id player full id owning this forecast
 * @param steps the forecast lines
 * @param date when the forecast was last updated
 */
case class Forecast(
    _id: String, // player full id
    steps: Forecast.Steps,
    date: DateTime) {

  /**
   * After `lastMove` was played in game `g`, returns the forecast pruned to
   * the lines that predicted this position, together with the premove to
   * play now — or None when no line matches.
   */
  def apply(g: Game, lastMove: Move): Option[(Forecast, Uci.Move)] =
    nextMove(g, lastMove) map { move =>
      copy(
        steps = steps.collect {
          // keep only lines that predicted both the opponent's move and our
          // reply, dropping those two plies from the front of the line
          case (fst :: snd :: rest) if rest.nonEmpty && g.turns == fst.ply && fst.is(lastMove) && snd.is(move) => rest
        },
        date = DateTime.now
      ) -> move
    }

  // accept up to 30 lines of 30 moves each
  def truncate = copy(steps = steps.take(30).map(_ take 30))

  // The first line whose head matches the played move (at the current ply)
  // determines the premove reply; later matching lines are ignored.
  private def nextMove(g: Game, last: Move) = steps.foldLeft(none[Uci.Move]) {
    case (None, fst :: snd :: _) if g.turns == fst.ply && fst.is(last) => snd.uciMove
    case (move, _) => move
  }
}
object Forecast {

  type Steps = List[List[Step]]

  /**
   * Length in plies of the longest forecast line (0 when there are none).
   *
   * Fixed: the previous implementation (`~steps.map(_.size).sortBy(-_).lastOption`)
   * sorted the sizes in descending order and then took `lastOption`, which
   * yields the length of the *shortest* line — contradicting the method name.
   */
  def maxPlies(steps: Steps): Int = steps.foldLeft(0)((longest, line) => longest max line.size)

  /**
   * One step of a forecast line, stored redundantly in UCI and SAN forms
   * along with the resulting position.
   *
   * @param ply ply number at which this step applies
   * @param uci the move in UCI notation
   * @param san the move in SAN notation
   * @param fen position after the move
   * @param check whether the move gives check, when known
   * @param dests encoded legal destinations from the resulting position
   */
  case class Step(
      ply: Int,
      uci: String,
      san: String,
      fen: String,
      check: Option[Boolean],
      dests: String) {

    /** True when this step records the given played move. */
    def is(move: Move) = move.toUci.uci == uci
    def is(move: Uci.Move) = move.uci == uci

    def uciMove = Uci.Move(uci)
  }

  implicit val forecastStepJsonFormat = Json.format[Step]

  implicit val forecastJsonWriter = Json.writes[Forecast]

  // Raised when the client's forecast no longer matches the game state.
  case object OutOfSync extends lila.common.LilaException {
    val message = "Forecast out of sync"
  }
}
| JimmyMow/lila | modules/round/src/main/Forecast.scala | Scala | mit | 1,529 |
package com.asmasa.akka.cluster
// Generic failure notification carrying a human-readable reason.
case class Failed(reason: String)
// Failure of a specific message, with the cause as text.
// NOTE(review): the field name "reson" is a typo for "reason"; renaming it
// would break callers using the `reson` accessor or named arguments, so it
// is only flagged here rather than changed.
case class MessageFailed(reson: String)
// Marker message — presumably a registration request for the "Amami"
// cluster component; verify against the actor that handles it.
case object AmamiRegistration
| asmasa/akka_sample | src/main/scala/com/asmasa/akka/cluster/Message.scala | Scala | mit | 137 |
package common.api.commands
import common.api.PermissionLevel.PermissionLevel
import common.api.{Command, PermissionLevel}
// Command registered under the name "ping", restricted to the SUBSCRIBERS
// permission level and above.
trait PingCommand extends Command {
  // Trigger name of the command.
  override val name: String = "ping"
  // Minimum permission level required to invoke the command.
  override val permissionLevel: PermissionLevel = PermissionLevel.SUBSCRIBERS
}
| Cobbleopolis/MonsterTruckBot | modules/common/app/common/api/commands/PingCommand.scala | Scala | mit | 285 |
package one.lockstep
package util
package codec
import scodec._
import scodec.codecs._
import scala.collection.SortedMap
import scala.reflect.ClassTag
//TODO: simple eager codecs, add 'sized()' combinator for collections
// scodec codecs for common Scala collection types, with fixed-size and
// size-limited variants (the count is encoded as a length prefix).
trait CollectionCodecsModule {

  /**
   * codec for vector of limited size (size must be limit or less)
   */
  def limitedVector[A](limit: Int, countCodec: Codec[Int] = uint8, valueCodec: Codec[A])
  : Codec[Vector[A]] = {
    // length-prefixed vector; the prefix is rejected when above `limit`
    // (via flatZip — presumably effective on both encode and decode; TODO confirm)
    val sizedVectorCodec: Codec[(Int, Vector[A])] =
      countCodec.flatZip[Vector[A]] { n =>
        if (n > limit)
          fail(Err(s"$n entries, exceeding limit of $limit entries"))
        else
          vectorOfN(provide(n), valueCodec)
      }
    // hide the explicit count: drop it when decoding, recompute on encoding
    sizedVectorCodec.xmap[Vector[A]](
      { case cnt ~ vec => vec },
      { vec => vec.size ~ vec }
    )
  }

  /**
   * codec for vector of fixed size
   */
  def vector[A](size: Int, valueCodec: Codec[A]): Codec[Vector[A]] = {
    // no length prefix; the expected size is known statically and verified
    vectorOfN[A](provide(size), valueCodec).emap[Vector[A]] { vec =>
      if (vec.size != size)
        Attempt.failure(Err(s"encountered vector with size ${vec.size}, expected $size"))
      else
        Attempt.successful(vec)
    }.fuse
  }

  /**
   * codec for array of limited size
   */
  def limitedList[A : ClassTag](limit: Int, countCodec: Codec[Int], valueCodec: Codec[A]): Codec[List[A]] =
    limitedVector[A](limit, countCodec, valueCodec).xmap[List[A]](_.toList, Vector(_: _*))

  def array[A : ClassTag](valueCodec: Codec[A]): Codec[Array[A]] =
    scodec.codecs.vector[A](valueCodec).xmap[Array[A]](_.toArray, Vector(_: _*))

  /**
   * codec for array of limited size
   */
  def array[A : ClassTag](limit: Int, countCodec: Codec[Int], valueCodec: Codec[A]): Codec[Array[A]] =
    limitedVector[A](limit, countCodec, valueCodec).xmap[Array[A]](_.toArray, Vector(_: _*))

  /**
   * codec for array of fixed size
   */
  def array[A : ClassTag](size: Int, valueCodec: Codec[A]): Codec[Array[A]] =
    vector[A](size, valueCodec).xmap[Array[A]](_.toArray, Vector(_: _*))

  // sets are encoded as (unordered) vectors of their elements
  def set[A: ClassTag](valueCodec: Codec[A]): Codec[Set[A]] =
    scodec.codecs.vector[A](valueCodec).xmap[Set[A]](_.toSet, _.toVector)

  def set[A: ClassTag](limit: Int, countCodec: Codec[Int], valueCodec: Codec[A]): Codec[Set[A]] =
    limitedVector[A](limit, countCodec, valueCodec).xmap[Set[A]](_.toSet, _.toVector)

  def seq[A: ClassTag](valueCodec: Codec[A]): Codec[Seq[A]] =
    scodec.codecs.vector[A](valueCodec).xmap[Seq[A]](_.toSeq, _.toVector)

  def seq[A: ClassTag](limit: Int, countCodec: Codec[Int], valueCodec: Codec[A]): Codec[Seq[A]] =
    limitedVector[A](limit, countCodec, valueCodec).xmap[Seq[A]](_.toSeq, _.toVector)

  /**
   * codec for map
   */
  def map[K, V](countCodec: Codec[Int], entryCodec: Codec[(K, V)])
  : Codec[Map[K, V]] =
    scodec.codecs.vectorOfN[(K, V)](countCodec, entryCodec).xmap[Map[K, V]](_.toMap, _.toVector)

  /**
   * codec for map of limited size
   */
  def map[K, V](limit: Int, countCodec: Codec[Int], entryCodec: Codec[(K, V)])
  : Codec[Map[K, V]] =
    limitedVector[(K, V)](limit, countCodec, entryCodec).xmap[Map[K, V]](_.toMap, _.toVector)

  /**
   * codec for map of fixed size
   */
  def map[K, V](size: Int, entryCodec: Codec[(K, V)]): Codec[Map[K, V]] =
    map(size, provide(size), entryCodec)

  /**
   * codec for sorted map of limited size
   */
  def sortedMap[K, V](limit: Int, countCodec: Codec[Int], entryCodec: Codec[(K, V)])
                     (implicit keyOrdering: Ordering[K])
  : Codec[SortedMap[K, V]] =
    limitedVector[(K, V)](limit, countCodec, entryCodec).xmap[SortedMap[K, V]](SortedMap[K, V](_: _*), _.toVector)

  /**
   * codec for sorted map of fixed size
   */
  def sortedMap[K, V](size: Int, entryCodec: Codec[(K, V)])
                     (implicit keyOrdering: Ordering[K])
  : Codec[SortedMap[K, V]] =
    sortedMap(size, provide(size), entryCodec)

  //fixme express collection of fixed size or limited size in terms of times() constraint

  // TODO pending resurrection
  //  class CodecLimitBuilder private[Codecs] (limit: Int, sizeCodec: Codec[Int]) {
  //    private def limitCodec = LimitCodec(limit, sizeCodec)
  //
  //    /** limit codec for up to `limit` bits */
  //    def bits: Codec[A] = variableSizeBits(limitCodec, codec)
  //
  //    /** limit codec for up to `limit` bytes */
  //    def bytes: Codec[A] = variableSizeBytes(limitCodec, codec)
  //
  //    /** limit codec multiplicity for up to `limit` times */
  //    object times {
  //
  //      /** derive codec for storing up to `limit` instances of `A` in a `Seq`-derived collection */
  //      def as[F[_] <: Seq[_]](implicit cbf: CanBuildFrom[F[A], A, F[A]]): Codec[F[A]] = {
  //
  //        def seqCodec(limit: Int): Codec[F[A]] = new Codec[F[A]] {
  //          override def encode(fa: F[A]) = encodeSeq(codec)(fa.asInstanceOf[immutable.Seq[A]])
  //          override def decode(bits: BitVector) = Decoder.decodeCollect[F, A](codec, Some(limit))(bits)
  //        }
  //
  //        (limitCodec >>~ seqCodec).xmap[F[A]]({ case (_, fa) ⇒ fa }, fa ⇒ (fa.length, fa))
  //      }
  //
  //      /** derive codec for storing up to `limit` instances of `A` in an array */
  //      def as[F[_] <: Array[A]](implicit ev: ClassTag[A]): Codec[Array[A]] = {
  //
  //        def seqCodec(limit: Int): Codec[Array[A]] = new Codec[Array[A]] {
  //          override def encode(fa: Array[A]) = encodeSeq(codec)(fa.to[immutable.Seq])
  //          override def decode(bits: BitVector) = Decoder.decodeCollect[Array, A](codec, Some(limit))(bits)
  //        }
  //
  //        (limitCodec >>~ seqCodec).xmap[Array[A]]({ case (_, fa) ⇒ fa }, fa ⇒ (fa.length, fa))
  //      }
  //
  //    }
  //  }
}
| lockstep-one/vault | vault-common/src/main/scala/one/lockstep/util/codec/CollectionCodecsModule.scala | Scala | agpl-3.0 | 5,768 |
package com.github.jeanadrien.gatling.mqtt.actions
/**
*
*/
/**
 * Helpers for comparing received MQTT payloads against expectations.
 */
object PayloadComparison {

    /** Curried element-wise equality check between two byte arrays.
      *
      * Fixed: previously implemented as `a.deep == b.deep`; `Array.deep` is
      * deprecated and removed in Scala 2.13. `sameElements` performs the
      * same element-wise comparison (false on differing lengths).
      */
    val sameBytesContent : Array[Byte] => Array[Byte] => Boolean = a => b => a.sameElements(b)
}
| jeanadrien/gatling-mqtt-protocol | src/main/scala/com/github/jeanadrien/gatling/mqtt/actions/PayloadComparison.scala | Scala | apache-2.0 | 192 |
// NOTE(review): compiler-fuzzer artifact (tests/fuzzy) — this source is
// intentionally malformed and is expected NOT to parse or typecheck; it only
// exercises graceful compiler failure. Do not "fix" the code below.
object i0 {
abstract class i1(i2: i1)
object i3 {
trait i4
val i5 = * {}; this
}).i6
}
object i2 {
val i3: (() => Unit): Unit
def i6(i7: ki2): String | _ = i3(i2, i6, 2)
}
object i7 {
val i7 = new * * 1 i5 9 5 val i6 = i5 + i5 + + i6.i1 + i5 + 4 + i2(i4); {
i6
}.asInstanceOf[Int].mutable(App.i6[String])
}
} | som-snytt/dotty | tests/fuzzy/a4ff76c16ba3ee1d33dd1a84449ec829a9a97aa6.scala | Scala | apache-2.0 | 310 |
// Extraction benchmark (kept deliberately minimal): a case object extending
// an abstract class, returned through its abstract supertype.
abstract class CaseObjectA

case object CaseObjectC extends CaseObjectA

object CaseObjectMain {
  // Returns the singleton case object upcast to CaseObjectA.
  def f(): CaseObjectA = CaseObjectC
}
| epfl-lara/stainless | frontends/benchmarks/extraction/valid/CaseObject2.scala | Scala | apache-2.0 | 135 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import org.apache.hadoop.fs.Path
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.sql.catalyst.expressions.PredicateHelper
import org.apache.spark.sql.types._
/**
 * Runs the shared HadoopFsRelationTest suite against the test-only
 * `SimpleTextSource` data source, plus a few source-specific checks.
 */
class SimpleTextHadoopFsRelationSuite extends HadoopFsRelationTest with PredicateHelper {
  override val dataSourceName: String = classOf[SimpleTextSource].getCanonicalName

  // We have a very limited number of supported types at here since it is just for a
  // test relation and we do very basic testing at here.
  override protected def supportsDataType(dataType: DataType): Boolean = dataType match {
    case _: BinaryType => false
    // We are using random data generator and the generated strings are not really valid string.
    case _: StringType => false
    case _: BooleanType => false // see https://issues.apache.org/jira/browse/SPARK-10442
    case _: CalendarIntervalType => false
    case _: DateType => false
    case _: TimestampType => false
    case _: ArrayType => false
    case _: MapType => false
    case _: StructType => false
    case _: UserDefinedType[_] => false
    case _ => true
  }

  test("save()/load() - partitioned table - simple queries - partition columns in data") {
    withTempDir { file =>
      val basePath = new Path(file.getCanonicalPath)
      val fs = basePath.getFileSystem(SparkHadoopUtil.get.conf)
      val qualifiedBasePath = fs.makeQualified(basePath)

      // Write three rows into each p1=X/p2=Y partition directory; the p1
      // value is repeated inside the data itself.
      for (p1 <- 1 to 2; p2 <- Seq("foo", "bar")) {
        val partitionDir = new Path(qualifiedBasePath, s"p1=$p1/p2=$p2")
        sparkContext
          .parallelize(for (i <- 1 to 3) yield s"$i,val_$i,$p1")
          .saveAsTextFile(partitionDir.toString)
      }

      // p1 appears both as a data column and as a partition column.
      val dataSchemaWithPartition =
        StructType(dataSchema.fields :+ StructField("p1", IntegerType, nullable = true))

      checkQueries(
        spark.read.format(dataSourceName)
          .option("dataSchema", dataSchemaWithPartition.json)
          .load(file.getCanonicalPath))
    }
  }

  test("test hadoop conf option propagation") {
    withTempPath { file =>
      // Test write side
      val df = spark.range(10).selectExpr("cast(id as string)")
      df.write
        .option("some-random-write-option", "hahah-WRITE")
        .option("some-null-value-option", null) // test null robustness
        .option("dataSchema", df.schema.json)
        .format(dataSourceName).save(file.getAbsolutePath)
      assert(SimpleTextRelation.lastHadoopConf.get.get("some-random-write-option") == "hahah-WRITE")

      // Test read side
      val df1 = spark.read
        .option("some-random-read-option", "hahah-READ")
        .option("some-null-value-option", null) // test null robustness
        .option("dataSchema", df.schema.json)
        .format(dataSourceName)
        .load(file.getAbsolutePath)
      df1.count()
      assert(SimpleTextRelation.lastHadoopConf.get.get("some-random-read-option") == "hahah-READ")
    }
  }
}
| spark0001/spark2.1.1 | sql/hive/src/test/scala/org/apache/spark/sql/sources/SimpleTextHadoopFsRelationSuite.scala | Scala | apache-2.0 | 3,747 |
package com.eneco.trading.kafka.connect.twitter
import org.scalatest.{FunSuite, Matchers, BeforeAndAfter}
/**
* Created by andrew@datamountaineer.com on 29/02/16.
* kafka-connect-twitter
*/
/**
 * Shared test fixture mixin providing pre-populated property maps for the
 * Twitter source and sink connector configurations.
 */
trait TestTwitterBase extends FunSuite with Matchers with BeforeAndAfter {

  /** Property map configuring the Twitter source connector. */
  def getConfig = {
    Seq(
      TwitterSourceConfig.CONSUMER_KEY_CONFIG -> "test",
      TwitterSourceConfig.CONSUMER_SECRET_CONFIG -> "c-secret",
      TwitterSourceConfig.SECRET_CONFIG -> "secret",
      TwitterSourceConfig.TOKEN_CONFIG -> "token",
      TwitterSourceConfig.TRACK_TERMS -> "term1",
      TwitterSourceConfig.TWITTER_APP_NAME -> "myApp",
      TwitterSourceConfig.BATCH_SIZE -> "1337",
      TwitterSourceConfig.TOPIC -> "just-a-topic"
    ).toMap
  }

  /** Property map configuring the Twitter sink connector. */
  def getSinkConfig = {
    Seq(
      TwitterSinkConfig.CONSUMER_KEY_CONFIG -> "test",
      TwitterSinkConfig.CONSUMER_SECRET_CONFIG -> "c-secret",
      TwitterSinkConfig.SECRET_CONFIG -> "secret",
      TwitterSinkConfig.TOKEN_CONFIG -> "token",
      TwitterSinkConfig.TOPICS -> "just-a-sink-topic"
    ).toMap
  }
}
| Eneco/kafka-connect-twitter | src/test/scala/com/eneco/trading/kafka/connect/twitter/TestTwitterBase.scala | Scala | apache-2.0 | 1,017 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package com.komanov.serialization.domain.protos.events
// ScalaPB-generated protobuf message (file header: "Do not edit!").
// The comments below are review annotations only and will be lost on the
// next code generation.
@SerialVersionUID(0L)
final case class SiteRevisionSetPb(
    revision: Long = 0L
    ) extends com.trueaccord.scalapb.GeneratedMessage with com.trueaccord.scalapb.Message[SiteRevisionSetPb] with com.trueaccord.lenses.Updatable[SiteRevisionSetPb] {
    // Cached wire size; 0 is the "not yet computed" sentinel.
    @transient
    private[this] var __serializedSizeCachedValue: Int = 0
    private[this] def __computeSerializedValue(): Int = {
      var __size = 0
      // proto3 semantics: the default value (0) is not written to the wire
      if (revision != 0L) { __size += com.google.protobuf.CodedOutputStream.computeUInt64Size(1, revision) }
      __size
    }
    final override def serializedSize: Int = {
      var read = __serializedSizeCachedValue
      if (read == 0) {
        read = __computeSerializedValue()
        __serializedSizeCachedValue = read
      }
      read
    }
    def writeTo(output: com.google.protobuf.CodedOutputStream): Unit = {
      {
        val __v = revision
        if (__v != 0L) {
          output.writeUInt64(1, __v)
        }
      };
    }
    // Reads fields from the stream into a copy of this message, skipping
    // unknown fields (tag 8 = field 1, varint wire type).
    def mergeFrom(__input: com.google.protobuf.CodedInputStream): com.komanov.serialization.domain.protos.events.SiteRevisionSetPb = {
      var __revision = this.revision
      var _done__ = false
      while (!_done__) {
        val _tag__ = __input.readTag()
        _tag__ match {
          case 0 => _done__ = true
          case 8 =>
            __revision = __input.readUInt64()
          case tag => __input.skipField(tag)
        }
      }
      com.komanov.serialization.domain.protos.events.SiteRevisionSetPb(
          revision = __revision
      )
    }
    def withRevision(__v: Long): SiteRevisionSetPb = copy(revision = __v)
    def getField(__field: com.google.protobuf.Descriptors.FieldDescriptor): scala.Any = {
      __field.getNumber match {
        case 1 => {
          val __t = revision
          if (__t != 0L) __t else null
        }
      }
    }
    override def toString: String = com.trueaccord.scalapb.TextFormat.printToUnicodeString(this)
    def companion = com.komanov.serialization.domain.protos.events.SiteRevisionSetPb
}
// ScalaPB-generated companion: descriptor access, default instance and
// lens support. Review comments only; regeneration discards them.
object SiteRevisionSetPb extends com.trueaccord.scalapb.GeneratedMessageCompanion[SiteRevisionSetPb] {
  implicit def messageCompanion: com.trueaccord.scalapb.GeneratedMessageCompanion[SiteRevisionSetPb] = this
  def fromFieldsMap(__fieldsMap: Map[com.google.protobuf.Descriptors.FieldDescriptor, scala.Any]): com.komanov.serialization.domain.protos.events.SiteRevisionSetPb = {
    require(__fieldsMap.keys.forall(_.getContainingType() == descriptor), "FieldDescriptor does not match message type.")
    val __fields = descriptor.getFields
    com.komanov.serialization.domain.protos.events.SiteRevisionSetPb(
      __fieldsMap.getOrElse(__fields.get(0), 0L).asInstanceOf[Long]
    )
  }
  def descriptor: com.google.protobuf.Descriptors.Descriptor = EventsProto.descriptor.getMessageTypes.get(3)
  // This message has no sub-message or enum fields, hence the MatchErrors.
  def messageCompanionForField(__field: com.google.protobuf.Descriptors.FieldDescriptor): com.trueaccord.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__field)
  def enumCompanionForField(__field: com.google.protobuf.Descriptors.FieldDescriptor): com.trueaccord.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__field)
  lazy val defaultInstance = com.komanov.serialization.domain.protos.events.SiteRevisionSetPb(
  )
  implicit class SiteRevisionSetPbLens[UpperPB](_l: com.trueaccord.lenses.Lens[UpperPB, SiteRevisionSetPb]) extends com.trueaccord.lenses.ObjectLens[UpperPB, SiteRevisionSetPb](_l) {
    def revision: com.trueaccord.lenses.Lens[UpperPB, Long] = field(_.revision)((c_, f_) => c_.copy(revision = f_))
  }
  final val REVISION_FIELD_NUMBER = 1
}
| dkomanov/scala-serialization | scala-serialization/src/main/scala/com/komanov/serialization/domain/protos/events/SiteRevisionSetPb.scala | Scala | mit | 3,765 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript
import slamdata.Predef._
import quasar.fp.ski._
import quasar.{RenderTree, Terminal}
import monocle.Iso
import scalaz._
// Placeholder type with exactly one inhabitant (SrcHole): marks the single
// input position inside an expression, i.e. it is isomorphic to Unit.
sealed abstract class Hole
final case object SrcHole extends Hole

object Hole {
  def apply(): Hole = SrcHole

  // Witness that Hole carries no information (isomorphism with Unit).
  def unit = Iso[Hole, Unit](κ(()))(κ(SrcHole))

  implicit val equal: Equal[Hole] = Equal.equalA
  implicit val show: Show[Hole] = Show.showFromToString
  // Rendered as a circle glyph in plan/tree output.
  implicit val renderTree: RenderTree[Hole] = RenderTree.make(κ(Terminal(List("○"), None)))
}
| slamdata/slamengine | qscript/src/main/scala/quasar/qscript/Hole.scala | Scala | apache-2.0 | 1,117 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.parsing.hive
import com.flaminem.flamy.model._
import com.flaminem.flamy.model.columns.{ColumnValue, NoValue}
import com.flaminem.flamy.model.exceptions.UnexpectedBehaviorException
import com.flaminem.flamy.model.names.TableName
import com.flaminem.flamy.parsing.hive.HiveToken.PartitionVar
import com.flaminem.flamy.parsing.hive.ast.NodeFactory
import com.flaminem.flamy.parsing.model.{ColumnDependency, TableDependency}
import org.antlr.runtime.CommonToken
import org.antlr.runtime.tree.{BaseTree, Tree}
import org.apache.hadoop.hive.metastore.api.FieldSchema
import org.apache.hadoop.hive.ql.ErrorMsg
import org.apache.hadoop.hive.ql.lib.Node
import org.apache.hadoop.hive.ql.parse.{HiveParser, _}
import scala.collection.JavaConversions._
import scala.language.implicitConversions
/**
 * Utility functions for inspecting and transforming Hive query ASTs
 * ([[ASTNode]] trees produced by the Hive parser). Provides helpers to
 * extract tables, columns and partitions from parse trees, plus the
 * [[HiveParserUtils.ASTNodeExtension]] enrichment for recursive
 * traversal and transformation of AST nodes.
 *
 * Created by fpin on 8/7/15.
 */
object HiveParserUtils {

  /** Important: we declare an implicit conversion from node to ASTNode */
  implicit def treeToASTNode(node: Tree): ASTNode = {
    node.asInstanceOf[ASTNode]
  }

  /** Implicit conversion from a generic [[Node]] to an [[ASTNode]]. */
  implicit def nodeToASTNode(node: Node): ASTNode = {
    node.asInstanceOf[ASTNode]
  }

  /**
   * Get the children of a Node, avoid nulls for leaves.
   * @param pt the node whose children are requested
   * @return the children, or Nil for a leaf
   */
  def getChildren(pt: ASTNode): Seq[ASTNode] = {
    if(pt.getChildCount > 0){
      pt.getChildren.toSeq.map{_.asInstanceOf[ASTNode]}
    }
    else {
      Nil
    }
  }

  /**
   * Set the children of a Node, and returns it.
   * Note: this mutates `pt` in place — existing children are deleted
   * (in reverse order, so indices stay valid) before the new ones are added.
   * @param pt the node to mutate
   * @return `pt` itself, with its children replaced
   */
  def setChildren(pt: ASTNode, children: Node*): ASTNode = {
    for(i <- (0 until pt.getChildCount).reverse){
      pt.deleteChild(i)
    }
    for(child <- children){
      pt.addChild(child)
    }
    pt
  }

  /**
   * Return a fresh copy of the tree
   * (deep copy: every node and token is rebuilt).
   */
  def copy(tree: ASTNode): ASTNode = {
    val node = new ASTNode(new CommonToken(tree.getType, tree.toString))
    setChildren(node, getChildren(tree).map{copy}:_*)
    node
  }

  /** Return the last child of `tree`; fails if the tree has no child. */
  def getLastChild(tree: BaseTree): ASTNode =
    tree match {
      case _ if tree.getChildCount == 0 => throw new IllegalArgumentException("This tree has no child")
      case _ => tree.getChild(tree.getChildren.size - 1).asInstanceOf[ASTNode]
    }

  /** Build a [[TableDependency]] from a TOK_TABNAME node. */
  def getTableDependency(tableType: TableType, pt: ASTNode): TableDependency = {
    new TableDependency(getTable(tableType, pt))
  }

  /** Build a [[Table]] from a TOK_TABNAME node (with or without schema qualifier). */
  def getTable(tableType: TableType, pt: ASTNode): Table = {
    assert(pt.getType == HiveParser.TOK_TABNAME, "This should not happen, please report a bug.")
    if (pt.getChildCount == 1) {
      /* SELECT * FROM table */
      val tableName: String = getName(pt.getChild(0))
      new Table(tableType, tableName)
    }
    else {
      /* SELECT * FROM schema.table */
      val schemaName: String = getName(pt.getChild(0))
      val tableName: String = getName(pt.getChild(1))
      new Table(tableType, tableName, schemaName)
    }
  }

  /* Shape of the subtree handled by getPartitions:
  TOK_TAB
  ├──TOK_TABNAME
  │  └──T1
  └──TOK_PARTSPEC
     └──TOK_PARTVAL
        ├──day
        └──2016-01-01
  */
  /** Extract the partition columns declared under a TOK_TAB node; a TOK_PARTVAL
    * with a single child is a dynamic partition (no value). */
  def getPartitions(pt: ASTNode): Seq[PartitionColumn] = {
    assert(pt.getType == HiveParser.TOK_TAB)
    val tok_partvals: Seq[ASTNode] =
      pt.findNodesWithTypes(
        allowedTypes = Set(HiveParser.TOK_INSERT, HiveParser.TOK_DESTINATION, HiveParser.TOK_TAB, HiveParser.TOK_PARTSPEC),
        finalTypes = Set(HiveParser.TOK_PARTVAL)
      )
    for(tok_partval <- tok_partvals) yield {
      val name: String = getName(tok_partval.getChild(0))
      val value: ColumnValue =
        if (tok_partval.getChildCount > 1) {
          ColumnValue(getName(tok_partval.getChild(1)))
        }
        else {
          NoValue
        }
      new PartitionColumn(name, value)
    }
  }

  /**
   * Build the destination [[Column]] described by a TOK_SELEXPR node,
   * resolving the column's value and alias. Returns None for nodes that are
   * not select expressions and for TRANSFORM/REDUCE clauses.
   */
  @throws(classOf[SemanticException])
  def getDestColumn(pt: ASTNode, aliases: AliasMap, columnCount: Int): Option[Column] = {
    if (HiveParser.TOK_SELEXPR != pt.getType) {
      None
    }
    else {
      getChildren(pt).toList match {
        case expr::_ if expr.getType == HiveParser.TOK_ALLCOLREF =>
          Some(parseStar(expr, aliases))
        case expr::_ if expr.getType == HiveParser.TOK_TRANSFORM /* REDUCE ... */ =>
          None
        case expr::Nil =>
          val (value, alias) = getColumnValue(expr, aliases, columnCount)
          Some(new Column(alias, None, None, value))
        case expr::alias::Nil =>
          /* an explicit alias overrides the inferred one */
          val (value, _) = getColumnValue(expr, aliases, columnCount)
          Some(new Column(getName(alias), None, None, value))
        case _ => None
      }
    }
  }

  /**
   * If the value is a partition variable, we don't want to keep the ColumnDependency,
   * as it might be refering to a column name outside of the current subquery scope.
   * @param s the literal text of the value
   * @return the parsed value, with column dependencies replaced by NoValue
   */
  private def getColumnValueNoPartitionVariable(s: String) = {
    ColumnValue(s) match {
      case cd: ColumnDependency => NoValue
      case v => v
    }
  }

  /** Compute the (value, default alias) pair for a select expression; the
    * default alias follows Hive's "_cN" naming for unnamed columns. */
  private def getColumnValue(pt: ASTNode, aliases: AliasMap, columnCount: Int): (ColumnValue, String) = {
    pt.getType match {
      case HiveParser.StringLiteral =>
        (getColumnValueNoPartitionVariable(getName(pt)), "_c" + columnCount)
      case HiveParser.Number =>
        (ColumnValue(getName(pt)), "_c" + columnCount)
      case HiveParser.DOT /* SELECT T.col FROM T */ =>
        /* We need to avoid a nasty corner case for map["key"].sub_value */
        val hasBrackets = pt.findNodesWithTypes(Set(HiveParser.DOT), Set(HiveParser.LSQUARE)).nonEmpty
        if(hasBrackets){
          (NoValue, getName(pt.getChild(1)))
        }
        else{
          val col: ColumnDependency = parseDot(pt, aliases)
          (col, col.columnName.split("[.]").last)
        }
      case HiveParser.TOK_TABLE_OR_COL /* SELECT col FROM T */ =>
        val colName = getName(pt.getChild(0))
        (new ColumnDependency(colName), colName)
      case _ =>
        (NoValue, "_c" + columnCount)
    }
  }

  /* Shape of the subtree handled by getDestColumns:
  TOK_INSERT
  ├──TOK_DESTINATION
  │  └──TOK_DIR
  │     └──TOK_TMP_FILE
  ├──TOK_SELECT | TOK_SELECTDI
  │  ├──TOK_SELEXPR
  │  └──[TOK_SELEXPR]
  ...
  */
  /** List the (alias, expression) pairs of the select expressions under a
    * TOK_INSERT node; unnamed expressions get Hive's "_cN" default alias. */
  @throws(classOf[SemanticException])
  def getDestColumns(tree: ASTNode): Seq[(String, ASTNode)] = {
    assert(tree.getType == HiveParser.TOK_INSERT)
    for{
      tok_select <- tree.getChildrenWithTypes(Set(HiveParser.TOK_SELECT, HiveParser.TOK_SELECTDI))
      (tok_selexpr, i) <- tok_select.getChildrenWithTypes(Set(HiveParser.TOK_SELEXPR)).zipWithIndex
    } yield {
      val col = Option(tok_selexpr.getChild(1)).map{getName(_)}.getOrElse("_c"+i)
      /* NB: this local `tree` shadows the method parameter */
      val tree = tok_selexpr.getChild(0).asInstanceOf[ASTNode]
      col -> tree
    }
  }

  /**
   * Get the list of column names defined in an insert
   * @param tree a TOK_INSERT node
   * @return the destination column names, in order
   */
  def getDestColumnNames(tree: ASTNode): Seq[String] = {
    getDestColumns(tree).map{_._1}
  }

  /**
   * Get the list of columns defined in a CREATE TABLE.
   * @param pt the node holding the column definitions
   * @throws org.apache.hadoop.hive.ql.parse.SemanticException
   * @return
   */
  @throws(classOf[SemanticException])
  def getColumns(pt: ASTNode): Seq[Column] = {
    BaseSemanticAnalyzer.getColumns(pt, true).map{
      case col: FieldSchema => new Column(col)
    }
  }

  /** Resolve a table alias against the known aliases of the current scope. */
  def findTable(tableRef: String, aliases: AliasMap): Option[Table] = {
    aliases(tableRef)
  }

  /** Unescaped (backtick-free) name of an identifier node. */
  def getName(node: ASTNode): String = {
    BaseSemanticAnalyzer.getUnescapedName(node)
  }

  /**
   * Recursively parse multiple dots.
   * In the special case 't.map["key"].sub_value', we return Seq("t", "map").
   *
   * @param tree a DOT / TOK_TABLE_OR_COL / LSQUARE subtree
   * @return the dotted path components, outermost qualifier first
   */
  def parseDot(tree: ASTNode): Seq[String] = {
    /* We can't use recParse here, because the map["key"].sub_value is too complex for it */
    def aux(pt: ASTNode, res: List[String]): Seq[String] = {
      pt.getType match {
        case HiveParser.TOK_TABLE_OR_COL => getName(pt.getChild(0)) :: res
        case HiveParser.DOT => aux(pt.getChild(0), getName(pt.getChild(1)) :: res)
        case HiveParser.LSQUARE => aux(pt.getChild(0), Nil)
        case _ => throw new UnexpectedBehaviorException()
      }
    }
    aux(tree, Nil)
  }

  /** Parse a dotted reference, disambiguating "table.column" from "column.attr"
    * by looking the first component up in the alias map. */
  def parseDot(pt: ASTNode, aliases: AliasMap): ColumnDependency = {
    /* Two cases are possible:
     * 1) SELECT T.col FROM T
     * or
     * 2) SELECT col.attr FROM T
     */
    val col: Seq[String] = parseDot(pt)
    val tableOrCol = col.head
    /* we determine which case we're in : */
    findTable(tableOrCol, aliases) match {
      case Some(table) =>
        /* 1) SELECT T.col FROM T */
        new ColumnDependency(col(1), table.tableName, table.getSchemaName)
      case None =>
        /* 2) SELECT col.attr FROM T */
        new ColumnDependency(col.mkString("."))
    }
  }

  /** Build the [[ColumnDependency]] referenced by a column expression node,
    * if the node is a column reference ("*", "T.*", "T.col" or "col"). */
  @throws(classOf[SemanticException])
  def getColumnDependency(pt: ASTNode, aliases: AliasMap): Option[ColumnDependency] = {
    pt.getType match {
      case HiveParser.TOK_ALLCOLREF if pt.getChildCount == 1 =>
        /* SELECT T.* FROM T */
        val col = "*"
        val tableRef = getName(getLastChild(getLastChild(pt)))
        findTable(tableRef, aliases) match {
          case None => throw new SemanticException(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg("'" + tableRef + "'"))
          case Some(table) => Some(new ColumnDependency(col, table.tableName, table.getSchemaName))
        }
      case HiveParser.TOK_ALLCOLREF =>
        /* SELECT * FROM T */
        Some(new ColumnDependency("*"))
      case HiveParser.DOT =>
        Some(parseDot(pt, aliases))
      case HiveParser.TOK_TABLE_OR_COL =>
        Some(new ColumnDependency(getName(getLastChild(pt))))
      case _ =>
        None
    }
  }

  /**
   * Return a column with the given name
   * @param pt a column reference or select-expression node
   * @return the column, or None when the node is not a column expression
   */
  def getColumn(pt: ASTNode): Option[Column] = {
    pt.getType match {
      case HiveParser.DOT | HiveParser.TOK_TABLE_OR_COL => Some(new Column(getName(getLastChild(pt))))
      case HiveParser.TOK_SELEXPR if pt.getChildCount > 1 => Some(new Column(getName(getLastChild(pt))))
      case HiveParser.TOK_SELEXPR => getColumn(pt.getChild(0))
      case _ => None
    }
  }

  /** Resolve a TOK_ALLCOLREF ("*" or "T.*") into a [[Column]], qualifying it
    * with the table's full name when an alias is given. */
  def parseStar(pt: ASTNode, aliases: AliasMap): Column = {
    if (pt.getChildCount == 1) {
      /* SELECT T.* FROM T */
      val tableRef: String = getName(getLastChild(getLastChild(pt)))
      findTable(tableRef, aliases) match {
        case None => throw new SemanticException(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg("'" + tableRef + "'"))
        case Some(table) => new Column(table.fullName + ".*")
      }
    }
    else {
      /* SELECT * FROM T */
      new Column("*")
    }
  }

  /**
   * Transforms "c1 AND c2 AND ..." into Seq(c1, c2, ...)
   * @param pt the root of a boolean expression
   * @return the AND-separated clauses, flattened
   */
  def andToClauses(pt: ASTNode): Seq[ASTNode] = {
    pt.getType match {
      case HiveParser.KW_AND => getChildren(pt).flatMap{andToClauses}
      case _ => pt::Nil
    }
  }

  /**
   * Transforms Seq(c1, c2, ...) into "c1 AND c2 AND ..."
   * @param clauses the clauses to combine (None is returned when empty)
   * @return
   */
  def clausesToAnd(clauses: Seq[ASTNode]): Option[ASTNode] = {
    clauses.reduceOption{NodeFactory.KW_AND}
  }

  /**
   * if multi-insert, we check that they insert into the same tableName and return it.
   */
  private def resolveMultiInsertTableNames(tableNames: Seq[TableName]): TableName = {
    if(tableNames.isEmpty) {
      throw new UnexpectedBehaviorException("No insert found")
    }
    else {
      val ss = tableNames.distinct
      if(ss.size > 1){
        throw new FlamyParsingException("When doing multi-insert, all inserts must go to the same table. " +
          s"Found ${ss.take(2).mkString(" and ")}")
      }
      tableNames.head
    }
  }

  /**
   * Return the name of the table being inserted into.
   * For multi-inserts, all branches must target the same table.
   * @param tree the root of the parsed statement
   * @return
   */
  def getInsertedTableName(tree: ASTNode): TableName = {
    // type 0 is the root "nil" node wrapping the whole statement — TODO confirm
    assert(tree.getType == 0)
    val tok_tabnames: Seq[ASTNode] =
      tree.getChild(0).asInstanceOf[ASTNode]
        .findNodesWithTypes(
          Set(
            HiveParser.TOK_QUERY,
            HiveParser.TOK_INSERT,
            HiveParser.TOK_DESTINATION,
            HiveParser.TOK_INSERT_INTO,
            HiveParser.TOK_TAB
          ),
          Set(HiveParser.TOK_TABNAME)
        )
    val tableNames = tok_tabnames
      .map{getChildren(_)}
      .map{
        case Seq(schema, table) => TableName(getName(schema), getName(table))
        case s => throw new UnexpectedBehaviorException(s"Table names should be fully qualified. Got: ${s.mkString(".")}")
      }
    resolveMultiInsertTableNames(tableNames)
  }

  /**
   * Returns the names of all the partition variables occurring in this tree.
   */
  def getPartitionVariables(tree: ASTNode): Seq[String] = {
    tree.recParse{
      case PartitionVar(name) => name::Nil
    }
  }

  /** Returns true if at least one node of this tree is a partition variable. */
  def hasPartitionVariables(tree: ASTNode): Boolean = {
    getPartitionVariables(tree).nonEmpty
  }

  /** Enrichment adding recursive traversal/transformation helpers to [[ASTNode]]. */
  implicit class ASTNodeExtension(that: ASTNode) {
    /**
     * Walks down the tree and find all nodes of type in finalTypes
     * that are reachable by only traversing nodes of type in allowedTypes
     *
     * @param allowedTypes node types the walk may traverse
     * @param finalTypes node types to collect (not traversed further)
     * @return
     */
    def findNodesWithTypes(allowedTypes: Set[Int], finalTypes: Set[Int]): Seq[ASTNode] = {
      recParse{
        case pt if finalTypes.contains(pt.getType) => pt::Nil
        case pt if !allowedTypes.contains(pt.getType) => Nil
      }
    }

    /**
     * @param types the accepted node types
     * @return the list of the children of that node that have the specified type.
     */
    def getChildrenWithTypes(types: Set[Int]): Seq[ASTNode] = {
      getChildren(that).filter{pt => types.contains(pt.getType)}
    }

    /**
     * @param tpe the accepted node type
     * @return the list of the children of that node that have the specified type.
     */
    def getChildrenWithType(tpe: Int): Seq[ASTNode] = {
      getChildren(that).filter{_.getType == tpe}
    }

    /**
     * Replace the child with the specified type with the provided optional replacement.
     * If is node has no child with the specified type, nothing is done.
     * Passing None removes the child.
     * @param tpe type of the child to replace
     * @param replacement the replacement node, or None to delete
     * @return
     */
    def replaceChildWithType(tpe: Int, replacement: Option[ASTNode]): ASTNode = {
      val (before, after) = getChildren(that).span{_.getType != tpe}
      if(after.isEmpty){
        that
      }
      else{
        setChildren(that, before++replacement++after.tail:_*)
      }
    }

    /**
     * Apply a function to the children this nodes, and replace them with the result.
     * @param f mapping from one child to its replacement(s)
     * @return this tree, after the transformation has been applied to its children.
     */
    def transformChildren(f: Function[ASTNode, Seq[ASTNode]]): ASTNode = {
      setChildren(that, getChildren(that).flatMap{f}:_*)
    }

    /**
     * Keep only the children of this node that satisfy the predicate.
     * @param f predicate selecting the children to keep
     * @return this tree, after its children have been filtered.
     */
    def filterChildren(f: Function[ASTNode, Boolean]): ASTNode = {
      setChildren(that, getChildren(that).filter{f}:_*)
    }

    /**
     * Recursively apply a partial function to a tree, returning the result.
     * If the partial function doesn't match node, we recursively visit its children
     * @param pf partial extraction function
     * @tparam T type of the extracted results
     * @return
     */
    def recParse[T](pf: PartialFunction[(ASTNode), Seq[T]]): Seq[T] = {
      recParse(Set[Int]())(pf)
    }

    /**
     * Recursively apply a partial function to a tree, returning the result.
     * If the partial function doesn't match a node, we recursively visit its children
     * @param allowedTypes The recursion will only explore the children of the nodes whose type is in this set.
     * @param pf partial extraction function
     * @tparam T type of the extracted results
     * @return
     */
    def recParse[T](allowedTypes: Set[Int])(pf: PartialFunction[(ASTNode), Seq[T]]): Seq[T] = {
      def defaultRecParse(pt: ASTNode): Seq[T] = {
        // NB: `pt` is always `that` here (applyOrElse below), so the two are interchangeable
        if(allowedTypes.isEmpty || allowedTypes.contains(that.getType)) {
          getChildren(pt).flatMap{_.recParse(allowedTypes){pf}}
        }
        else {
          Nil
        }
      }
      pf.applyOrElse(that, defaultRecParse)
    }

    /**
     * Recursively apply a partial transformation function to a tree, returning the transformed tree.
     * If the partial function doesn't match a node, we recursively transform its children
     * @param pf partial transformation function
     * @return
     */
    def recTransform(pf: PartialFunction[(ASTNode), Seq[ASTNode]]): Seq[ASTNode] = {
      recTransform(Set[Int]()){pf}
    }

    /**
     * Recursively apply a partial transformation function to a tree, returning the transformed tree.
     * If the partial function doesn't match a node, we recursively transform its children
     * @param allowedTypes The recursion will only explore the children of the nodes whose type is in this set.
     * @param pf partial transformation function
     * @return
     */
    def recTransform(allowedTypes: Set[Int])(pf: PartialFunction[(ASTNode), Seq[ASTNode]]): Seq[ASTNode] = {
      def defaultRecParse(pt: ASTNode): Seq[ASTNode] = {
        // NB: `pt` is always `that` here (applyOrElse below), so the two are interchangeable
        if(allowedTypes.isEmpty || allowedTypes.contains(that.getType)) {
          pt.transformChildren{_.recTransform(allowedTypes){pf}}::Nil
        }
        else {
          Nil
        }
      }
      pf.applyOrElse(that, defaultRecParse)
    }
  }

  /** Recursively renders `tree` with box-drawing characters;
    * `prefix` carries the indentation inherited from the parent levels
    * (3 characters per level, hence the `length - 3` slicing). */
  @throws(classOf[ParseException])
  private def recDrawTree(prefix: String, tree: ASTNode): String = {
    val sb: StringBuilder = new StringBuilder
    if (prefix.length > 0) {
      if (prefix.endsWith("│  ")) {
        sb.append(prefix.substring(0, prefix.length - 3) + "├──")
      }
      else {
        sb.append(prefix.substring(0, prefix.length - 3) + "└──")
      }
    }
    else {
      sb.append(prefix)
    }
    sb.append(tree.toString + "\\n")
    tree.getChildren match {
      //noinspection ScalaStyle
      case null => ()
      case children =>
        children.dropRight(1).foreach {
          case child => sb.append(recDrawTree(prefix + "│  ", child.asInstanceOf[ASTNode]))
        }
        sb.append(recDrawTree(prefix + "   ", children.last.asInstanceOf[ASTNode]))
    }
    sb.toString()
  }

  /**
   * Draw a syntactic tree. Used for debugging.
   * @param tree the tree to render
   * @return the ASCII-art rendering, or "" if rendering failed
   */
  def drawTree(tree: ASTNode): String = {
    try {
      recDrawTree("", tree)
    }
    catch {
      case e: ParseException =>
        e.printStackTrace()
        ""
    }
  }
}
| flaminem/flamy | src/main/scala/com/flaminem/flamy/parsing/hive/HiveParserUtils.scala | Scala | apache-2.0 | 19,080 |
package org.jetbrains.sbt
package project.structure
import java.io.File
import com.intellij.openapi.application.PathManager
import com.intellij.openapi.util.io.FileUtil
/**
 * Exception describing a failure that occurred while running sbt during
 * project import; the message is meant to be shown to the user.
 *
 * @author Pavel Fatin
 */
class SbtException(message: String) extends Exception(message)
object SbtException {
  val ACCEPTABLE_TO_DISPLAY_LOG_SIZE = 20

  import Utils._

  /**
   * Builds an `SbtException` from the raw sbt output `log`, producing a
   * dedicated message when the failure is caused by unresolved dependencies.
   */
  def fromSbtLog(log: String): SbtException = {
    val logLines = log.lines.toSeq
    if (logLines.exists(_.startsWith("sbt.ResolveException"))) handleUnresolvedDeps(logLines)
    else new SbtException(SbtBundle("sbt.import.error", log))
  }

  /**
   * Renders the unresolved-dependency warnings ("[warn] :: ...") as an HTML
   * list and wraps them, together with a link to the dumped log file, into
   * an `SbtException`.
   */
  private def handleUnresolvedDeps(lines: Seq[String]): SbtException = {
    val dependencies = lines
      .filter(_.startsWith("[warn]"))
      .map(_.substring(6).trim)
      .filter(t => t.startsWith(":: ") && !t.contains("UNRESOLVED DEPENDENCIES"))
      .map(t => s"<li>${t.substring(2)}</li>")
      .mkString

    new SbtException(SbtBundle("sbt.import.unresolvedDependencies", dependencies,
      dumpLog(joinLines(lines)).toURI.toString))
  }

  private object Utils {
    /** Joins lines using the platform line separator. */
    def joinLines(lines: Seq[String]): String =
      lines.mkString(System.getProperty("line.separator"))

    /** Writes `log` to "sbt.last.log" in the IDE log directory and returns the file. */
    def dumpLog(log: String): File = {
      val logDir = new File(PathManager.getLogPath)
      logDir.mkdirs()
      val file = new File(logDir, "sbt.last.log")
      file.createNewFile()
      file.write(log)
      file.getAbsoluteFile
    }
  }
}
import sbt._
import Keys._
import AndroidKeys._
/**
 * Helper functions shared by the sbt-android-plugin tasks: locating the
 * Android SDK, invoking `adb`, and reading data from AndroidManifest.xml.
 */
object AndroidHelpers {
  /** Task that creates the directory denoted by `path`. */
  def directory(path: SettingKey[File]) = path map (IO.createDirectory(_))

  /** Returns the SDK path taken from the first environment variable in `es`
    * that is set, or None when none of them is defined. */
  def determineAndroidSdkPath(es: Seq[String]) = {
    val paths = for ( e <- es; p = System.getenv(e); if p != null) yield p
    if (paths.isEmpty) None else Some(Path(paths.head).asFile)
  }

  /** True when running on a Windows JVM. */
  def isWindows = System.getProperty("os.name").startsWith("Windows")
  /** Suffix to append to SDK tool names on Windows (".bat"), empty elsewhere. */
  def osBatchSuffix = if (isWindows) ".bat" else ""

  /** JVM memory options to pass to dx, empty on Windows (see link below). */
  def dxMemoryParameter(javaOpts: String) = {
    // per http://code.google.com/p/android/issues/detail?id=4217, dx.bat
    // doesn't currently support -JXmx arguments. For now, omit them in windows.
    if (isWindows) "" else javaOpts
  }

  /** Reads the integer attribute `key` of the manifest's <uses-sdk> element
    * (fails if the manifest has no <uses-sdk> element). */
  def usesSdk(mpath: File, schema: String, key: String) =
    (manifest(mpath) \\ "uses-sdk").head.attribute(schema, key).map(_.text.toInt)

  /** Runs `adb` with the given arguments, logging its output and failing the
    * build when adb reports an error. */
  def adbTask(dPath: String, emulator: Boolean, s: TaskStreams, action: String*) {
    val (exit, out) = adbTaskWithOutput(dPath, emulator, s, action:_*)
    if (exit != 0 ||
      // adb doesn't bother returning a non-zero exit code on failure
      out.toString.contains("Failure")) {
      s.log.error(out.toString)
      sys.error("error executing adb")
    } else s.log.info(out.toString)
  }

  /** Runs `adb` against the emulator ("-e") or a device ("-d"), returning
    * (exit code, combined stdout/stderr output). */
  def adbTaskWithOutput(dPath: String, emulator: Boolean, s: TaskStreams, action: String*) = {
    val adb = Seq(dPath, if (emulator) "-e" else "-d") ++ action
    s.log.debug(adb.mkString(" "))
    val out = new StringBuffer
    val exit = adb.run(new ProcessIO(input => (),
                                     output => out.append(IO.readStream(output)),
                                     error => out.append(IO.readStream(error)),
                                     inheritedInput => false)
                      ).exitValue()
    (exit, out.toString)
  }

  /** Task launching the application's MAIN activity via `adb shell am start`. */
  def startTask(emulator: Boolean) =
    (dbPath, manifestSchema, manifestPackage, manifestPath, streams) map {
      (dp, schema, mPackage, amPath, s) =>
      adbTask(dp.absolutePath,
              emulator, s,
              "shell", "am", "start", "-a", "android.intent.action.MAIN",
              "-n", mPackage+"/"+
              launcherActivity(schema, amPath.head, mPackage))
    }

  /** Finds the fully-qualified name of the activity declaring the MAIN intent
    * action in the manifest, or "" when there is none; relative activity
    * names are qualified with `mPackage`. */
  def launcherActivity(schema: String, amPath: File, mPackage: String) = {
    val launcher = for (
         activity <- (manifest(amPath) \\\\ "activity");
         action <- (activity \\\\ "action");
         name = action.attribute(schema, "name").getOrElse(sys.error{
            "action name not defined"
          }).text;
         if name == "android.intent.action.MAIN"
    ) yield {
      val act = activity.attribute(schema, "name").getOrElse(sys.error("activity name not defined")).text
      if (act.contains(".")) act else mPackage+"."+act
    }
    launcher.headOption.getOrElse("")
  }

  /** Loads AndroidManifest.xml as XML. */
  def manifest(mpath: File) = xml.XML.loadFile(mpath)
}
| taisukeoe/sbt-android-plugin | src/main/scala/AndroidHelpers.scala | Scala | bsd-3-clause | 2,832 |
package edu.arizona.sista.twitter4food
import java.io._
/**
 * Java-serialization helpers for persisting models to disk.
 */
object Serialization {

  /**
   * Serializes `model` to `serializedFile` using Java serialization.
   * The stream is always closed, even when `writeObject` fails
   * (the original version leaked the stream on exceptions).
   */
  def save[A <: Serializable](model: A, serializedFile: File) = {
    val oos = new ObjectOutputStream(new FileOutputStream(serializedFile))
    try {
      oos.writeObject(model)
    } finally {
      oos.close()
    }
  }

  /**
   * Deserializes an object of type `A` from `serializedFile`.
   * The cast is unchecked: the caller must ask for the type that was saved.
   * The stream is always closed, even when `readObject` fails.
   */
  def load[A](serializedFile: File): A = {
    val ois = new ObjectInputStream(new FileInputStream(serializedFile))
    try {
      ois.readObject().asInstanceOf[A]
    } finally {
      ois.close()
    }
  }

  /** Convenience overload taking a file path instead of a [[File]]. */
  def load[A](serializedFilename: String): A = load(new File(serializedFilename))

  /** Ad-hoc smoke test: loads a hard-coded tweet dump and prints the first tweet. */
  def main(args: Array[String]) = {
    val tweets: List[Tweet] = load(new File("/home/dfried/tweets_small.dat"))
    println(tweets(0))
  }
}
| VivianLuwenHuangfu/twitter4food | src/main/scala/edu/arizona/sista/twitter4food/Serialization.scala | Scala | apache-2.0 | 685 |
package skinny.injection
/**
 * Thrown when the Scaldi dependency-injection configuration is invalid or
 * cannot be loaded.
 *
 * @param msg description of the configuration problem
 * @param e   the underlying cause
 */
case class ScaldiConfigException(msg: String, e: Throwable)
  extends Exception(msg, e)
| holycattle/skinny-framework | scaldi/src/main/scala/skinny/injection/ScaldiConfigException.scala | Scala | mit | 157 |
package br.gov.lexml.parser.pl.xhtml
import java.io.CharArrayReader
import java.io.FileReader
import java.io.BufferedReader
import org.apache.commons.io.IOUtils
import org.apache.commons.io.filefilter.FileFilterUtils
import java.io.FileWriter
import java.io.BufferedWriter
import java.io.IOException
import java.io.FileOutputStream
import java.io.FileInputStream
import java.io.BufferedOutputStream
import java.io.BufferedInputStream
import java.io.InputStream
import java.io.OutputStream
import java.io.File
import scala.xml._
import scala.util.matching.Regex
import grizzled.slf4j.Logging
import org.apache.commons.io.FileUtils
import java.io.InputStreamReader
import java.io.ByteArrayInputStream
import java.io.StringReader
import scala.xml.parsing.NoBindingFactoryAdapter
import org.apache.commons.io.filefilter.PrefixFileFilter
import java.io.FileFilter
import scala.xml.parsing.XhtmlParser
import scala.io.BufferedSource
import scala.xml.Source
import scala.xml.Source
import scala.xml.Source
import scala.xml.Source
import br.gov.lexml.parser.pl.docx.DOCXReader
import scala.io.Codec
/** Result of converting a source document to XHTML. */
abstract class XHTMLProcessorResult
/** Conversion failed; no nodes could be produced. */
case object Failure extends XHTMLProcessorResult
/** Conversion succeeded, yielding the resulting XHTML nodes. */
case class Success(result: List[Node]) extends XHTMLProcessorResult
object TextUtils {

  /**
   * Cleans up the textual content of a converted document: strips control
   * characters (BEL, US) and bidi override marks, and normalizes
   * typographic apostrophes (to backticks) and en-dashes (to hyphens),
   * returning the result re-encoded as UTF-8.
   */
  def fixXHTML(data: Array[Byte]): Array[Byte] = {
    val text = new String(data, "utf-8")
    val cleaned = text
      .replaceAll("\\u0007", "")
      .replaceAll("\\u001f", "")
      .replace(0x92: Char, '`')
      .replaceAll("’", "`")
      .replace(0x202d: Char, ' ')
      .replace(0x202c: Char, ' ')
      .replace('–', '-')
    cleaned.getBytes("utf-8")
  }
}
/** A document converter between file formats identified by their extensions. */
trait Converter {

  /**
   * Converts `srcData` (a document with extension `srcExtension`) into the
   * format denoted by `dstExtension`, returning the converted bytes.
   */
  def convert(srcExtension: String, srcData: Array[Byte], dstExtension: String): Array[Byte]

  /**
   * Quietly deletes every file in `dir` whose name starts with `prefix`.
   * `File.listFiles` returns null when `dir` does not exist or is not a
   * directory; that case is now treated as "nothing to delete" instead of
   * throwing a NullPointerException.
   */
  def deleteByPrefix(dir : File, prefix : String) =
    Option(dir.listFiles(new PrefixFileFilter(prefix) : FileFilter))
      .getOrElse(Array.empty[File])
      .foreach(f => FileUtils.deleteQuietly(f))
}
/**
 * [[Converter]] handling the docx -> xhtml case with the built-in
 * [[DOCXReader]]; every other conversion is delegated to `otherConverter`.
 */
final class DOCXConverter(otherConverter : Converter) extends Converter {
  override def convert(srcExtension: String, srcData: Array[Byte], dstExtension: String): Array[Byte] = {
    if (srcExtension == "docx" && dstExtension == "xhtml") {
      // DOCXReader returns an Option; .get fails loudly when reading fails
      val xhtml = DOCXReader.readDOCX(new ByteArrayInputStream(srcData)).get
      xhtml.toString.getBytes("utf-8")
    } else {
      otherConverter.convert(srcExtension, srcData, dstExtension)
    }
  }
}
/**
 * [[Converter]] backed by the external `abiword` command-line tool; supports
 * converting office documents to "xhtml" or "pdf".
 *
 * @param removeTemporaryFiles when true (default), the temporary source and
 *                             output files are deleted after conversion
 */
final class AbiwordConverter(val removeTemporaryFiles: Boolean = true) extends Converter with Logging {

  import TextUtils._

  /** Identity post-processing (used for binary outputs such as pdf). */
  def noPostProc(data: Array[Byte]) = data

  /** Writes `srcData` to a temp file, runs abiword on it, and returns the
    * (post-processed) bytes of the output file abiword wrote next to it. */
  override def convert(srcExtension: String, srcData: Array[Byte], dstExtension: String) = {
    logger.info("abiword.convert: starting: srcExtension = " + srcExtension + ", dstExtension = " + dstExtension)
    val srcFile = File.createTempFile("lexml-parser-pl", "." + srcExtension)
    logger.info("abiword.convert: srcFile = " + srcFile)
    val baseName = srcFile.getName.substring(0, srcFile.getName.length - srcExtension.length - 1)
    val baseDir = srcFile.getParentFile
    // abiword writes its output next to the input, with the target extension
    val destFile = new File(baseDir, baseName + "." + dstExtension)
    logger.info("abiword.convert: destFile = " + destFile)
    val (params, postProc) = dstExtension match {
      case "xhtml" ⇒ (List("--to=xhtml", "--exp-props=html4: no; declare-xml: yes; use-awml:no; embed-css: yes; embed-images: yes"), fixXHTML(_))
      case "pdf" ⇒ (List("--to=pdf"), noPostProc(_))
      case _ ⇒ throw new RuntimeException("Abiword Converter does not support extension: " + dstExtension)
    }
    logger.info("abiword.convert: params = " + params)
    try {
      //val srcPath = srcFile.getCanonicalPath
      FileUtils.writeByteArrayToFile(srcFile, srcData)
      val cmd: Array[String] = (("/usr/bin/abiword" :: params) :+ srcFile.getPath).toArray
      logger.info("running " + cmd.mkString(" "))
      val p = Runtime.getRuntime.exec(cmd, Array[String](), srcFile.getParentFile)
      logger.info("returned from abiword")
      p.waitFor
      postProc(FileUtils.readFileToByteArray(destFile))
    } finally {
      // clean up both the source file and any output files sharing its base name
      if (removeTemporaryFiles) {
        deleteByPrefix(baseDir,baseName)
      }
    }
  }
}
/**
 * [[Converter]] backed by an external OpenOffice-based converter script
 * (pyodconverter); "xhtml" output is produced by converting to html and
 * cleaning it up with TagSoup.
 *
 * @param removeTemporaryFiles when true (default), the temporary source and
 *                             output files are deleted after conversion
 */
final class OpenOfficeConverter(val removeTemporaryFiles: Boolean = true) extends Converter with Logging {

  // path to the external conversion script
  val pyodconverter = "/usr/local/bin/docconverter"

  import TextUtils._

  /** Cleans the produced html (fixXHTML) and re-parses it leniently with
    * TagSoup, returning well-formed XML re-encoded as UTF-8. */
  def htmlPostProc(data: Array[Byte]) = {
    val data2 = fixXHTML(data)
    val parserFactory = new org.ccil.cowan.tagsoup.jaxp.SAXFactoryImpl
    val parser = parserFactory.newSAXParser()
    val source = new InputSource(new ByteArrayInputStream(data2))
    val adapter = new NoBindingFactoryAdapter
    val e = adapter.loadXML(source, parser)
    e.toString.getBytes("utf-8")
  }

  /** Identity post-processing (used for non-xhtml outputs). */
  def noPostProc(data: Array[Byte]) = data

  /** Writes `srcData` to a temp file, runs pyodconverter on it, and returns
    * the (post-processed) bytes of the converted output file. */
  override def convert(srcExtension: String, srcData: Array[Byte], dstExtension: String) = {
    logger.info("oo.convert: starting: srcExtension = " + srcExtension + ", dstExtension = " + dstExtension)
    // OpenOffice produces "html"; the xhtml cleanup happens in htmlPostProc
    val dstExtension2 = dstExtension match {
      case "xhtml" ⇒ "html"
      case x ⇒ x
    }
    val srcFile = File.createTempFile("lexml-parser-pl", "." + srcExtension)
    logger.info("oo.convert: srcFile = " + srcFile)
    val baseName = srcFile.getName.substring(0, srcFile.getName.length - srcExtension.length - 1)
    val baseDir = srcFile.getParentFile
    val destFile = new File(baseDir, baseName + "." + dstExtension2)
    logger.info("oo.convert: destFile = " + destFile)
    val postProc = dstExtension match {
      case "xhtml" ⇒ htmlPostProc(_)
      case _ ⇒ noPostProc(_)
    }
    try {
      FileUtils.writeByteArrayToFile(srcFile, srcData)
      val cmd: Array[String] = Array(pyodconverter, srcFile.getPath, destFile.getPath)
      val p = Runtime.getRuntime.exec(cmd, Array[String](), srcFile.getParentFile)
      p.waitFor
      val res = postProc(FileUtils.readFileToByteArray(destFile))
      //FileUtils.writeByteArrayToFile(new File(destFile.getParentFile,destFile.getName + ".res"),res)
      res
    } finally {
      // clean up both the source file and any output files sharing its base name
      if (removeTemporaryFiles) {
        deleteByPrefix(baseDir,baseName)
      }
    }
  }
}
object XHTMLProcessor extends Logging {
val accept : Set[String] = Set(
"text/plain",
"text/html",
"application/rtf",
"text/rtf",
"application/msword",
"application/vnd.oasis.opendocument.text",
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
)
import TextUtils._
/* lazy val dtdFile: File = {
val f = File.createTempFile("xhtml11-", ".dtd")
f.deleteOnExit()
f
}
lazy val dtdUrl: String = {
val is = getClass.getClassLoader().getResourceAsStream("xhtml11.dtd")
val os = new BufferedOutputStream(new FileOutputStream(dtdFile))
IOUtils.copy(is, os)
IOUtils.closeQuietly(is)
IOUtils.closeQuietly(os)
dtdFile.getCanonicalFile.toURI.toURL.toString
}*/
//val converter : Converter = new AbiwordConverter
val defaultConverter: Converter = new DOCXConverter(new AbiwordConverter)
def changeChildren[T <: Seq[Node]](f: Seq[Node] ⇒ Seq[Node]) = (e: T) ⇒ {
e match {
case Elem(pref, name, attrs, scope, children @ _*) ⇒
Elem(pref, name, attrs, scope, true, (f(children)): _*).asInstanceOf[T]
case _ ⇒ e
}
}
// var removeTemporaryFiles = true
// def fixXHTML(xhtml: String) = xhtml
// .replaceFirst("<!DOCTYPE html[^>]*>", "<!DOCTYPE html PUBLIC \\"-//W3C//DTD XHTML 1.0 Strict//EN\\" \\"" + XHTMLProcessor.dtdUrl + "\\">")
// .replaceAll("\\007", "")
// .replaceAll("\\037", "")
// .replace(0x92: Char, '`')
// .replaceAll("’", "`")
// .replace(0x202d: Char, ' ')
// .replace(0x202c: Char, ' ')
//
//
//
// def abiwordConversion(extension : String) =
// (extension,convertUsingAbiword2(extension)(_))
//
// def convertUsingAbiword2(extension : String)(source : Array[Byte]) =
// convertUsingAbiword(source,extension)
//
// def convertUsingAbiword(source: Array[Byte], extension: String): Elem = {
// val srcFile = File.createTempFile("lexml-parser-pl", "." + extension)
// val xhtmlFile = new File(srcFile.getCanonicalPath.replaceFirst(extension + "$", "xhtml"))
// try {
// //val srcPath = srcFile.getCanonicalPath
// FileUtils.writeByteArrayToFile(srcFile, source)
// val cmd: Array[String] = Array(
// "/usr/bin/abiword", "--to=xhtml", srcFile.getName, "--exp-props=html4: no; declare-xml: yes; use-awml:no; embed-css: no; embed-images: no")
// val p = Runtime.getRuntime.exec(cmd, Array[String](), srcFile.getParentFile)
// p.waitFor
//
// val xhtmlData = fixXHTML(FileUtils.readFileToString(xhtmlFile))
//
// val reader = new StringReader(xhtmlData)
//
// try {
// if (!xhtmlFile.exists || xhtmlFile.length < 300) {
// throw new RuntimeException("HTML de saida do Abiword vazio")
// }
// else { XML.load(reader) }
// } finally {
// IOUtils.closeQuietly(reader)
// }
// } finally {
// if (removeTemporaryFiles) {
// srcFile.delete
// xhtmlFile.delete
// val f = new File(xhtmlFile.getPath() + "_files")
// if (f.exists() && f.isDirectory()) {
// f.listFiles().foreach(_.delete)
// f.delete
// }
// }
// }
// }
type SourceProcessor = (Array[Byte], Converter) ⇒ Elem
def xhtmlLoader(in : Array[Byte]) : Elem = xhtmlLoader(new ByteArrayInputStream(in))
def xhtmlLoader(is : InputStream) : Elem =
XhtmlParser(new BufferedSource(is)(Codec.UTF8)).collectFirst({ case e : Elem => e }).get
def externalConvertToXhtml(extension: String) = (extension, (data: Array[Byte], converter : Converter) ⇒ {
System.setProperty("file.encoding", "utf-8")
val converted = converter.convert(extension, data, "xhtml")
val r = xhtmlLoader(converted)
((r \\\\ "html").toSeq.collect { case e : Elem => e }) . head
})
  // Maps a MIME type to (file extension, processor). Plain text and (X)HTML
  // are handled in-process; office formats go through the external Converter.
  val sourceProcessorMap: Map[String, (String, SourceProcessor)] = Map(
    ("text/plain", ("txt", (source: Array[Byte], _ : Converter) ⇒ {
      val text = fixXHTML(source)
      val lines = scala.io.Source.fromBytes(text,"utf-8").getLines().to(List)
      // Groups consecutive non-blank lines into paragraphs; blank lines split.
      // `r` accumulates finished paragraphs (reversed), `s` the current one.
      def toPars(l: List[String], r: List[String] = Nil, s: List[String] = Nil): List[String] = l match {
        case Nil ⇒ s match { case Nil ⇒ r; case _ ⇒ s.mkString("", " ", "") :: r }
        case (x :: xs) if x.trim.length == 0 ⇒ toPars(xs, s.mkString("", " ", "") :: r)
        case (x :: xs) ⇒ toPars(xs, r, x :: s)
      }
      val pars = toPars(lines).reverse
      <html><body><div>{ pars.map(p ⇒ <p>{ p }</p>) }</div></body></html>
    })),
    ("application/xhtml+xml", ("xhtml", (source: Array[Byte],_ : Converter) ⇒ {
      val text = fixXHTML(source)
      xhtmlLoader(text)
    })),
    ("text/html", ("html", (source: Array[Byte],_ : Converter) ⇒ {
      val text = fixXHTML(source)
      xhtmlLoader(text)
    })),
    ("application/rtf", externalConvertToXhtml("rtf")),
    ("text/rtf", externalConvertToXhtml("rtf")),
    ("application/msword", externalConvertToXhtml("doc")),
    ("application/vnd.oasis.opendocument.text", externalConvertToXhtml("odt")),
    ("application/vnd.openxmlformats-officedocument.wordprocessingml.document", externalConvertToXhtml("docx")))
  // Converts raw bytes of the given MIME type to XHTML; None for unknown types.
  def convertSrcToXHTML(source: Array[Byte], mimeType: String,converter : Converter): Option[Elem] =
    sourceProcessorMap.get(mimeType).map(_._2(source,converter))
  // Convenience wrapper: reads the stream fully and converts it as RTF.
  def convertRTFtoXHTML(rtfSource: InputStream, converter : Converter): Option[Elem] =
    convertSrcToXHTML(IOUtils.toByteArray(rtfSource), "text/rtf",converter)
  // Extracts the content-bearing elements of an XHTML document: everything
  // from <body> onward, minus header/footer, with leading tables flattened to
  // their paragraph/heading descendants, and stray inline nodes wrapped in <p>.
  def selectBaseElems(root: Elem): List[Elem] = {
    // NOTE(review): `body` is computed but never used below (and .head throws
    // when there is no <body>) — looks like leftover code; confirm.
    val body = (root \\\\ "body").head.asInstanceOf[Elem]
    val belems = root.child.to(List).dropWhile ((n : Node) => n match {
      case e : Elem => e.label != "body"
      case _ => true
    })
    // Lower-cased value of `attr` on an element, None otherwise.
    def getAttr(n: Node, attr: String) = n match {
      case (e: Elem) ⇒ e.attributes.get(attr).map(_.text.toLowerCase)
      case x ⇒ None
    }
    def getIdOrType(n: Node) = getAttr(n, "id").orElse(getAttr(n, "type")).getOrElse("")
    val childs = trim(belems)
    // Drop page chrome identified by id/type "header" or "footer".
    val childs1 = childs.filter((n: Node) ⇒ { val x = getIdOrType(n) ; x != "header" && x != "footer" })
    // cl1: leading run of tables (layout artifacts); cl2: the rest.
    val (cl1, cl2) = childs1.span({ case (e: Elem) ⇒ e.label == "table"; case _ ⇒ false })
    // Pull only textual content (p/h1-h3) out of the leading tables.
    val childs3 = (cl1 \\\\ "*").filter {
      case e : Elem => e.label == "p" || e.label == "h1" || e.label == "h2" || e.label == "h3"
      case _ => false
    } ++ cl2
    val r = wrapText(childs3.toList).collect { case e : Elem => e }
    r
  }
  // Extension hook; currently the identity selection.
  def chooseDivs(divs: List[Elem]) = divs
  // Labels that are normalized into plain paragraphs by isValidElem.
  val parLabels = Set("p", "h1", "h2", "h3", "h4", "blockquote")
  // Partial function keeping only structurally valid block elements:
  // tables pass through, ol/li recurse (dropping attributes), and any
  // paragraph-like label is rewritten to a plain <p>. Everything else is
  // undefined (filtered out by collect).
  val isValidElem: PartialFunction[Node, Node] = (x: Node) ⇒
    x match {
      case e @ Elem(_, "table", _, _, _@ _*) ⇒ e
      case Elem(pref, "ol", _, scope, children @ _*) ⇒ Elem(pref, "ol", Null, scope, true, children.collect(isValidElem): _*)
      case Elem(pref, "li", _, scope, children @ _*) ⇒ Elem(pref, "li", Null, scope, true, children.collect(isValidElem orElse isContent): _*)
      case Elem(pref, label, attrs, scope, children @ _*) if (parLabels.contains(label)) ⇒ Elem(pref, "p", attrs, scope, true, children: _*)
    }
  // Inside list items, bare text and spans also count as valid content.
  val isContent: PartialFunction[Node, Node] = ({
    case t: Text ⇒ t
    case e @ Elem(_, "span", _, _, _*) ⇒ e
  }: PartialFunction[Node, Node]).orElse(isValidElem)
  // Wraps runs of inline nodes (text and non-block elements) into <p> so the
  // result is a flat sequence of block-level nodes. Block elements pass
  // through unchanged, flushing any accumulated inline run before them.
  def wrapText(nl: List[Node]): List[Node] = {
    val blockElems = List("table","thead","tbody","th","tr","td","p","blockquote","center","div","img")
    // accum: current inline run (reversed); accum2: finished output (reversed).
    def docollect(nl: List[Node], accum: List[Node] = Nil, accum2 : List[Node] = Nil): List[Node] = nl match {
      case Nil if trim(accum).isEmpty => accum2.reverse
      case Nil => (<p>{ NodeSeq fromSeq accum.reverse }</p> :: accum2).reverse
      case (t: Text) :: r ⇒ docollect(r, t :: accum, accum2)
      case (e: Elem) :: r if (!blockElems.contains(e.label)) ⇒ docollect(r, e :: accum, accum2)
      case (e : Elem) :: r if trim(accum).isEmpty ⇒ docollect(r,Nil,e :: accum2)
      case (e : Elem) :: r ⇒ docollect(r,Nil,e :: (<p>{ NodeSeq fromSeq accum.reverse }</p>) :: accum2)
    }
    docollect(nl, Nil)
  }
def trimLeft(nl: List[Node]) = nl.dropWhile({ case t: Text ⇒ t.text.trim.isEmpty; case _ ⇒ false })
def trim(nl: List[Node]) = trimLeft(trimLeft(nl).reverse).reverse
  // Wrapper elements whose children are hoisted up (block-level: re-wrapped
  // in paragraphs; inline: spliced in place).
  val explodedBlockElements = Set("div", "center")
  val explodedInlineElements = Set("font")
  // Recursively removes layout wrappers (div/center/font), normalizes single
  // <p>-wrapped table cells, and finally keeps only valid block elements.
  def explodeDivs(divs: List[Elem]) = {
    def explode(n: Node): List[Node] = n match {
      case e: Elem if explodedBlockElements.contains(e.label) ⇒ wrapText(e.child.toList).flatMap(explode)
      case e: Elem if explodedInlineElements.contains(e.label) ⇒ e.child.toList.flatMap(explode)
      case e: Elem if e.label == "td" ⇒
        // A cell containing exactly one <p> is unwrapped to its contents.
        trim(e.child.toList) match {
          case List(e2: Elem) if e2.label == "p" ⇒ List(e copy (child = e2.child.toList.flatMap(explode)))
          case _ ⇒ List(e copy (child = e.child.toList.flatMap(explode)))
        }
      case e: Elem ⇒ List(e copy (child = e.child.toList.flatMap(explode)))
      case x ⇒ List(x)
    }
    divs.flatMap(explode).collect(isValidElem)
  }
def changeElem(f: Elem ⇒ Elem) = (n: Node) ⇒ n match {
case Elem(_, _, _, _, _*) ⇒ f(n.asInstanceOf[Elem])
case _ ⇒ n
}
def mapToAttributes(m: Map[String, String]) = m.foldRight(Null.asInstanceOf[MetaData])(
(kv, md) ⇒ {
val (k, v) = kv;
new UnprefixedAttribute(k, v, md);
})
  // Bottom-up attribute scrubber: spans keep only whitelisted style values,
  // tables keep rows/cols, cells keep non-trivial colspan/rowspan, and every
  // other element loses all attributes.
  def cleanAttributes: Seq[Node] ⇒ Seq[Node] = bottomUp((n: Node) ⇒ {
    // Rewrites an element's attribute map through `f`.
    val changeAttrs = (f: (Map[String, String] ⇒ Map[String, String])) ⇒
      (e: Node) ⇒ e match {
        case Elem(pref, name, attrs, scope, children @ _*) ⇒
          Elem(pref, name, mapToAttributes(f(attrs.asAttrMap)), scope, true, children: _*)
      }
    // Only these exact CSS declarations survive on spans.
    val allowedStyles = Set[String](
      "font-weight:bold", "font-weight:bolder", "font-weight:normal", "font-style:italic", "font-style:oblique", "font-style:normal",
      "text-decoration:underline","vertical-align:super", "vertical-align:sub" )
    def cleanStyle(v: String) = {
      v.split(";").filter(allowedStyles.contains).mkString(";")
    }
    // Keeps only the "style" attribute, with its value filtered.
    val filterStyle: PartialFunction[(String, String), (String, String)] = (kv: (String, String)) ⇒ kv match {
      case ("style", v) ⇒ ("style", cleanStyle(v))
    }
    val cleanSpanAttrs: Elem ⇒ Elem = changeAttrs(_.collect(filterStyle).toMap)
    // rowspan="1"/colspan="1" carry no information — drop them.
    val filterRedundantTDAttrs = (m: Map[String, String]) ⇒ m.filter({
      case ("rowspan", "1") ⇒ false
      case ("colspan", "1") ⇒ false
      case _ ⇒ true
    })
    def keepOnly(keyNames: String*) = (m: Map[String, String]) ⇒ m.filter(x ⇒ keyNames.contains(x._1))
    val emptyAttributes = changeAttrs((_: Map[String, String]) ⇒ Map())
    // Replaces all attributes with computed "indentation" (in mm, from
    // text-indent + margin-left) and "centered" flags.
    // NOTE(review): saveIndentation is never invoked by the dispatch below —
    // apparently dead code kept for a future pipeline step; confirm.
    def saveIndentation(e: Elem) = e match {
      case Elem(pref, name, attrs, scope, children @ _*) ⇒ {
        val styleMap = getStyleMap(attrs).withDefault(_ ⇒ "")
        // Parses "<number><unit>" and converts to millimetres; None when the
        // unit is unknown or the value does not match.
        def parseAndNormalize(s: String): Option[Double] = {
          val re = new Regex("(-?[0-9.]+)([a-z]+)")
          val ratios = Map[String, Double](
            "mm" -> 1, "cm" -> 10, "in" -> 25.4)
          val m = re.findAllIn(s)
          if (!m.isEmpty) {
            val num = m.group(1).toDouble
            val unit = m.group(2)
            val ratio = ratios.get(unit)
            ratio.map(_ * num)
          } else {
            None
          }
        }
        val textIndent = parseAndNormalize(styleMap("text-indent"))
        val marginLeft = parseAndNormalize(styleMap("margin-left"))
        val centered = styleMap("text-align") == "center"
        val indentation = List(textIndent, marginLeft).collect({ case Some(x) ⇒ x }).sum
        Elem(pref, name, new UnprefixedAttribute("indentation", Text(indentation.toString), new UnprefixedAttribute("centered", centered.toString, Null)), scope, true, children: _*)
      }
    }
    // Dispatch on element label; non-elements pass through untouched.
    n match {
      case Elem(_, label, _, _, _*) ⇒ {
        var e = n.asInstanceOf[Elem]
        label match {
          case "span" ⇒ cleanSpanAttrs(e)
          case "table" ⇒ changeAttrs(keepOnly("rows", "cols"))(e)
          case "td" ⇒ changeAttrs(filterRedundantTDAttrs.andThen(keepOnly("colspan", "rowspan")))(e)
          case _ ⇒ emptyAttributes(e)
        }
      }
      case _ ⇒ n
    }
  })
  // Recursively rewrites <span> elements: style-derived bold/italic/super/sub
  // become real <b>/<i>/<sup>/<sub> wrappers (via makeSpanOrIandB); all other
  // elements are rebuilt with their children fixed.
  def fixSpans(nl: List[Node]): List[Node] = {
    nl.flatMap({
      case e @ (Elem(pref, label, attrs, scope, child @ _*)) ⇒ {
        val child2 = fixSpans(child.toList)
        e.label match {
          case "span" ⇒ makeSpanOrIandB(pref, scope, attrs, child2)
          case _ ⇒ List(Elem(pref, label, attrs, scope, true, child2: _*))
        }
      }
      case n ⇒ List(n)
    })
  }
  // Converts one span's style map into semantic markup: font-style → <i>,
  // font-weight → <b>, vertical-align → <sup>/<sub>. Remaining styles (if
  // any) are kept on a residual <span>; underline is only warned about.
  def makeSpanOrIandB(prefix: String, scope: NamespaceBinding, attrs: MetaData, child: Seq[Node]): Seq[Node] = {
    // Splits "key:value"; a declaration without ':' yields ("key", "").
    def makePair(s: String) = s.span(c ⇒ c != ':') match {
      case (k, "") ⇒ (k, "")
      case (k, v) ⇒ (k, v.substring(1))
    }
    val attrMap = attrs.asAttrMap
    val styleString = attrMap.get("style").getOrElse("")
    val otherAttrs: Map[String, String] = attrMap - "style"
    val styles = styleString.split(";").map(makePair).toMap
    val italicPresent = styles.get("font-style") match {
      case Some("italic") ⇒ true
      case Some("oblique") ⇒ true
      case _ ⇒ false
    }
    val boldPresent = styles.get("font-weight") match {
      case Some("bold") ⇒ true
      case Some("bolder") ⇒ true
      case _ ⇒ false
    }
    val isSuperScript = styles.get("vertical-align") == Some("super")
    val isSubScript = styles.get("vertical-align") == Some("sub")
    val hasUnderline = styles.get("text-decoration") == Some("underline")
    // Styles not consumed above stay on a residual span (if non-empty).
    val otherStyles = styles - "font-style" - "font-weight" - "text-decoration" - "vertical-align"
    val restMap: Map[String, String] = if (otherStyles.isEmpty) { otherAttrs } else {
      otherAttrs + (("style", otherStyles.toList.map(x ⇒ x._1 + ":" + x._2).mkString("", ";", "")))
    }
    // Wrap from the inside out: span? → i? → b? → sup?/sub?.
    var e = if (restMap.isEmpty) { child } else { Elem(prefix, "span", mapToAttributes(restMap), scope, true, child: _*) }
    e = if (italicPresent) { Elem(prefix, "i", Null, scope, true, e: _*) } else { e }
    e = if (boldPresent) { Elem(prefix, "b", Null, scope, true, e: _*) } else { e }
    e = if (isSuperScript) { Elem(prefix, "sup", Null, scope, true, e: _*) } else { e }
    e = if (isSubScript) { Elem(prefix, "sub", Null, scope, true, e: _*) } else { e }
    // Underline is deliberately not materialized as <u>; only logged.
    //e = if (hasUnderline) { Elem(prefix, "u", Null, scope, true, e: _*) } else { e }
    if (hasUnderline) { logger.warn("text has underline!") }
    //logger.info("makeSpanOrIandB: attrMap = " + attrMap + ", styleString = " + styleString + ", styles = " + styles + ", italicPresent = " + italicPresent + ", boldPresent = " + boldPresent + ", restMap.isEmpty = " + restMap.isEmpty + ", res = " + child3)
    e
  }
def mapElements[T](others: Node ⇒ T, elem: Elem ⇒ T) = (n: Node) ⇒
n match {
case e: Elem ⇒ elem(e)
case _ ⇒ others(n)
}
def id[T]: T ⇒ T = (t: T) ⇒ t
  // Element labels allowed in the final document; anything else is replaced
  // by its children (cleanSeqNodes).
  val validElements = Set("p", "span", "sup", "sub", "table", "tr", "td", "th", "b", "i", "ol", "li", "img", "blockquote", "u",
    "h1","h2","h3","h4")
  val cleanSeqNodes: List[Node] ⇒ List[Node] = bottomUp(mapElements(id,
    (e: Elem) ⇒ if (validElements.contains(e.label)) { e } else { e.child }))
  // Headings are demoted to plain paragraphs (label change only).
  val headings = Set("h1", "h2", "h3", "h4")
  val renameHeadings: List[Node] ⇒ List[Node] = bottomUp(mapElements(id,
    (e: Elem) ⇒ if (headings.contains(e.label)) { e copy (label = "p") } else { e }))
def bottomUp(f: Node ⇒ Seq[Node]): Seq[Node] ⇒ List[Node] = (ns: Seq[Node]) ⇒ {
val chChildren = (n: Node) ⇒ (changeChildren(bottomUp(f))(n))
var nl = ns.iterator.toList
nl.flatMap(f.compose(chChildren))
}
def topDown(f: Node ⇒ Seq[Node]): Seq[Node] ⇒ List[Node] = (ns: Seq[Node]) ⇒ {
val chChildren = (n: Node) ⇒ (changeChildren(bottomUp(f))(n))
var nl = ns.iterator.toList
nl.flatMap(f).map(chChildren)
}
  // Top-down traversal that stops descending wherever `f` is defined: a
  // matched node is replaced by f's result and its subtree is not visited;
  // unmatched nodes keep their label and recurse into their children.
  def topDownUntil(f: PartialFunction[Node, Seq[Node]]): Seq[Node] ⇒ List[Node] = (ns: Seq[Node]) ⇒ {
    val rec = topDownUntil(f)
    ns.toList.flatMap((n: Node) ⇒
      (f.lift(n)) match {
        case None ⇒ changeChildren(rec)(n)
        case Some(ns2) ⇒ ns2
      })
  }
  // Rewrites every Text node left-to-right while threading a state value of
  // type A through `f`; text rewritten to "" is dropped entirely. Element
  // structure is preserved, other node kinds pass through unchanged.
  def transformTextWith[A](f: (A, String) ⇒ (String, A))(a: A): Seq[Node] ⇒ List[Node] = (ns: Seq[Node]) ⇒ {
    // Accumulator: (current state, output list built in reverse).
    def doit(bl: (A, List[Node]), n: Node): (A, List[Node]) = {
      val (b1, l) = bl
      n match {
        case Text(t1) ⇒ {
          val (t2, b2) = f(b1, t1)
          if (t2.isEmpty) { (b2, l) }
          else { (b2, Text(t2) :: l) }
        }
        case Elem(pref, name, attrs, scope, children @ _*) ⇒ {
          val (b2, rl) = children.foldLeft(b1, List[Node]())(doit)
          (b2, Elem(pref, name, attrs, scope, true, rl.reverse: _*) :: l)
        }
        case _ ⇒ (b1, n :: l)
      }
    }
    val (_, rl) = ns.foldLeft(a, List[Node]())(doit)
    rl.reverse
  }
  // Like transformTextWith but traverses Text nodes right-to-left, and `f`
  // may return None to stop: once a None is seen, all remaining (earlier)
  // nodes are copied through untouched (the `skip` flag).
  def transformTextBackwardsWith[A](f: (A, String) ⇒ Option[(String, A)])(a: A): Seq[Node] ⇒ List[Node] =
    (ns: Seq[Node]) ⇒ {
      // Accumulator: (state, output in document order, stop flag).
      def doit(bl: (A, List[Node], Boolean), n: Node): (A, List[Node], Boolean) = {
        val (b1, l, skip) = bl
        if (skip) { (b1, n :: l, skip) } else n match {
          case Text(t1) ⇒
            f(b1, t1) match {
              case None ⇒ (b1, n :: l, true)
              case Some((t2, b2)) ⇒ {
                if (t2.isEmpty) { (b2, l, false) }
                else { (b2, Text(t2) :: l, false) }
              }
            }
          case Elem(pref, name, attrs, scope, children @ _*) ⇒ {
            val (b2, rl, skip2) = children.reverse.foldLeft(b1, List[Node](), skip)(doit)
            (b2, Elem(pref, name, attrs, scope, true, rl: _*) :: l, skip2)
          }
          case _ ⇒ (b1, n :: l, skip)
        }
      }
      val (_, rl, _) = ns.reverse.foldLeft(a, List[Node](), false)(doit)
      rl
    }
  // Whitespace runs (incl. a literal non-ASCII space variant) to collapse.
  val re1 = new Regex("(\\\\s| )+")
  // Leading spaces (used when trimLeft is requested).
  val re2 = new Regex("^ +")
  // Spaces hugging curly quotes are dropped.
  val re3 = new Regex("“ +")
  val re4 = new Regex(" +”")
  // State-threaded text cleaner for transformTextWith: collapses whitespace,
  // tightens quotes, optionally trims the left edge, and returns whether the
  // NEXT text node should be left-trimmed (true after trailing space).
  def cleanSpaces(trimLeft: Boolean, s: String) = {
    val s1 = re1.replaceAllIn(s, " ")
    val s2 = re3.replaceAllIn(s1, "“")
    val s3 = re4.replaceAllIn(s2, "”")
    val s4 = if (!trimLeft || s3.isEmpty) { s3 }
    else { re2.replaceFirstIn(s3, "") }
    val tl = if (s4.isEmpty) { trimLeft }
    else { s4.endsWith(" ") }
    (s4, tl)
  }
  // Trailing spaces at the very end of a paragraph.
  val re5 = new Regex(" +$")
  // Normalizes whitespace inside paragraph-like elements: forward pass
  // collapses/trims via cleanSpaces, backward pass strips the final trailing
  // spaces of the last text node only (stops after the first match).
  val normalizeSpace: Seq[Node] ⇒ List[Node] = topDownUntil((n: Node) ⇒
    n match {
      case Elem(_, label, _, _, _*) if (label == "p" || label == "li" || label == "blockquote") ⇒ {
        val ns1 = transformTextWith(cleanSpaces)(true)(n)
        val ns2 = transformTextBackwardsWith(
          (skip: Boolean, t: String) ⇒
            if (skip) { None }
            else { Some(re5.replaceFirstIn(t, ""), true) })(false)(ns1)
        ns2
      }
    })
def getAttr(md: MetaData, key: String): String = {
md.get(key) match {
case None ⇒ ""
case Some(l) ⇒ l.map(_.text).mkString("", "", "")
}
}
  // Unwraps markup that carries no information: spans with no surviving
  // style (or only whitespace content), and empty <i>/<b> wrappers.
  val cleanSpuriousSpans = topDown((n: Node) ⇒
    n match {
      case Elem(pref, "span", attrs, scope, children @ _*) if (getAttr(attrs, "style").isEmpty ||
        n.text.trim.isEmpty) ⇒ { children }
      case e: Elem if (e.label == "i" || e.label == "b") && e.text.trim.isEmpty ⇒ { e.child }
      case _ ⇒ n
    })
  // Resets every element's namespace scope to TopScope, discarding any
  // namespace bindings introduced by the converter.
  val cleanNameSpaces = topDown((n: Node) ⇒
    n match {
      case Elem(pref, label, attrs, _, cl @ _*) ⇒
        Elem(pref, label, attrs, TopScope, true, cl: _*)
      case _ ⇒ n
    })
  // Collapses consecutive empty paragraphs: an empty p/blockquote that is
  // immediately followed by another empty <p> is dropped (right fold, so
  // "followed" is in document order).
  def cleanRepeatedEmptyParagraphs(ns: Seq[Node]) = {
    def f(n: Node, ns: List[Node]) =
      n match {
        case Elem(_, name, _, _, _*) if name == "p" || name == "blockquote" ⇒ ns match {
          //case List() => n :: ns
          case ((n2 @ Elem(_, "p", _, _, _*)) :: _) ⇒
            if (n.text.isEmpty && n2.text.isEmpty) { ns }
            else { n :: ns }
          case _ ⇒ n :: ns
        }
        case _ ⇒ n :: ns
      }
    ns.foldRight(List[Node]())(f)
  }
  // Parses the "style" attribute into a key → value map; empty map when the
  // attribute is absent.
  // NOTE(review): `r(1)` throws on a declaration without ':' (e.g. a bare
  // token or trailing ';') — assumes converter output is well-formed; confirm.
  def getStyleMap(m: MetaData) = {
    m.get("style") match {
      case None ⇒ Map[String, String]()
      case Some(s) ⇒ (NodeSeq fromSeq s).text.split(";").
        map((x: String) ⇒ { val r = x.split(":"); (r(0), r(1)) }).
        toMap
    }
  }
  // Two attribute lists are style-equal when their parsed style maps match.
  def styleIsTheSame(m1: MetaData, m2: MetaData) =
    getStyleMap(m1) == getStyleMap(m2)
  // Splits off the leading run of whitespace-only text nodes.
  def collectWhiteSpace(ns: Seq[Node]): (Seq[Node], Seq[Node]) =
    ns.span((n: Node) ⇒ n match {
      case Text(t) ⇒ t.trim.isEmpty
      case _ ⇒ false
    })
  // Concatenates adjacent Text siblings into single Text nodes, applied to
  // the children of every element.
  val mergeTextNodes = topDown(
    mapElements(id, changeChildren((ns: Seq[Node]) ⇒ {
      def mergeTexts(n: Node, ns: List[Node]): List[Node] = {
        (n, ns) match {
          case (Text(t1), Text(t2) :: ns2) ⇒ (Text(t1 + t2) :: ns2)
          case _ ⇒ n :: ns
        }
      }
      ns.foldRight(List[Node]())(mergeTexts)
    })))
  // Merges sibling <span>s that have the same style map, absorbing any
  // whitespace-only text nodes that separate them.
  val mergeSpans = bottomUp(
    mapElements(id, (e: Elem) ⇒ {
      val Elem(pref, label, attrs, scope, cl @ _*) = e
      // Right fold step: if the head of the already-merged tail is a span
      // with the same style, fuse the two spans (keeping the whitespace in
      // between as content).
      def mergeit(n: Node, ns: List[Node]) = {
        val (ws, nss) = collectWhiteSpace(ns)
        val res = n match {
          case Elem(_, "span", attrs1, _, cl1 @ _*) ⇒
            nss match {
              // NOTE: `pref` and `scope` here are fresh pattern bindings from
              // the matched head span — they shadow the outer vals above.
              case Elem(pref, "span", attrs2, scope, cl2 @ _*) :: ns2 ⇒
                if (styleIsTheSame(attrs1, attrs2)) {
                  Elem(pref, "span", attrs2, scope, true, (cl1 ++ ws ++ cl2): _*) :: ns2
                } else {
                  n :: ns
                }
              case _ ⇒ n :: ns
            }
          case _ ⇒ n :: ns
        }
        res
      }
      val cll = cl.foldRight(List[Node]())(mergeit)
      Elem(pref, label, attrs, scope, true, cll: _*)
    }))
  // Replaces the U+0096 control character (a Windows-1252 en-dash that was
  // mis-decoded) with a plain hyphen, across all text nodes.
  val cleanSpecialCharacters = {
    val cleanit = (x: Unit, s: String) ⇒ {
      val s1 = s.map((c: Char) ⇒ c match {
        case '\\u0096' ⇒ '-'
        case _ ⇒ c
      })
      (s1, x)
    }
    transformTextWith(cleanit)(())
  }
def applySeq[T](fs: Seq[T ⇒ T]) =
(v0: T) ⇒ fs.foldLeft(v0)((v: T, f: T ⇒ T) ⇒ f(v))
def applySeqTo[T](v0: T)(fs: Seq[T ⇒ T]) = applySeq(fs)(v0)
  // Full cleanup pipeline: selects the document body, explodes layout
  // wrappers, then runs the normalization passes in order. Returns the
  // cleaned block-level nodes.
  def pipelineXHTML(xhtml: Elem): List[Node] = {
    // Tracing helper — insert the commented debug(...) stages below to dump
    // intermediate states.
    def debug(where: String): List[Node] ⇒ List[Node] = (l: List[Node]) ⇒ {
      println("debug: " + where + ":")
      l.zipWithIndex foreach {
        case (n,i) =>
          println(" [%20s][%06d]: %s ".format(where,i,n.toString) )
      }
      l
    }
    val xhtml2 = renameHeadings(List(xhtml)).collect { case e : Elem => e }.head
    val baseElems = selectBaseElems(xhtml2)
    val divs = chooseDivs(baseElems)
    val validElems = explodeDivs(divs)
    // Pass order matters: namespaces and invalid elements are stripped before
    // attribute/whitespace cleanup, span merging precedes span → i/b fixing.
    val res = applySeqTo(validElems)(List[List[Node] ⇒ List[Node]](
      //debug("start"),
      cleanNameSpaces,
      //debug("after cleanNameSpaces"),
      cleanSeqNodes,
      //debug("after cleanSeqNodes"),
      _.flatMap(cleanAttributes),
      normalizeSpace,
      cleanSpuriousSpans,
      mergeTextNodes,
      mergeSpans,
      fixSpans,
      cleanRepeatedEmptyParagraphs,
      cleanSpecialCharacters))
    res
  }
  // Converts and cleans a document using the default converter.
  def pipelineWithDefaultConverter(source: Array[Byte], mimeType: String) : Option[List[Node]] =
    pipeline(source,mimeType,defaultConverter)
  // Converts `source` of the given MIME type to XHTML and runs the cleanup
  // pipeline; None when the MIME type is unsupported.
  def pipeline(source: Array[Byte], mimeType: String, converter : Converter): Option[List[Node]] =
    convertSrcToXHTML(source, mimeType,converter).map(pipelineXHTML)
  // RTF stream entry point, wrapping the Option result into the
  // Success/Failure XHTMLProcessorResult ADT.
  def pipeline(rtfSource: InputStream,converter : Converter = defaultConverter): XHTMLProcessorResult =
    pipeline(IOUtils.toByteArray(rtfSource), "text/rtf",converter) match {
      case None ⇒ Failure
      case Some(x) ⇒ Success(x)
    }
}
| lexml/lexml-parser-projeto-lei | src/main/scala/br/gov/lexml/parser/pl/xhtml/XHTML.scala | Scala | gpl-2.0 | 30,192 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl.nn.{LeakyReLU, RReLU}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator
import com.intel.analytics.bigdl.utils.RandomGenerator._
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
@com.intel.analytics.bigdl.tags.Serial
class LeakyReLUSpec extends FlatSpec with BeforeAndAfter with Matchers {
  before {
    if (!TH.hasTorch()) {
      cancel("Torch is not installed")
    }
  }
  /** Draws one sample from N(-10, 10) via the shared RNG. */
  def random(): Double = RandomGenerator.RNG.normal(-10, 10)
  "A LeakyReLU Module " should "generate correct output and grad not inplace when train = true" in {
    val seed = 100
    RNG.setSeed(seed)
    val module = new LeakyReLU[Double]()
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => random())
    val gradOutput = Tensor[Double](2, 2, 2)
    // Bug fix: the original re-randomized `input` here and left `gradOutput`
    // all zeros, which made the backward comparison trivially pass.
    gradOutput.apply1(x => random())
    val start = System.nanoTime()
    val output = module.forward(input)
    val gradInput = module.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start
    val code = "torch.manualSeed(" + seed + ")\\n" +
      "module = nn.LeakyReLU()\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"
    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
    luaOutput1 should be (output)
    luaOutput2 should be (gradInput)
    println("Test case : LeakyReLU, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }
  "A LeakyReLU Module " should "generate correct output and grad inplace when train = true" in {
    val seed = 100
    RNG.setSeed(seed)
    // NOTE(review): the Lua reference below uses nn.LeakyReLU(1/100, true)
    // (inplace) while this module is constructed with inplace = false. The
    // produced values are numerically identical either way, but the flag
    // mismatch looks unintentional — confirm which variant is under test.
    val module = new LeakyReLU[Double](inplace = false)
    val input = Tensor[Double](2, 2, 2)
    input.apply1(x => random())
    val gradOutput = Tensor[Double](2, 2, 2)
    // Bug fix: randomize gradOutput (was mistakenly re-randomizing input).
    gradOutput.apply1(x => random())
    val start = System.nanoTime()
    val output = module.forward(input)
    val gradInput = module.backward(input.clone(), gradOutput.clone())
    val end = System.nanoTime()
    val scalaTime = end - start
    val code = "torch.manualSeed(" + seed + ")\\n" +
      "module = nn.LeakyReLU(1/100,true)\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"
    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
    luaOutput1 should be (output)
    luaOutput2 should be (gradInput)
    println("Test case : LeakyReLU, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
  }
}
| SeaOfOcean/BigDL | dl/src/test/scala/com/intel/analytics/bigdl/torch/LeakyReLUSpec.scala | Scala | apache-2.0 | 3,746 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.persistence
object PersistenceSpec {
  // Derives a short, identifier-safe name for the test class that invoked
  // this helper (typically used to name actor systems / journals).
  def getCallerName(clazz: Class[_]): String = {
    // Stack frames above Thread/PersistenceSpec itself.
    val s = (Thread.currentThread.getStackTrace map (_.getClassName) drop 1)
      .dropWhile(_ matches "(java.lang.Thread|.*PersistenceSpec.?$)")
    // Keep only the frames above the last occurrence of `clazz` (its caller).
    val reduced = s.lastIndexWhere(_ == clazz.getName) match {
      case -1 ⇒ s
      case z ⇒ s drop (z + 1)
    }
    // Strip the package prefix and replace illegal characters with '_'.
    reduced.head.replaceFirst(""".*\.""", "").replaceAll("[^a-zA-Z_0-9]", "_")
  }
}
| rstento/lagom | persistence/core/src/test/scala/com/lightbend/lagom/persistence/PersistenceSpec.scala | Scala | apache-2.0 | 547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}
/** Web UI tab showing the progress status of all jobs in the given
  * SparkContext. */
private[ui] class JobsTab(parent: SparkUI) extends SparkUITab(parent, "jobs") {
  val sc = parent.sc
  val killEnabled = parent.killEnabled
  // Job summaries (active / completed / failed) are generated from the
  // statistics collected by the job progress listener.
  val jobProgresslistener = parent.jobProgressListener
  val executorListener = parent.executorsListener
  val operationGraphListener = parent.operationGraphListener
  // `contains` states the intent directly, replacing `exists(_ == ...)`.
  def isFairScheduler: Boolean =
    jobProgresslistener.schedulingMode.contains(SchedulingMode.FAIR)
  attachPage(new AllJobsPage(this))
  attachPage(new JobPage(this))
}
| tophua/spark1.52 | core/src/main/scala/org/apache/spark/ui/jobs/JobsTab.scala | Scala | apache-2.0 | 1,666 |
package com.github.agourlay.cornichon.dsl
import munit.FunSuite
import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
class BaseFeatureSpec extends FunSuite {
  test("BaseFeature shutdowns resources") {
    // Each registered hook contributes a distinct amount, so the final sum
    // proves that every hook ran exactly once.
    var counter = 0
    BaseFeature.addShutdownHook(() => Future.successful { counter += 1 })
    BaseFeature.addShutdownHook(() => Future.successful { counter += 2 })
    // Block until all registered hooks have completed.
    Await.ready(BaseFeature.shutDownGlobalResources(), Duration.Inf)
    assert(counter == 3)
  }
}
| agourlay/cornichon | cornichon-core/src/test/scala/com/github/agourlay/cornichon/dsl/BaseFeatureSpec.scala | Scala | apache-2.0 | 558 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.projection
import scala.concurrent.duration.Duration
import akka.annotation.InternalApi
import com.lightbend.lagom.projection.Started
import com.lightbend.lagom.projection.Status
import com.lightbend.lagom.projection.Stopped
import com.typesafe.config.Config
import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
/** Internal settings, read from `lagom.projection`, that control projection
  * worker behavior. */
@InternalApi
sealed trait ProjectionConfig {
  // Maximum time to wait for a replicated-data write to reach a majority.
  def writeMajorityTimeout: FiniteDuration
  // Status workers are driven towards when no explicit request was made.
  def defaultRequestedStatus: Status
}
@InternalApi
object ProjectionConfig {
  /** Builds the config view rooted at the `lagom.projection` section. */
  def apply(config: Config): ProjectionConfig =
    new ProjectionConfigImpl(config.getConfig("lagom.projection"))
  private final class ProjectionConfigImpl(config: Config) extends ProjectionConfig {
    val writeMajorityTimeout: FiniteDuration =
      FiniteDuration(config.getDuration("write.majority.timeout", TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS)
    // Workers start automatically unless auto-start has been disabled.
    val defaultRequestedStatus: Status =
      if (config.getBoolean("auto-start.enabled")) Started else Stopped
  }
}
| ignasi35/lagom | projection/core/src/main/scala/com/lightbend/lagom/internal/projection/ProjectionConfig.scala | Scala | apache-2.0 | 1,130 |
package com.lorandszakacs.sg.model.impl
import com.lorandszakacs.sg.model._
import com.lorandszakacs.util.effects._
import com.lorandszakacs.util.mongodb._
/**
*
* @author Lorand Szakacs, lsz@lorandszakacs.com
* @since 14 Jul 2017
*
*/
private[impl] class RepoSGIndex(override protected val db: Database)(
  implicit
  override val dbIOScheduler: DBIOScheduler,
  override val futureLift: FutureLift[IO],
) extends IndexSingleDocRepo[SGIndex] with SGRepoBSON {
  override protected def objectHandler: BSONDocumentHandler[SGIndex] = BSONMacros.handler[SGIndex]
  // The single index document is stored under this fixed id.
  override protected def uniqueDocumentId: String = "sg_index"
  override protected def defaultEntity: SGIndex = SGIndex(
    names = Nil,
    needsReindexing = Nil,
    number = 0,
  )
  // Normalizes an existing index: dedupes/sorts both name lists and keeps
  // `number` consistent with the deduped names.
  private def sanitize(i: SGIndex): SGIndex = {
    val temp = i.names.distinct.sorted
    i.copy(
      names = temp,
      needsReindexing = i.needsReindexing.distinct.sorted,
      number = temp.size,
    )
  }
  // Builds a fresh index from raw names; every name is marked as needing
  // reindexing.
  private def sanitize(names: List[Name]): SGIndex = {
    val temp = names.distinct.sorted
    SGIndex(
      names = temp,
      needsReindexing = temp,
      number = temp.length,
    )
  }
  // Writes always go through sanitize so stored documents stay normalized.
  override def create(sg: SGIndex): IO[Unit] = {
    super.create(sanitize(sg))
  }
  override def createOrUpdate(sg: SGIndex): IO[Unit] = {
    super.createOrUpdate(sanitize(sg))
  }
  // Replaces the whole index with the given names (all flagged for reindex).
  def rewriteIndex(names: List[Name]): IO[Unit] = {
    this.createOrUpdate(sanitize(names))
  }
}
| lorandszakacs/sg-downloader | sg-repo/src/main/scala/com/lorandszakacs/sg/model/impl/RepoSGIndex.scala | Scala | apache-2.0 | 1,517 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package models.navigation
/**
* @author hiral
*/
object Menus {
  // Brings in the String → menu-item extension methods.
  import models.navigation.QuickRoutes._
  // Navigation menus shown on pages scoped to a specific cluster.
  def clusterMenus(cluster: String) : IndexedSeq[Menu] = IndexedSeq(
    Menu("Cluster",IndexedSeq(
      "Summary".clusterRouteMenuItem(cluster),
      "List".baseRouteMenuItem,
      "Add Cluster".baseRouteMenuItem),
      None),
    "Brokers".clusterMenu(cluster),
    Menu("Topic",IndexedSeq(
      "List".clusterRouteMenuItem(cluster),
      "Create".clusterRouteMenuItem(cluster)),
      None),
    "Offset".clusterMenu(cluster),
    "Preferred Replica Election".clusterMenu(cluster),
    "Reassign Partitions".clusterMenu(cluster)
  )
  // Navigation menu shown on the cluster-independent index pages.
  def indexMenu : IndexedSeq[Menu] = IndexedSeq(
    Menu("Cluster",IndexedSeq(
      "List".baseRouteMenuItem,
      "Add Cluster".baseRouteMenuItem),
      None)
  )
}
| hecran/FUCKWHENXIN | app/models/navigation/Menus.scala | Scala | apache-2.0 | 940 |
package eventstore.examples
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import eventstore.{ EventStoreExtension, EventStream }
// Prints the id of every stream known to the EventStore by consuming the
// finite $streams system projection, then shuts the actor system down.
object ListAllStreamsExample extends App {
  implicit val system = ActorSystem()
  import system.dispatcher
  implicit val materializer = ActorMaterializer()
  val connection = EventStoreExtension(system).connection
  // infinite = false: complete once the current end of $streams is reached;
  // resolveLinkTos = true: follow link events to the actual stream entries.
  val publisher = connection.streamPublisher(EventStream.System.`$streams`, infinite = false, resolveLinkTos = true)
  Source.fromPublisher(publisher)
    .runForeach { x => println(x.streamId.streamId) }
    .onComplete { _ => system.terminate() }
}
| pawelkaczor/EventStore.JVM | src/main/scala/eventstore/examples/ListAllStreamsExample.scala | Scala | bsd-3-clause | 654 |
package com.lagnada.demo.scalatra
import org.scalatra._
import javax.servlet.ServletContext
// Scalatra entry point: mounts the application's servlet at the context root.
class ScalatraBootstrap extends LifeCycle {
  override def init(context: ServletContext) {
    context.mount(new MainServlet, "/*")
  }
}
| nfet/scalatra-demo | src/main/scala/com/lagnada/demo/scalatra/ScalatraBootstrap.scala | Scala | apache-2.0 | 232 |
package mesosphere.marathon
import java.util
import javax.inject.{ Inject, Named }
import akka.actor.{ ActorRef, ActorSystem }
import akka.event.EventStream
import com.fasterxml.jackson.databind.ObjectMapper
import mesosphere.marathon.MarathonSchedulerActor.ScaleApp
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.event._
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.state.{ AppDefinition, AppRepository, PathId, Timestamp }
import mesosphere.marathon.tasks._
import mesosphere.marathon.tasks.TaskQueue.QueuedTask
import mesosphere.mesos.util.FrameworkIdUtil
import mesosphere.mesos.{ TaskBuilder, protos }
import org.apache.log4j.Logger
import org.apache.mesos.Protos._
import org.apache.mesos.{ Scheduler, SchedulerDriver }
import scala.collection.JavaConverters._
import scala.collection.immutable.Seq
import scala.concurrent.{ Await, Future }
import scala.util.{ Failure, Success }
// Callbacks decoupling low-level scheduler events from the service layer.
trait SchedulerCallbacks {
  // Invoked when the framework loses its connection to the Mesos master.
  def disconnected(): Unit
}
object MarathonScheduler {
  private class MarathonSchedulerCallbacksImpl(
      serviceOption: Option[MarathonSchedulerService])
      extends SchedulerCallbacks {
    override def disconnected(): Unit = {
      // Abdicate leadership when we become disconnected from the Mesos master.
      serviceOption.foreach(_.abdicateLeadership())
    }
  }
  // Shared callback instance wired to the scheduler service resolved from
  // the global injector.
  val callbacks: SchedulerCallbacks = new MarathonSchedulerCallbacksImpl(
    Some(Main.injector.getInstance(classOf[MarathonSchedulerService]))
  )
}
// Marathon's implementation of the Mesos Scheduler callback interface:
// launches queued app tasks from resource offers, tracks task status, and
// publishes scheduler lifecycle events on the event bus.
class MarathonScheduler @Inject() (
    @Named(EventModule.busName) eventBus: EventStream,
    @Named("restMapper") mapper: ObjectMapper,
    @Named("schedulerActor") schedulerActor: ActorRef,
    appRepo: AppRepository,
    healthCheckManager: HealthCheckManager,
    taskTracker: TaskTracker,
    taskQueue: TaskQueue,
    frameworkIdUtil: FrameworkIdUtil,
    taskIdUtil: TaskIdUtil,
    system: ActorSystem,
    config: MarathonConf) extends Scheduler {
  private[this] val log = Logger.getLogger(getClass.getName)
  import mesosphere.mesos.protos.Implicits._
  import mesosphere.util.ThreadPoolContext.context
  implicit val zkTimeout = config.zkFutureTimeout
  // Mesos callback: first successful registration. Persists the assigned
  // framework id and announces the registration on the event bus.
  override def registered(
    driver: SchedulerDriver,
    frameworkId: FrameworkID,
    master: MasterInfo): Unit = {
    log.info(s"Registered as ${frameworkId.getValue} to master '${master.getId}'")
    frameworkIdUtil.store(frameworkId)
    eventBus.publish(SchedulerRegisteredEvent(frameworkId.getValue, master.getHostname))
  }
  // Mesos callback: reconnected to a (possibly new) master with the same
  // framework id; only announces the event.
  override def reregistered(driver: SchedulerDriver, master: MasterInfo): Unit = {
    log.info("Re-registered to %s".format(master))
    eventBus.publish(SchedulerReregisteredEvent(master.getHostname))
  }
  // Mesos callback: for each offer, tries to launch one matching queued task;
  // otherwise the offer is declined. Also reaps tasks stuck in staging and
  // drops queued tasks whose app definition is no longer current.
  override def resourceOffers(driver: SchedulerDriver, offers: java.util.List[Offer]): Unit = {
    // Check for any tasks which were started but never entered TASK_RUNNING
    // TODO resourceOffers() doesn't feel like the right place to run this
    val toKill = taskTracker.checkStagedTasks
    if (toKill.nonEmpty) {
      log.warn(s"There are ${toKill.size} tasks stuck in staging which will be killed")
      log.info(s"About to kill these tasks: $toKill")
      for (task <- toKill)
        driver.killTask(protos.TaskID(task.getId))
    }
    // remove queued tasks with stale (non-current) app definition versions
    val appVersions: Map[PathId, Timestamp] =
      Await.result(appRepo.currentAppVersions(), config.zkTimeoutDuration)
    taskQueue.retain {
      case QueuedTask(app, _) =>
        appVersions.get(app.id) contains app.version
    }
    for (offer <- offers.asScala) {
      try {
        log.debug("Received offer %s".format(offer))
        // Pull the first queued app whose requirements fit this offer.
        val matchingTask = taskQueue.pollMatching { app =>
          newTask(app, offer).map(app -> _)
        }
        matchingTask.foreach {
          case (app, (taskInfo, ports)) =>
            val marathonTask = MarathonTasks.makeTask(
              taskInfo.getTaskId.getValue, offer.getHostname, ports,
              offer.getAttributesList.asScala, app.version)
            log.debug("Launching task: " + taskInfo)
            // Record before launching so a status update can't race us.
            taskTracker.created(app.id, marathonTask)
            driver.launchTasks(Seq(offer.getId).asJava, util.Arrays.asList(taskInfo))
            // here it is assumed that the health checks for the current
            // version are already running.
        }
        if (matchingTask.isEmpty) {
          log.debug("Offer doesn't match request. Declining.")
          driver.declineOffer(offer.getId)
        }
      }
      catch {
        // NOTE(review): catching Throwable also swallows fatal errors (OOM,
        // etc.) — consider NonFatal; preserved as-is here.
        case t: Throwable =>
          log.error("Caught an exception. Declining offer.", t)
          // Ensure that we always respond
          driver.declineOffer(offer.getId)
      }
    }
  }
  // Mesos callback: an offer was withdrawn by the master; nothing to undo
  // because offers are consumed or declined synchronously above.
  override def offerRescinded(driver: SchedulerDriver, offer: OfferID): Unit = {
    log.info("Offer %s rescinded".format(offer))
  }
  // Mesos callback: routes task status updates to the health check manager
  // and task tracker, applies launch-rate backoff on failures, and kills
  // tasks the tracker does not know about.
  override def statusUpdate(driver: SchedulerDriver, status: TaskStatus): Unit = {
    log.info("Received status update for task %s: %s (%s)"
      .format(status.getTaskId.getValue, status.getState, status.getMessage))
    val appId = taskIdUtil.appId(status.getTaskId)
    // forward health changes to the health check manager
    val maybeTask = taskTracker.fetchTask(appId, status.getTaskId.getValue)
    for (marathonTask <- maybeTask)
      healthCheckManager.update(status, Timestamp(marathonTask.getVersion))
    import org.apache.mesos.Protos.TaskState._
    val killedForFailingHealthChecks =
      status.getState == TASK_KILLED && status.hasHealthy && !status.getHealthy
    // Back off further launches of this app after failures (incl. health
    // check kills).
    if (status.getState == TASK_ERROR || status.getState == TASK_FAILED || killedForFailingHealthChecks)
      appRepo.currentVersion(appId).foreach {
        _.foreach(taskQueue.rateLimiter.addDelay)
      }
    status.getState match {
      case TASK_ERROR | TASK_FAILED | TASK_FINISHED | TASK_KILLED | TASK_LOST =>
        // Remove from our internal list
        taskTracker.terminated(appId, status).foreach { taskOption =>
          taskOption match {
            case Some(task) => postEvent(status, task)
            case None => log.warn(s"Couldn't post event for ${status.getTaskId}")
          }
          // Ask the scheduler actor to restore the app's instance count.
          schedulerActor ! ScaleApp(appId, force = false)
        }
      case TASK_RUNNING if !maybeTask.exists(_.hasStartedAt) => // staged, not running
        taskTracker.running(appId, status).onComplete {
          case Success(task) =>
            // First successful run resets the launch backoff for the app.
            appRepo.app(appId, Timestamp(task.getVersion)).onSuccess {
              case maybeApp => maybeApp.foreach(taskQueue.rateLimiter.resetDelay)
            }
            postEvent(status, task)
          case Failure(t) =>
            log.warn(s"Couldn't post event for ${status.getTaskId}", t)
            log.warn(s"Killing task ${status.getTaskId}")
            driver.killTask(status.getTaskId)
        }
      case TASK_STAGING if !taskTracker.contains(appId) =>
        log.warn(s"Received status update for unknown app $appId")
        log.warn(s"Killing task ${status.getTaskId}")
        driver.killTask(status.getTaskId)
      case _ =>
        // Unknown task for a known app: the tracker answers None and we kill.
        taskTracker.statusUpdate(appId, status).onSuccess {
          case None =>
            log.warn(s"Killing task ${status.getTaskId}")
            driver.killTask(status.getTaskId)
        }
    }
  }
/** Mesos callback: an executor sent a custom framework message.
  * The raw bytes are republished on the internal event bus.
  *
  * NOTE: `message` is an Array[Byte], so the `%s` below formats the array's
  * default toString (type@hash), not the payload contents.
  */
override def frameworkMessage(
  driver: SchedulerDriver,
  executor: ExecutorID,
  slave: SlaveID,
  message: Array[Byte]): Unit = {
  log.info("Received framework message %s %s %s ".format(executor, slave, message))
  eventBus.publish(MesosFrameworkMessageEvent(executor.getValue, slave.getValue, message))
}
/** Applies a launch back-off to an app after one of its tasks was destroyed
  * for failing its health checks too many times, so replacements are not
  * respawned at full speed.
  */
def unhealthyTaskKilled(appId: PathId, taskId: String): Unit = {
  log.warn(s"Task [$taskId] for app [$appId] was killed for failing too many health checks")
  // currentVersion yields an async optional app; delay only when it exists.
  appRepo.currentVersion(appId).foreach(_.foreach(taskQueue.rateLimiter.addDelay))
}
/** Mesos callback: connection to the master was lost.
  * Publishes an event and notifies registered scheduler callbacks
  * (e.g. so leadership/HA machinery can react).
  */
override def disconnected(driver: SchedulerDriver) {
  log.warn("Disconnected")

  eventBus.publish(SchedulerDisconnectedEvent())

  // Disconnection from the Mesos master has occurred.
  // Thus, call the scheduler callbacks.
  MarathonScheduler.callbacks.disconnected()
}
/** Mesos callback: a slave (agent) was lost. Log-only; task cleanup happens
  * via the TASK_LOST status updates that follow.
  */
override def slaveLost(driver: SchedulerDriver, slave: SlaveID) {
  log.info(s"Lost slave $slave")
}
/** Mesos callback: an executor terminated.
  *
  * Fix: the original log line interpolated the integer exit status (`p4`)
  * where the slave id belonged (`"slave $p4"`), so the slave that lost the
  * executor was never logged. Both values are now reported, correctly labeled.
  *
  * @param p4 the executor's exit status as reported by Mesos
  */
override def executorLost(
  driver: SchedulerDriver,
  executor: ExecutorID,
  slave: SlaveID,
  p4: Int) {
  log.info(s"Lost executor $executor on slave $slave with status $p4")
}
/** Mesos callback: an unrecoverable scheduler/driver error occurred.
  * Logs the message and terminates the process via suicide().
  */
override def error(driver: SchedulerDriver, message: String) {
  log.warn("Error: %s".format(message))
  suicide()
}
/** Terminates the JVM with exit code 9. The exit call runs on a Future (using
  * the implicit execution context in scope) specifically to avoid deadlocking
  * with JVM shutdown hooks; the onFailure branch only fires if sys.exit itself
  * throws before the JVM goes down.
  */
private def suicide(): Unit = {
  log.fatal("Committing suicide")

  // Asynchronously call sys.exit() to avoid deadlock due to the JVM shutdown hooks
  Future {
    sys.exit(9)
  } onFailure {
    case t: Throwable => log.fatal("Exception while committing suicide", t)
  }
}
/** Publishes a MesosStatusUpdateEvent for the given task status on the
  * internal event bus, combining fields from the Mesos status and the
  * tracked Marathon task.
  */
private def postEvent(status: TaskStatus, task: MarathonTask): Unit = {
  log.info("Sending event notification.")
  // An absent Mesos message is reported as the empty string.
  val message = if (status.hasMessage) status.getMessage else ""
  val event = MesosStatusUpdateEvent(
    status.getSlaveId.getValue,
    status.getTaskId.getValue,
    status.getState.name,
    message,
    taskIdUtil.appId(status.getTaskId),
    task.getHost,
    task.getPortsList.asScala,
    task.getVersion
  )
  eventBus.publish(event)
}
/** Attempts to build a launchable TaskInfo (plus the host ports it claims)
  * for `app` out of `offer`; None when the offer cannot satisfy the app.
  */
private def newTask(
    app: AppDefinition,
    offer: Offer): Option[(TaskInfo, Seq[Long])] = {
  val builder = new TaskBuilder(app, taskIdUtil.newTaskId, taskTracker, config, mapper)
  builder.buildIfMatches(offer)
}
}
| drewrobb/marathon | src/main/scala/mesosphere/marathon/MarathonScheduler.scala | Scala | apache-2.0 | 9,582 |
package com.lambtors.poker_api.module.poker.application.flop
import com.lambtors.poker_api.infrastructure.command_bus.CommandHandler
import com.lambtors.poker_api.module.poker.domain.model.GameId
import com.lambtors.poker_api.module.shared.domain.Validation.Validation
/** Command handler that deals the flop cards for a game.
  *
  * Validates the raw game id carried by the command; when valid, delegates
  * the effectful work to the [[FlopCardsAdder]] inside the effect type P.
  */
final class AddFlopCardsToTableCommandHandler[P[_]](adder: FlopCardsAdder[P])
    extends CommandHandler[P, AddFlopCardsToTableCommand] {
  // GameId.fromString returns a Validation; mapping defers adder.add into P.
  def handle(command: AddFlopCardsToTableCommand): Validation[P[Unit]] =
    GameId.fromString(command.gameId).map(adder.add)
}
| lambtors/poker-api | src/main/scala/com/lambtors/poker_api/module/poker/application/flop/AddFlopCardsToTableCommandHandler.scala | Scala | mit | 537 |
package com.rasterfoundry.batch.geojsonImport
import com.rasterfoundry.batch.Job
import com.rasterfoundry.common.S3
import com.rasterfoundry.database.AnnotationDao
import com.rasterfoundry.database.GeojsonUploadDao
import com.rasterfoundry.database.UserDao
import com.rasterfoundry.database.util.RFTransactor
import com.rasterfoundry.datamodel._
import cats.effect._
import cats.implicits._
import com.amazonaws.services.s3.AmazonS3URI
import com.typesafe.scalalogging.LazyLogging
import com.typesafe.scalalogging.LazyLogging
import doobie.ConnectionIO
import doobie.implicits._
import io.circe.parser.decode
import scala.util._
import java.net.URLDecoder
import java.nio.charset.StandardCharsets
import java.util.UUID
import java.util.UUID
/** Batch job that imports annotations from GeoJSON files uploaded to S3.
  *
  * Invocation: the first CLI argument is the UUID of the GeojsonUpload record
  * describing which files to fetch and which project/layer/group to write to.
  */
object ImportGeojsonFiles extends Job with LazyLogging {
  val name = "import_geojson_files"

  // Shared transactor for every database operation performed by this job.
  val xa = RFTransactor.nonHikariTransactor(RFTransactor.TransactorConfig())

  /** Downloads each file referenced by the upload from S3 and parses it as a
    * feature collection, returning all parsed annotation creates.
    * Fails fast (throws) on the first decode error.
    */
  def processUploadToAnnotations(
      upload: GeojsonUpload
  ): List[Annotation.Create] = {
    val s3Client = S3()
    // download and parse files
    upload.files.flatMap { uri =>
      {
        logger.info(s"Downloading file: ${uri}")
        // Only s3 uris are currently supported.
        val s3Uri = new AmazonS3URI(URLDecoder.decode(uri, "utf-8"))
        val s3Object = s3Client.getObject(s3Uri.getBucket, s3Uri.getKey)
        val geojsonString =
          new String(S3.getObjectBytes(s3Object), StandardCharsets.UTF_8)
        logger.info("Annotations downloaded")
        logger.info("Parsing annotations")
        val annotations =
          decode[AnnotationFeatureCollectionCreate](geojsonString) match {
            case Right(fc) => fc.features.map(_.toAnnotationCreate).toList
            case Left(e) => throw e
          }
        logger.info(s"${annotations.size} annotations parsed")
        annotations
      }
    }
  }

  /** Inserts the annotations in batches of 1000 (see parameter-limit note
    * below), tagging each with the upload's annotation group, and returns the
    * total number inserted.
    */
  def insertAnnotations(
      annotations: List[Annotation.Create],
      upload: GeojsonUpload
  ): ConnectionIO[Int] = {
    // https://makk.es/blog/postgresql-parameter-limitation/
    // max # of interpolated params in postgres driver = 32,767 (2 byte int)
    // each annotation = 17 params
    // 32,767 / 17 = 1927 annotations / batch. Cut that in half to 1000 to be safe.
    for {
      user <- UserDao.unsafeGetUserById(upload.createdBy)
      inserted <- annotations
        .grouped(1000)
        .toList
        .traverse(annotationBatch => {
          val updatedAnnotationBatch =
            annotationBatch
              .map(_.copy(annotationGroup = Some(upload.annotationGroup)))
          AnnotationDao
            .insertAnnotations(
              updatedAnnotationBatch,
              upload.projectId,
              user,
              Some(upload.projectLayerId)
            )
            .map(_.size)
        })
        .map { _.foldLeft(0)(_ + _) }
    } yield inserted
  }

  /** Entry point: looks up the upload by the id in args and runs the
    * download/parse/insert pipeline; throws when no such upload exists.
    */
  def runJob(args: List[String]): IO[Unit] = {
    val uploadIdO = args.headOption.map(UUID.fromString(_))
    for {
      uploadO <- uploadIdO match {
        case Some(id) => GeojsonUploadDao.getUploadById(id).transact(xa)
        case _ => Option.empty.pure[IO]
      }
      inserts <- uploadO match {
        case Some(upload) =>
          val annotations = processUploadToAnnotations(upload)
          insertAnnotations(annotations, upload).transact(xa)
        case _ =>
          throw new RuntimeException(
            s"No geojson upload found with id: ${uploadIdO}"
          )
      }
    } yield {
      logger.info(s"Uploaded ${inserts} annotations")
    }
  }
}
| raster-foundry/raster-foundry | app-backend/batch/src/main/scala/geojsonImport/ImportGeojsonFiles.scala | Scala | apache-2.0 | 3,554 |
package com.marmoush.scalasamples.hackerrank.functional.recursion
import scala.annotation.tailrec
/** Scratch solution for HackerRank's "The Power Sum"-style problem: counts the
  * ways x can be written as a sum of distinct n-th powers of natural numbers.
  * The commented-out println calls are the author's debugging trail and are
  * preserved as-is.
  */
object SumsOfPowers {
  def main(args: Array[String]): Unit = {
    val x = 100 // total 1:1000
    val n = 2 // exp 2:10
    // Largest base whose n-th power can still fit in x.
    val max = math.pow(x, (1d / n)).toInt
    // println(max)
    // All candidate powers 1^n .. max^n.
    val pow = 1
      .to(max)
      .map {
        math.pow(_, n).toInt
      }
      .toList
    // println("Power:" + pow)
    // Prefix sums of the candidate powers (tail-recursive accumulation).
    def sums(c: List[Int]) = {
      @tailrec
      def f(c: List[Int], res: List[Int] = List(0)): List[Int] = c match {
        //println(s"op: $res + $x = ${res + x}")
        case Nil => res
        case _ => f(c.tail, c.head + res.head :: res)
      }
      f(c).reverse.tail
    }
    val sum = 0 :: sums(pow)
    // println("Sum:" + sum)
    // Index of the smallest power at which prefix-sum + power exceeds x
    // (maxBy over a Boolean picks the first `true`) — used as a search cutoff.
    val minimaIndex =
      (sum, pow).zipped.map(_ + _).zipWithIndex.maxBy(_._1 > x)._2
    val minima = pow(minimaIndex)
    // println("minima:" + minima)
    // All combinations that start with `head` and whose remaining elements
    // (drawn from `tee`, all smaller than total - head) sum to total - head.
    def oneComb(total: Int, head: Int, tee: List[Int]): List[List[Int]] = {
      val min = total - head
      val c = tee.dropWhile {
        _ > min
      }
      // println(s"Searching total($total)-($head) - sum of any combination in $c")
      1.to(c.length).map {
        c.combinations(_)
          .filter {
            _.sum == min
          }
          .toList
          .map {
            head :: _
          }
      }
    }.flatten.toList
    val powRev = pow.reverse
    // Accumulates oneComb results for each leading power >= minima.
    def allCombs(x: Int, minima: Int, l: List[Int], res: List[List[Int]]): List[List[Int]] = l match {
      case Nil => res
      case l if (l.head < minima) => res
      case _ =>
        allCombs(x, minima: Int, l.tail, oneComb(x, l.head, l.tail) ++ res)
    }
    val r = allCombs(x, minima, powRev, Nil)
    // The single-term representation (max^n == x) is counted separately.
    val p = if (math.pow(max, n).toInt == x) r.length + 1 else r.length
    println(p)
    // println("PowRev"+powRev)
    // def combs(x: Int, powRev: List[Int], res: List[List[Int]]): List[List[Int]] = powRev match {
    // case Nil => res
    // case _ => {
    // println("head"+powRev.head)
    // val m = combs(x, powRev.tail, oneComb(x - powRev.head, powRev.tail) ++ res)
    // println("m"+m)
    // m.filter { x - powRev.head - _.sum == 0 }
    // }
    // }
    // println("------------------")
    // val ccc=combs(x,powRev,Nil)
    // println(ccc)
    // // val combs1 = oneComb(x - powRev.head, powRev.tail)
    // // println(combs1.filter { x - powRev.head - _.sum == 0 })
    //
    // // minima.until(pow.length).map { x => ??? }
  }
  // Unused helper exploring binomial coefficients; kept for reference.
  def calc = {
    @tailrec
    def factorial(n: Int, accumulator: Long = 1): Long = {
      if (n == 0) accumulator else factorial(n - 1, (accumulator * n))
    }
    def ncr(n: Int, r: Int) = factorial(n) / (factorial(r) * factorial(n - r))
    def allNcr(n: Int) = {
      for (x <- 1 until n) yield ncr(n, x)
    }.sum
    // println(allNcr(10))
    // 2.to(10).foreach { x => println(allNcr(x)) }
  }
}
| IsmailMarmoush/scala-samples | scalalang/src/main/scala/com/marmoush/scalasamples/hackerrank/functional/recursion/SumsOfPowers.scala | Scala | agpl-3.0 | 2,972 |
package autolift.cats
import autolift._
/** Aggregation trait: mixes every applicative-lift arity (LiftA2..LiftA21)
  * context and its re-export companion into a single package trait, so that
  * one mixin brings all liftA syntax into scope.
  */
trait `LiftA*Package` extends LiftA2Context
  with LiftA3Context
  with LiftA4Context
  with LiftA5Context
  with LiftA6Context
  with LiftA7Context
  with LiftA8Context
  with LiftA9Context
  with LiftA10Context
  with LiftA11Context
  with LiftA12Context
  with LiftA13Context
  with LiftA14Context
  with LiftA15Context
  with LiftA16Context
  with LiftA17Context
  with LiftA18Context
  with LiftA19Context
  with LiftA20Context
  with LiftA21Context
  with LiftA2Reexport
  with LiftA3Reexport
  with LiftA4Reexport
  with LiftA5Reexport
  with LiftA6Reexport
  with LiftA7Reexport
  with LiftA8Reexport
  with LiftA9Reexport
  with LiftA10Reexport
  with LiftA11Reexport
  with LiftA12Reexport
  with LiftA13Reexport
  with LiftA14Reexport
  with LiftA15Reexport
  with LiftA16Reexport
  with LiftA17Reexport
  with LiftA18Reexport
  with LiftA19Reexport
  with LiftA20Reexport
with LiftA21Reexport | wheaties/AutoLifts | autolift-cats/src/main/scala/autolift/cats/LiftA*.scala | Scala | apache-2.0 | 948 |
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package org.scala_tools.javautils.s2j
/** Wrapper whose Java-facing view is the wrapper itself: `asScala` returns the
  * underlying value and `asJava` returns this. (`Wrapper`/`Wrapped`/`underlying`
  * come from the base trait, not visible here — confirm direction there.)
  */
trait SWrapper extends Wrapper {
  // Describes the wrapper flavor to the base Wrapper machinery.
  protected val wrapperType = "Java"

  def asScala: Wrapped = underlying
  def asJava: this.type = this
}
| jorgeortiz85/scala-javautils | src/main/scala/org/scala_tools/javautils/s2j/SWrapper.scala | Scala | apache-2.0 | 777 |
/**
* Created by lrocha on 09-Jan-17.
*/
//TODO create the tests here for the functions in the exercises
| leomrocha/ScalaTraining | day-003_tc/src/test/scala/hogsSpec.scala | Scala | mit | 110 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.columnar
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, Attribute, AttributeMap, AttributeReference}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/** Attribute schema under which one column's statistics (closed bounds, null
  * count, row count, byte size) are exposed, derived from the column's own
  * attribute name and data type.
  */
private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
  val upperBound = AttributeReference(a.name + ".upperBound", a.dataType, nullable = true)()
  val lowerBound = AttributeReference(a.name + ".lowerBound", a.dataType, nullable = true)()
  val nullCount = AttributeReference(a.name + ".nullCount", IntegerType, nullable = false)()
  val count = AttributeReference(a.name + ".count", IntegerType, nullable = false)()
  val sizeInBytes = AttributeReference(a.name + ".sizeInBytes", LongType, nullable = false)()

  // Order matters: consumers read (lower, upper, nullCount, count, sizeInBytes).
  val schema = Seq(lowerBound, upperBound, nullCount, count, sizeInBytes)
}

/** Maps every attribute of a table schema to its statistics schema, and
  * exposes the flattened sequence of all statistics attributes.
  */
private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
  val (forAttribute, schema) = {
    val allStats = tableSchema.map(a => a -> new ColumnStatisticsSchema(a))
    (AttributeMap(allStats), allStats.map(_._2.schema).foldLeft(Seq.empty[Attribute])(_ ++ _))
  }
}
/**
 * Used to collect statistical information when building in-memory columns.
 *
 * Subclasses call `gatherStats` once per row and report the accumulated
 * statistics via `collectedStatistics`.
 *
 * NOTE: we intentionally avoid using `Ordering[T]` to compare values here because `Ordering[T]`
 * brings significant performance penalty.
 */
private[sql] sealed trait ColumnStats extends Serializable {
  // Total rows observed so far, nulls included.
  protected var count = 0
  // Number of null values observed so far.
  protected var nullCount = 0
  // Accumulated byte size of the observed values.
  protected var sizeInBytes = 0L

  /**
   * Gathers statistics information from `row(ordinal)`.
   */
  def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    if (row.isNullAt(ordinal)) {
      nullCount += 1
      // 4 bytes for null position
      sizeInBytes += 4
    }
    count += 1
  }

  /**
   * Column statistics represented as a single row, currently including closed lower bound, closed
   * upper bound and null count.
   */
  def collectedStatistics: GenericInternalRow
}
/**
 * A no-op ColumnStats only used for testing purposes.
 */
private[sql] class NoopColumnStats extends ColumnStats {
  // Delegates to the base class: only row/null counting happens.
  override def gatherStats(row: InternalRow, ordinal: Int): Unit = super.gatherStats(row, ordinal)

  // No bounds are tracked; size is reported as zero.
  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](null, null, nullCount, count, 0L))
}

/** Min/max/size statistics for boolean columns (ordering: false < true). */
private[sql] class BooleanColumnStats extends ColumnStats {
  // Seeded so that any observed value replaces them on first comparison.
  protected var upper = false
  protected var lower = true

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getBoolean(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += BOOLEAN.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
/** Min/max/size statistics for byte columns. Bounds are seeded with the
  * type's extreme values so the first observed value replaces both.
  */
private[sql] class ByteColumnStats extends ColumnStats {
  protected var upper = Byte.MinValue
  protected var lower = Byte.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getByte(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += BYTE.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for short columns. */
private[sql] class ShortColumnStats extends ColumnStats {
  protected var upper = Short.MinValue
  protected var lower = Short.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getShort(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += SHORT.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for int columns. */
private[sql] class IntColumnStats extends ColumnStats {
  protected var upper = Int.MinValue
  protected var lower = Int.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getInt(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += INT.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for long columns. */
private[sql] class LongColumnStats extends ColumnStats {
  protected var upper = Long.MinValue
  protected var lower = Long.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getLong(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += LONG.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for float columns. */
private[sql] class FloatColumnStats extends ColumnStats {
  protected var upper = Float.MinValue
  protected var lower = Float.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getFloat(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += FLOAT.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for double columns. */
private[sql] class DoubleColumnStats extends ColumnStats {
  protected var upper = Double.MinValue
  protected var lower = Double.MaxValue

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getDouble(ordinal)
      if (value > upper) upper = value
      if (value < lower) lower = value
      sizeInBytes += DOUBLE.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
/** Min/max/size statistics for string columns; bounds start as null and are
  * set from the first non-null value, then compared via UTF8String.compareTo.
  */
private[sql] class StringColumnStats extends ColumnStats {
  protected var upper: UTF8String = null
  protected var lower: UTF8String = null

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getUTF8String(ordinal)
      if (upper == null || value.compareTo(upper) > 0) upper = value
      if (lower == null || value.compareTo(lower) < 0) lower = value
      sizeInBytes += STRING.actualSize(row, ordinal)
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Size-only statistics for binary columns; no ordering, so bounds stay null. */
private[sql] class BinaryColumnStats extends ColumnStats {
  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      sizeInBytes += BINARY.actualSize(row, ordinal)
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](null, null, nullCount, count, sizeInBytes))
}

/** Min/max/size statistics for fixed-precision decimal columns. */
private[sql] class FixedDecimalColumnStats(precision: Int, scale: Int) extends ColumnStats {
  protected var upper: Decimal = null
  protected var lower: Decimal = null

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      val value = row.getDecimal(ordinal, precision, scale)
      if (upper == null || value.compareTo(upper) > 0) upper = value
      if (lower == null || value.compareTo(lower) < 0) lower = value
      sizeInBytes += FIXED_DECIMAL.defaultSize
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}

/** Size-only statistics for arbitrary data types; bounds stay null. */
private[sql] class GenericColumnStats(dataType: DataType) extends ColumnStats {
  val columnType = GENERIC(dataType)

  override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
    super.gatherStats(row, ordinal)
    if (!row.isNullAt(ordinal)) {
      sizeInBytes += columnType.actualSize(row, ordinal)
    }
  }

  override def collectedStatistics: GenericInternalRow =
    new GenericInternalRow(Array[Any](null, null, nullCount, count, sizeInBytes))
}

// Date columns reuse the int statistics; timestamp columns reuse the long ones.
private[sql] class DateColumnStats extends IntColumnStats

private[sql] class TimestampColumnStats extends LongColumnStats
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala | Scala | apache-2.0 | 9,661 |
package com.outr.stripe.bank
import scala.scalajs.js
/** Scala.js facade over Stripe's bank-account JavaScript API; every member is
  * implemented by the external library (js.native).
  */
@js.native
trait StripeBank extends js.Object {
  // Tokenizes bank details; the handler receives an HTTP-style status code and the token info.
  def createToken(info: StripeBankInfo, responseHandler: js.Function2[Int, BankTokenInfo, Unit]): Unit = js.native

  // Client-side validity checks for routing/account numbers in a given country.
  def validateRoutingNumber(number: String, country: String): Boolean = js.native
  def validateAccountNumber(number: String, country: String): Boolean = js.native
} | outr/scala-stripe | core/js/src/main/scala/com/outr/stripe/bank/StripeBank.scala | Scala | mit | 383 |
package kadai
import org.specs2.{ ScalaCheck, Specification }
import org.scalacheck.Prop
/** Property-based checks for kadai's TraversableOptional syntax: the safe,
  * Option-returning variants of tail/head/init/last plus notEmpty.
  */
class TraversableOptionalSpec extends Specification with ScalaCheck {
  def is = s2"""
  TraversableOptionalSyntax should
    add .tailOption $addTailOption
    add .headTailOption $addHeadTailOption
    add .initOption $addInitOption
    add .initLastOption $addInitLastOption
    add .notEmpty $addNotEmpty
  """

  // tailOption: None on empty, Some(tail) otherwise.
  def addTailOption = Prop.forAll { ls: List[String] =>
    (ls, ls.tailOption) must beLike {
      case (List(), None) => ok
      case (_ :: as, Some(bs)) => as === bs
    }
  }

  // headTailOption: None on empty, Some((head, tail)) otherwise.
  def addHeadTailOption = Prop.forAll { ls: List[String] =>
    (ls, ls.headTailOption) must beLike {
      case (Nil, None) => ok
      case (a :: as, Some((b, bs))) => (a === b) and (as === bs)
    }
  }

  // initOption: None on empty, Some(init) otherwise.
  def addInitOption = Prop.forAll { ls: List[String] =>
    (ls, ls.initOption) must beLike {
      case (List(), None) => ok
      case (as, Some(bs)) => as.init === bs
    }
  }

  // initLastOption: None on empty, Some((init, last)) otherwise.
  def addInitLastOption = Prop.forAll { ls: List[String] =>
    (ls, ls.initLastOption) must beLike {
      case (Nil, None) => ok
      case (as, Some((bs, b))) => (as.last === b) and (as.init === bs)
    }
  }

  val array = Array(2)

  // notEmpty: None for empty collections/strings, Some(self) otherwise.
  def addNotEmpty =
    List[String]().notEmpty === None and
      List(1).notEmpty === Some(List(1)) and
      Vector[String]().notEmpty === None and
      Vector(3).notEmpty === Some(Vector(3)) and
      Array[String]().notEmpty === None and
      array.notEmpty === Some(array) and
      "".notEmpty === None and
      "fred".notEmpty === Some("fred")
}
} | simpleenergy/kadai | core/src/test/scala/kadai/TraversableOptionalSpec.scala | Scala | apache-2.0 | 1,714 |
package org.jetbrains.plugins.scala
package codeInspection.relativeImports
import com.intellij.codeInspection.{LocalQuickFix, ProblemDescriptor, ProblemsHolder}
import com.intellij.openapi.project.Project
import com.intellij.psi.{PsiElement, PsiPackage}
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection}
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createReferenceFromText
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
/**
 * Inspection that flags import expressions resolving to a nested package or a
 * top-level object through a relative (not fully qualified) path, offering
 * quick fixes to qualify the import and/or enable the code-style setting.
 *
 * @author Alefas
 * @since 14.09.12
 */
class RelativeImportInspection extends AbstractInspection("RelativeImport", "Relative Import") {
  import org.jetbrains.plugins.scala.codeInspection.relativeImports.RelativeImportInspection.qual

  override def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
    case expr: ScImportExpr if expr.qualifier != null =>
      // Walk to the outermost qualifier and resolve every candidate target.
      val q = qual(expr.qualifier)
      val resolve = q.multiResolve(false)
      for (elem <- resolve) {
        // Registers the problem together with the applicable quick fixes.
        def applyProblem(qualifiedName: String) {
          val fixes = new ArrayBuffer[LocalQuickFix]()
          if (!ScalaCodeStyleSettings.getInstance(q.getProject).isAddFullQualifiedImports) {
            fixes += new EnableFullQualifiedImports()
          }
          fixes += new MakeFullQualifiedImportFix(q, qualifiedName)
          holder.registerProblem(q, "Relative import detected", fixes: _*)
        }
        elem match {
          // Only qualified names with a dot can actually be relative.
          case ScalaResolveResult(p: PsiPackage, _) if p.getQualifiedName.contains(".") =>
            applyProblem(p.getQualifiedName)
          case ScalaResolveResult(c: ScObject, _) if c.isTopLevel && c.qualifiedName.contains(".") =>
            applyProblem(c.qualifiedName)
          case _ =>
        }
      }
  }
}
object RelativeImportInspection {
  /** Returns the outermost (leftmost) qualifier of a stable reference chain,
    * or the reference itself when it has none. Iterative rewrite of the
    * original tail recursion; behavior is identical.
    */
  def qual(st: ScStableCodeReferenceElement): ScStableCodeReferenceElement = {
    var current = st
    var next = current.qualifier
    while (next.isDefined) {
      current = next.get
      next = current.qualifier
    }
    current
  }
}
/** Quick fix that turns on the "add fully qualified imports" code-style
  * setting project-wide.
  */
private class EnableFullQualifiedImports extends LocalQuickFix {
  override def getName: String = getFamilyName

  override def getFamilyName: String = "Enable full qualified imports"

  override def applyFix(project: Project, descriptor: ProblemDescriptor): Unit = {
    ScalaCodeStyleSettings.getInstance(project).setAddFullQualifiedImports(true)
  }
}
/** Quick fix that rewrites the relative reference `q` into the fully
  * qualified name `fqn`, prefixing `_root_.` whenever the plain fqn would
  * still resolve relatively from the import's context.
  */
private class MakeFullQualifiedImportFix(q: ScStableCodeReferenceElement, fqn: String)
    extends AbstractFixOnPsiElement(ScalaBundle.message("make.import.fully.qualified"), q) {

  override def doApplyFix(project: Project): Unit = {
    val ref = getElement
    if (ref == null || !ref.isValid) return
    // Build a reference for fqn in the original context to see what it resolves to.
    val newRef = createReferenceFromText(fqn, ref.getContext, ref)
    import org.jetbrains.plugins.scala.codeInspection.relativeImports.RelativeImportInspection.qual
    // Anchor at _root_ unless the bare fqn already resolves to a top-level package.
    val newFqn = qual(newRef).resolve() match {
      case p: PsiPackage if p.getQualifiedName.contains(".") => "_root_." + fqn
      case _: PsiPackage => fqn
      case _ => "_root_." + fqn
    }
    ref.replace(createReferenceFromText(newFqn)(ref.getManager))
  }
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/relativeImports/RelativeImportInspection.scala | Scala | apache-2.0 | 3,468 |
package c3.w1
/**
 * Computes p-norms over integer arrays, structured to show how the work could
 * be split across parallel tasks ("4 processes" or recursive halving).
 */
class PNorm {
  // Sequential stand-in for parallel execution: evaluates both by-name
  // arguments and returns them as a pair.
  def parallel[A, B](dummy1: => A, dummy2: => B): (A, B) = (dummy1, dummy2)
/** Sum over indices [s, t) of each element raised to the p-th power, with
  * each term truncated to Int before summing. Out-of-range bounds are clamped
  * by `slice`, exactly as in the original fold-based version.
  */
def sumSegments(a: Array[Int], p: Double, s: Int, t: Int): Int =
  a.slice(s, t).map(x => Math.pow(x, p).toInt).sum
/** p-norm of `a` computed by splitting the array into four quarters whose
  * power sums could run in parallel, then taking the 1/p-th root (truncated).
  *
  * Fix: the first segment was `sumSegments(a, p, 0, 0)` — an empty range —
  * so the entire first quarter of the array was silently dropped from the
  * norm. It now correctly covers [0, mid1).
  */
def fourProcesses(a: Array[Int], p: Double): Int = {
  val mid1 = a.length / 4
  val mid2 = a.length / 2
  val mid3 = a.length / 2 + a.length / 4
  val ((part1, part2), (part3, part4)) =
    parallel(
      parallel(sumSegments(a, p, 0, mid1), sumSegments(a, p, mid1, mid2)),
      parallel(sumSegments(a, p, mid2, mid3), sumSegments(a, p, mid3, a.length))
    )
  Math.pow(part1 + part2 + part3 + part4, 1 / p).toInt
}
// Minimum segment length at or below which work is done sequentially.
val threshold = 0

/** Sum of p-th powers over a(s until t), recursively halving the range so
  * the halves could run in parallel. Note: unlike `fourProcesses`, this
  * returns the raw power sum without taking the 1/p-th root.
  *
  * Fix: with `threshold = 0` the original guard `t - s < threshold` could
  * never fire, so a unit-length range split into (s, s) and (s, t) and
  * recursed forever. The base case now also triggers once a range can no
  * longer be split (length <= 1).
  */
def anyProcesses(a: Array[Int], p: Double, s: Int, t: Int): Int = {
  if (t - s <= math.max(threshold, 1))
    sumSegments(a, p, s, t)
  else {
    val m = s + (t - s) / 2
    val (sum1, sum2) = parallel(anyProcesses(a, p, s, m), anyProcesses(a, p, m, t))
    sum1 + sum2
  }
}
} | lwo/lwo.github.io | src/main/scala/c3/w1/PNorm.scala | Scala | gpl-3.0 | 1,103 |
package controllers
import play.api.mvc._
import play.twirl.api.Html
import lila.api.Context
import lila.app._
import lila.game.{ GameRepo, Game => GameModel, Pov }
import views._
/** Controller for Lichess TV: channel pages, featured-game lists, streamer
  * pages, the SSE feed, stream configuration, and the embeddable widget.
  */
object Tv extends LilaController {

  // Default TV page shows the "Best" channel.
  def index = onChannel(lila.tv.Tv.Channel.Best.key)

  def onChannel(chanKey: String) = Open { implicit ctx =>
    (lila.tv.Tv.Channel.byKey get chanKey).fold(notFound)(lichessTv)
  }

  // Side panel data (champions + crosstable) for the given game/color.
  def sides(chanKey: String, gameId: String, color: String) = Open { implicit ctx =>
    lila.tv.Tv.Channel.byKey get chanKey match {
      case None => notFound
      case Some(channel) =>
        OptionFuResult(GameRepo.pov(gameId, color)) { pov =>
          Env.tv.tv.getChampions zip
            Env.game.crosstableApi(pov.game) map {
              case (champions, crosstable) => Ok(html.tv.sides(channel, champions, pov, crosstable, streams = Nil))
            }
        }
    }
  }

  // Renders the channel's current game; ?flip=1 shows the second player's side.
  private def lichessTv(channel: lila.tv.Tv.Channel)(implicit ctx: Context) =
    OptionFuResult(Env.tv.tv getGame channel) { game =>
      val flip = getBool("flip")
      val pov = flip.fold(Pov second game, Pov first game)
      val onTv = lila.round.OnTv(channel.key, flip)
      negotiate(
        html = {
          Env.api.roundApi.watcher(pov, lila.api.Mobile.Api.currentVersion, tv = onTv.some) zip
            Env.game.crosstableApi(game) zip
            Env.tv.tv.getChampions map {
              case ((data, cross), champions) => NoCache {
                Ok(html.tv.index(channel, champions, pov, data, cross, flip))
              }
            }
        },
        api = apiVersion => Env.api.roundApi.watcher(pov, apiVersion, tv = onTv.some) map { Ok(_) }
      )
    }

  def games = gamesChannel(lila.tv.Tv.Channel.Best.key)

  def gamesChannel(chanKey: String) = Open { implicit ctx =>
    (lila.tv.Tv.Channel.byKey get chanKey).fold(notFound)(lichessGames)
  }

  // Grid of up to 9 currently featured games for a channel.
  private def lichessGames(channel: lila.tv.Tv.Channel)(implicit ctx: Context) =
    Env.tv.tv.getChampions zip
      Env.tv.tv.getGames(channel, 9) map {
        case (champs, games) => NoCache {
          Ok(html.tv.games(channel, games map lila.game.Pov.first, champs))
        }
      }

  // Streamer page: live stream if on air, otherwise a "not streaming" page.
  def streamIn(id: String) = Open { implicit ctx =>
    OptionFuResult(Env.tv.streamerList find id) { streamer =>
      Env.tv.streamsOnAir.all flatMap { streams =>
        val others = streams.filter(_.id != id)
        streams find (_.id == id) match {
          case None => fuccess(Ok(html.tv.notStreaming(streamer, others)))
          case Some(s) => fuccess(Ok(html.tv.stream(s, others)))
        }
      }
    }
  }

  // Server-sent-events stream of TV updates, backed by the round TvBroadcast actor.
  def feed = Action.async {
    import makeTimeout.short
    import akka.pattern.ask
    import lila.round.TvBroadcast
    import play.api.libs.EventSource
    implicit val encoder = play.api.libs.Comet.CometMessage.jsonMessages
    Env.round.tvBroadcast ? TvBroadcast.GetEnumerator mapTo
      manifest[TvBroadcast.EnumeratorType] map { enum =>
        Ok.chunked(enum &> EventSource()).as("text/event-stream")
      }
  }

  // Shows the streamer-list configuration form (authenticated users).
  def streamConfig = Auth { implicit ctx =>
    me =>
      Env.tv.streamerList.store.get.map { text =>
        Ok(html.tv.streamConfig(Env.tv.streamerList.form.fill(text)))
      }
  }

  // Persists the streamer-list configuration and logs the mod action.
  def streamConfigSave = SecureBody(_.StreamConfig) { implicit ctx =>
    me =>
      implicit val req = ctx.body
      FormFuResult(Env.tv.streamerList.form) { err =>
        fuccess(html.tv.streamConfig(err))
      } { text =>
        Env.tv.streamerList.store.set(text) >>
          Env.mod.logApi.streamConfig(me.id) inject Redirect(routes.Tv.streamConfig)
      }
  }

  // JS snippet that document.write's the TV iframe into third-party pages.
  // NOTE(review): `bg`/`theme` come straight from the query string into the
  // generated JavaScript/URL — confirm they are sanitized upstream.
  def embed = Action { req =>
    Ok {
      val bg = get("bg", req) | "light"
      val theme = get("theme", req) | "brown"
      val url = s"""${req.domain + routes.Tv.frame}?bg=$bg&theme=$theme"""
      s"""document.write("<iframe src='http://$url&embed=" + document.domain + "' class='lichess-tv-iframe' allowtransparency='true' frameBorder='0' style='width: 224px; height: 264px;' title='Lichess free online chess'></iframe>");"""
    } as JAVASCRIPT withHeaders (CACHE_CONTROL -> "max-age=86400")
  }

  // Content of the embedded iframe: the current best game.
  def frame = Action.async { req =>
    Env.tv.tv.getBest map {
      case None => NotFound
      case Some(game) => Ok(views.html.tv.embed(
        Pov first game,
        get("bg", req) | "light",
        lila.pref.Theme(~get("theme", req)).cssClass
      ))
    }
  }
}
| JimmyMow/lila | app/controllers/Tv.scala | Scala | mit | 4,363 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.workbench.lift.snippet
import xml.NodeSeq
import net.liftweb.util.Helpers._
/**
 * Lift snippet rendering the toolbar of the "generate links" page.
 * Currently it only binds a Help button linking to the Silk evaluation wiki.
 */
class GenerateLinksToolbar {
  // Replaces the <entry:help/> placeholder in the template `xhtml` with a
  // Help link that opens the Silk evaluation wiki page in a separate tab.
  def render(xhtml : NodeSeq) : NodeSeq = {
    bind("entry", xhtml,
         "help" -> <a id="button" href="http://www.assembla.com/spaces/silk/wiki/Evaluation" target="_help">Help</a>
    )
  }
}
| fusepoolP3/p3-silk | silk-workbench-outdated/src/main/scala/de/fuberlin/wiwiss/silk/workbench/lift/snippet/GenerateLinksToolbar.scala | Scala | apache-2.0 | 902 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap}
import org.apache.spark.sql.catalyst.plans.{Cross, Inner, PlanTest}
import org.apache.spark.sql.catalyst.plans.logical.{ColumnStat, LocalRelation, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.catalyst.statsEstimation.{StatsEstimationTestBase, StatsTestPlan}
import org.apache.spark.sql.internal.SQLConf.{CBO_ENABLED, JOIN_REORDER_ENABLED}
/**
 * Tests for cost-based join reordering ([[CostBasedJoinReorder]]) combined with the
 * heuristic [[ReorderJoin]] rule. Table statistics are injected through
 * [[StatsTestPlan]], so the estimated costs — and therefore the join orders the
 * optimizer picks — are fully deterministic. CBO and join reordering are
 * force-enabled in [[beforeAll]] and restored in [[afterAll]].
 */
class JoinReorderSuite extends PlanTest with StatsEstimationTestBase {

  // Full optimizer pipeline used by the tests: hint resolution, the standard
  // operator optimizations (run to a fixed point), then one cost-based
  // join-reorder pass.
  object Optimize extends RuleExecutor[LogicalPlan] {
    val batches =
      Batch("Resolve Hints", Once,
        EliminateResolvedHint) ::
      Batch("Operator Optimizations", FixedPoint(100),
        CombineFilters,
        PushDownPredicate,
        ReorderJoin,
        PushPredicateThroughJoin,
        ColumnPruning,
        CollapseProject) ::
      Batch("Join Reorder", Once,
        CostBasedJoinReorder) :: Nil
  }

  // Minimal pipeline used to normalize the hand-written expected plans:
  // they only need hint resolution, not full optimization.
  object ResolveHints extends RuleExecutor[LogicalPlan] {
    val batches =
      Batch("Resolve Hints", Once,
        EliminateResolvedHint) :: Nil
  }

  // Saved SQL-conf flag values, restored in afterAll.
  var originalConfCBOEnabled = false
  var originalConfJoinReorderEnabled = false

  override def beforeAll(): Unit = {
    super.beforeAll()
    originalConfCBOEnabled = conf.cboEnabled
    originalConfJoinReorderEnabled = conf.joinReorderEnabled
    conf.setConf(CBO_ENABLED, true)
    conf.setConf(JOIN_REORDER_ENABLED, true)
  }

  override def afterAll(): Unit = {
    try {
      conf.setConf(CBO_ENABLED, originalConfCBOEnabled)
      conf.setConf(JOIN_REORDER_ENABLED, originalConfJoinReorderEnabled)
    } finally {
      super.afterAll()
    }
  }

  // Per-column statistics. The attribute names encode the value range:
  // e.g. "t1.k-1-2" is a key column of t1 with distinct values in [1, 2].
  private val columnInfo: AttributeMap[ColumnStat] = AttributeMap(Seq(
    attr("t1.k-1-2") -> rangeColumnStat(2, 0),
    attr("t1.v-1-10") -> rangeColumnStat(10, 0),
    attr("t2.k-1-5") -> rangeColumnStat(5, 0),
    attr("t3.v-1-100") -> rangeColumnStat(100, 0),
    attr("t4.k-1-2") -> rangeColumnStat(2, 0),
    attr("t4.v-1-10") -> rangeColumnStat(10, 0),
    attr("t5.k-1-5") -> rangeColumnStat(5, 0),
    attr("t5.v-1-5") -> rangeColumnStat(5, 0)
  ))

  private val nameToAttr: Map[String, Attribute] = columnInfo.map(kv => kv._1.name -> kv._1)
  private val nameToColInfo: Map[String, (Attribute, ColumnStat)] =
    columnInfo.map(kv => kv._1.name -> kv)

  // Table t1/t4: big table with two columns
  private val t1 = StatsTestPlan(
    outputList = Seq("t1.k-1-2", "t1.v-1-10").map(nameToAttr),
    rowCount = 1000,
    // size = rows * (overhead + column length)
    size = Some(1000 * (8 + 4 + 4)),
    attributeStats = AttributeMap(Seq("t1.k-1-2", "t1.v-1-10").map(nameToColInfo)))

  private val t4 = StatsTestPlan(
    outputList = Seq("t4.k-1-2", "t4.v-1-10").map(nameToAttr),
    rowCount = 2000,
    size = Some(2000 * (8 + 4 + 4)),
    attributeStats = AttributeMap(Seq("t4.k-1-2", "t4.v-1-10").map(nameToColInfo)))

  // Table t2/t3: small table with only one column
  private val t2 = StatsTestPlan(
    outputList = Seq("t2.k-1-5").map(nameToAttr),
    rowCount = 20,
    size = Some(20 * (8 + 4)),
    attributeStats = AttributeMap(Seq("t2.k-1-5").map(nameToColInfo)))

  private val t3 = StatsTestPlan(
    outputList = Seq("t3.v-1-100").map(nameToAttr),
    rowCount = 100,
    size = Some(100 * (8 + 4)),
    attributeStats = AttributeMap(Seq("t3.v-1-100").map(nameToColInfo)))

  // Table t5: small table with two columns
  // NOTE(review): size uses 20 * (8 + 4) although t5 has two columns, while the
  // other two-column tables use rows * (8 + 4 + 4) — confirm this is intended.
  private val t5 = StatsTestPlan(
    outputList = Seq("t5.k-1-5", "t5.v-1-5").map(nameToAttr),
    rowCount = 20,
    size = Some(20 * (8 + 4)),
    attributeStats = AttributeMap(Seq("t5.k-1-5", "t5.v-1-5").map(nameToColInfo)))

  test("reorder 3 tables") {
    val originalPlan =
      t1.join(t2).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
        (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))

    // The cost of original plan (use only cardinality to simplify explanation):
    // cost = cost(t1 J t2) = 1000 * 20 / 5 = 4000
    // In contrast, the cost of the best plan:
    // cost = cost(t1 J t3) = 1000 * 100 / 100 = 1000 < 4000
    // so (t1 J t3) J t2 is better (has lower cost, i.e. intermediate result size) than
    // the original order (t1 J t2) J t3.
    val bestPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(outputsOf(t1, t2, t3): _*)

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("put unjoinable item at the end and reorder 3 joinable tables") {
    // The ReorderJoin rule puts the unjoinable item at the end, and then CostBasedJoinReorder
    // reorders other joinable items.
    val originalPlan =
      t1.join(t2).join(t4).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
        (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))

    val bestPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(outputsOf(t1, t2, t3): _*) // this is redundant but we'll take it for now
        .join(t4)
        .select(outputsOf(t1, t2, t4, t3): _*)

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("reorder 3 tables with pure-attribute project") {
    val originalPlan =
      t1.join(t2).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
        (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.v-1-10"))

    val bestPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.k-1-2"), nameToAttr("t1.v-1-10"))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(nameToAttr("t1.v-1-10"))

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("reorder 3 tables - one of the leaf items is a project") {
    val originalPlan =
      t1.join(t5).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t5.k-1-5")) &&
        (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.v-1-10"))

    // Items: t1, t3, project(t5.k-1-5, t5)
    val bestPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.k-1-2"), nameToAttr("t1.v-1-10"))
        .join(t5.select(nameToAttr("t5.k-1-5")), Inner,
          Some(nameToAttr("t1.k-1-2") === nameToAttr("t5.k-1-5")))
        .select(nameToAttr("t1.v-1-10"))

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("don't reorder if project contains non-attribute") {
    // Reordering must be skipped when an intermediate project computes a new
    // expression ("key"), since moving joins across it could change semantics.
    val originalPlan =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select((nameToAttr("t1.k-1-2") + nameToAttr("t2.k-1-5")) as "key", nameToAttr("t1.v-1-10"))
        .join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select("key".attr)

    assertEqualPlans(originalPlan, originalPlan)
  }

  test("reorder 4 tables (bushy tree)") {
    val originalPlan =
      t1.join(t4).join(t2).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t4.k-1-2")) &&
        (nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
        (nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100")))

    // The cost of original plan (use only cardinality to simplify explanation):
    // cost(t1 J t4) = 1000 * 2000 / 2 = 1000000, cost(t1t4 J t2) = 1000000 * 20 / 5 = 4000000,
    // cost = cost(t1 J t4) + cost(t1t4 J t2) = 5000000
    // In contrast, the cost of the best plan (a bushy tree):
    // cost(t1 J t2) = 1000 * 20 / 5 = 4000, cost(t4 J t3) = 2000 * 100 / 100 = 2000,
    // cost = cost(t1 J t2) + cost(t4 J t3) = 6000 << 5000000.
    val bestPlan =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .join(t4.join(t3, Inner, Some(nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100"))),
          Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t4.k-1-2")))
        .select(outputsOf(t1, t4, t2, t3): _*)

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("keep the order of attributes in the final output") {
    // Whatever the projected attribute order, reordering must preserve it.
    val outputLists = Seq("t1.k-1-2", "t1.v-1-10", "t3.v-1-100").permutations
    while (outputLists.hasNext) {
      val expectedOrder = outputLists.next().map(nameToAttr)
      val expectedPlan =
        t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
          .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
          .select(expectedOrder: _*)
      // The plan should not change after optimization
      assertEqualPlans(expectedPlan, expectedPlan)
    }
  }

  test("SPARK-26352: join reordering should not change the order of attributes") {
    // This test case does not rely on CBO.
    // It's similar to the test case above, but catches a reordering bug that the one above doesn't
    val tab1 = LocalRelation('x.int, 'y.int)
    val tab2 = LocalRelation('i.int, 'j.int)
    val tab3 = LocalRelation('a.int, 'b.int)
    val original =
      tab1.join(tab2, Cross)
        .join(tab3, Inner, Some('a === 'x && 'b === 'i))
    val expected =
      tab1.join(tab3, Inner, Some('a === 'x))
        .join(tab2, Cross, Some('b === 'i))
        .select(outputsOf(tab1, tab2, tab3): _*)

    assertEqualPlans(original, expected)
  }

  test("reorder recursively") {
    // Original order:
    //          Join
    //          / \
    //      Union   t5
    //       / \
    //     Join  t4
    //     / \
    //   Join  t3
    //   / \
    //  t1  t2
    val bottomJoins =
      t1.join(t2).join(t3).where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
        (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.v-1-10"))

    val originalPlan = bottomJoins
      .union(t4.select(nameToAttr("t4.v-1-10")))
      .join(t5, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t5.v-1-5")))

    // Should be able to reorder the bottom part.
    // Best order:
    //          Join
    //          / \
    //      Union   t5
    //       / \
    //     Join  t4
    //     / \
    //   Join  t2
    //   / \
    //  t1  t3
    val bestBottomPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .select(nameToAttr("t1.k-1-2"), nameToAttr("t1.v-1-10"))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(nameToAttr("t1.v-1-10"))

    val bestPlan = bestBottomPlan
      .union(t4.select(nameToAttr("t4.v-1-10")))
      .join(t5, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t5.v-1-5")))

    assertEqualPlans(originalPlan, bestPlan)
  }

  test("don't reorder if hints present") {
    // A broadcast hint pins the shape of the hinted subtree, so reordering is skipped.
    val originalPlan =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .hint("broadcast")
        .join(
          t4.join(t3, Inner, Some(nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100")))
            .hint("broadcast"),
          Inner,
          Some(nameToAttr("t1.k-1-2") === nameToAttr("t4.k-1-2")))

    assertEqualPlans(originalPlan, originalPlan)

    val originalPlan2 =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .hint("broadcast")
        .join(t4, Inner, Some(nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100")))
        .hint("broadcast")
        .join(t3, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t4.k-1-2")))

    assertEqualPlans(originalPlan2, originalPlan2)

    val originalPlan3 =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .join(t4).hint("broadcast")
        .join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t5, Inner, Some(nameToAttr("t5.v-1-5") === nameToAttr("t3.v-1-100")))

    assertEqualPlans(originalPlan3, originalPlan3)
  }

  test("reorder below and above the hint node") {
    // Joins strictly below or strictly above a hinted node may still be reordered.
    val originalPlan =
      t1.join(t2).join(t3)
        .where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
          (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .hint("broadcast").join(t4)

    val bestPlan =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(outputsOf(t1, t2, t3): _*)
        .hint("broadcast").join(t4)

    assertEqualPlans(originalPlan, bestPlan)

    val originalPlan2 =
      t1.join(t2).join(t3)
        .where((nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")) &&
          (nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t4.hint("broadcast"))

    val bestPlan2 =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(outputsOf(t1, t2, t3): _*)
        .join(t4.hint("broadcast"))

    assertEqualPlans(originalPlan2, bestPlan2)

    val originalPlan3 =
      t1.join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .hint("broadcast")
        .join(t4, Inner, Some(nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t5, Inner, Some(nameToAttr("t5.v-1-5") === nameToAttr("t3.v-1-100")))

    val bestPlan3 =
      t1.join(t3, Inner, Some(nameToAttr("t1.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t2, Inner, Some(nameToAttr("t1.k-1-2") === nameToAttr("t2.k-1-5")))
        .select(outputsOf(t1, t2, t3): _*)
        .hint("broadcast")
        .join(t4, Inner, Some(nameToAttr("t4.v-1-10") === nameToAttr("t3.v-1-100")))
        .join(t5, Inner, Some(nameToAttr("t5.v-1-5") === nameToAttr("t3.v-1-100")))

    assertEqualPlans(originalPlan3, bestPlan3)
  }

  // Runs the full Optimize pipeline on `originalPlan` and checks the result has
  // the same join order as the hand-written `groundTruthBestPlan` (after hint
  // resolution), as well as the same output attributes as the input.
  private def assertEqualPlans(
      originalPlan: LogicalPlan,
      groundTruthBestPlan: LogicalPlan): Unit = {
    val analyzed = originalPlan.analyze
    val optimized = Optimize.execute(analyzed)
    val expected = ResolveHints.execute(groundTruthBestPlan.analyze)
    assert(analyzed.sameOutput(expected)) // if this fails, the expected plan itself is incorrect
    assert(analyzed.sameOutput(optimized))
    compareJoinOrder(optimized, expected)
  }

  // Concatenates the output attributes of the given plans, in order.
  // NOTE(review): reduce throws on an empty argument list; all call sites pass
  // at least one plan.
  private def outputsOf(plans: LogicalPlan*): Seq[Attribute] = {
    plans.map(_.output).reduce(_ ++ _)
  }
}
| WindCanDie/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinReorderSuite.scala | Scala | apache-2.0 | 15,467 |
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scray.querying.queries
import java.util.UUID
import scray.querying.description.{Clause, Columns, ColumnGrouping, ColumnOrdering, QueryRange, TableIdentifier}
import scray.querying.Query
/**
 * Immutable, value-based implementation of [[Query]].
 *
 * Defaults are chosen so that a `SimpleQuery(space, table)` means
 * "select all columns, all rows, no grouping/ordering/range".
 *
 * @param space    name of the query space the query runs in
 * @param table    identifier of the table to query
 * @param id       unique id of this query instance (fresh random UUID by default)
 * @param columns  columns to return; `Columns(Left(true))` selects all columns
 * @param where    optional filter clause
 * @param grouping optional grouping specification
 * @param ordering optional ordering specification
 * @param range    optional result range (paging/limit)
 */
case class SimpleQuery(
    space: String,
    table: TableIdentifier,
    id: UUID = UUID.randomUUID,
    columns: Columns = Columns(Left(true)), // simple default meaning all columns
    where: Option[Clause] = None, // None means no conditions specified, return all results
    grouping: Option[ColumnGrouping] = None, // None means no grouping
    ordering: Option[ColumnOrdering[_]] = None, // None means ordering undefined
    range: Option[QueryRange] = None // None means no range specified
  ) extends Query {

  def getQueryID: UUID = id
  override def getQueryspace: String = space
  override def getResultSetColumns: Columns = columns
  override def getTableIdentifier: TableIdentifier = table
  override def getWhereAST: Option[Clause] = where
  override def getGrouping: Option[ColumnGrouping] = grouping
  override def getOrdering: Option[ColumnOrdering[_]] = ordering
  override def getQueryRange: Option[QueryRange] = range

  // Returns a copy of this query with the where-AST replaced; all other fields
  // (including the query id) are preserved.
  override def transformedAstCopy(ast: Option[Clause]): SimpleQuery = SimpleQuery(space, table, id, columns, ast, grouping, ordering, range)
}
| scray/scray | scray-querying/modules/scray-querying/src/main/scala/scray/querying/queries/SimpleQuery.scala | Scala | apache-2.0 | 1,976 |
package io.iohk.ethereum.mpt
/**
 * Hex-prefix (compact) encoding of nibble sequences, as used by the Merkle
 * Patricia trie: a flag nibble carries the leaf/extension marker and the
 * parity of the payload, so the result always packs into whole bytes.
 */
object HexPrefix {

  /**
   * Packs a nibble sequence into bytes, prepending the hex-prefix flag.
   *
   * @param nibbles sequence of 4-bit values
   * @param isLeaf  true when the encoded data belongs to a LeafNode,
   *                false for an ExtensionNode
   * @return hex-prefix encoded byte array
   */
  def encode(nibbles: Array[Byte], isLeaf: Boolean): Array[Byte] = {
    val oddLength = (nibbles.length & 1) == 1
    // Flag nibble layout: bit 1 = leaf marker, bit 0 = odd payload length.
    val flagNibble = ((if (isLeaf) 2 else 0) + (if (oddLength) 1 else 0)).toByte
    // Odd payloads need only the flag nibble to reach even length; even
    // payloads additionally get a zero padding nibble.
    val prefixed =
      if (oddLength) flagNibble +: nibbles
      else Array[Byte](flagNibble, 0) ++ nibbles
    nibblesToBytes(prefixed)
  }

  /**
   * Reverses [[encode]]: strips the flag (and padding) nibbles and reports
   * whether the data belonged to a LeafNode.
   *
   * @param src hex-prefix encoded bytes
   * @return the decoded nibbles together with the leaf flag
   */
  def decode(src: Array[Byte]): (Array[Byte], Boolean) = {
    val nibbles = bytesToNibbles(bytes = src)
    val isLeaf = (nibbles(0) & 2) != 0
    val oddLength = (nibbles(0) & 1) != 0
    // One flag nibble for odd payloads, flag plus padding nibble for even ones.
    val headerNibbles = if (oddLength) 1 else 2
    (nibbles.drop(headerNibbles), isLeaf)
  }

  /**
   * Splits every byte into its high and low nibble.
   * Kept as an index-based while loop over a preallocated array for speed.
   *
   * @param bytes input bytes
   * @return array with each individual nibble
   */
  def bytesToNibbles(bytes: Array[Byte]): Array[Byte] = {
    val out = new Array[Byte](bytes.length * 2)
    var i = 0
    while (i < bytes.length) {
      val b = bytes(i)
      out(2 * i) = ((b >> 4) & 0xf).toByte
      out(2 * i + 1) = (b & 0xf).toByte
      i += 1
    }
    out
  }

  /**
   * Joins nibble pairs back into bytes (inverse of [[bytesToNibbles]]).
   * Kept as an index-based while loop over a preallocated array for speed.
   *
   * @param nibbles input nibbles; must have even length
   * @return array with bytes combining pairs of nibbles
   */
  def nibblesToBytes(nibbles: Array[Byte]): Array[Byte] = {
    require(nibbles.length % 2 == 0)
    val out = new Array[Byte](nibbles.length / 2)
    var j = 0
    while (j < out.length) {
      out(j) = ((nibbles(2 * j) << 4) + nibbles(2 * j + 1)).toByte
      j += 1
    }
    out
  }
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/mpt/HexPrefix.scala | Scala | mit | 2,769 |
package forimpatient.chapter11
/**
* Created by Iryna Kharaborkina on 8/9/16.
*
* Solution to the Chapter 11 Exercise 09 'Scala for the Impatient' by Horstmann C.S.
*
* Define an unapply operation for the RichFile class that extracts the file path, name, and extension.
* For example, the file /home/cay/readme.txt has path /home/cay, name readme, and extension txt.
*/
object Exercise09 extends App {
  println("Chapter 11 Exercise 09")

  // Destructure a sample path via the custom extractor below.
  // (Keeps the original member name "extention" for interface compatibility.)
  val RichFile(path, name, extention) = "/home/cay/readme.txt"
  println(path)      // /home/cay
  println(name)      // readme
  println(extention) // txt

  object RichFile {
    /**
     * Splits a path string into (directory path, base name, extension).
     *
     * A '.' only starts an extension when it occurs after the last '/', so a
     * dot inside a directory name (e.g. "/ho.me/readme") is not mistaken for
     * an extension marker. (The previous version computed a reversed
     * substring range in that case and threw StringIndexOutOfBoundsException.)
     * Missing components are returned as empty strings.
     */
    def unapply(fullPath: String): Option[(String, String, String)] = {
      val slashIndex = fullPath.lastIndexOf('/')
      val dotIndex = fullPath.lastIndexOf('.')
      // The dot marks an extension only if it lies in the final path segment.
      val hasExtension = dotIndex > slashIndex
      val path = if (slashIndex == -1) "" else fullPath.substring(0, slashIndex)
      val name =
        if (hasExtension) fullPath.substring(slashIndex + 1, dotIndex)
        else fullPath.substring(slashIndex + 1)
      val extension = if (hasExtension) fullPath.substring(dotIndex + 1) else ""
      Some((path, name, extension))
    }
  }
}
| Kiryna/Scala-for-the-Impatient | src/forimpatient/chapter11/Exercise09.scala | Scala | apache-2.0 | 1,161 |
package eventstore
package akka
package cluster
import _root_.akka.actor.ActorSystem
import java.net.InetSocketAddress
import scala.concurrent._
import sttp.model._
import sttp.client3._
import sttp.client3.circe._
import eventstore.core.cluster.ClusterInfo
/**
 * Builds a function that fetches [[ClusterInfo]] from an EventStore node's
 * HTTP gossip endpoint (`/gossip?format=json`).
 */
private[eventstore] object ClusterInfoOf {

  // A lookup function: node address -> eventual cluster gossip information.
  type FutureFunc = InetSocketAddress => Future[ClusterInfo]

  /**
   * Creates the gossip lookup function.
   *
   * @param useTls whether to talk https (and configure the sttp backend for TLS)
   * @param system actor system providing the dispatcher used as ExecutionContext
   */
  def apply(useTls: Boolean)(implicit system: ActorSystem): FutureFunc = {

    import CirceDecoders._
    import system.dispatcher

    // Backend is created once and shared by every lookup the returned function performs.
    val sttp = Http.mkSttpFutureBackend(useTls, system)

    def clusterInfo(address: InetSocketAddress): Future[ClusterInfo] = {
      val scheme = if(useTls) "https" else "http"
      val host = address.getHostString
      val port = address.getPort
      val uri = uri"$scheme://$host:$port/gossip?format=json"

      // getRight: a failed JSON decode fails the Future instead of returning Left.
      basicRequest
        .get(uri)
        .contentType(MediaType.ApplicationJson)
        .response(asJson[ClusterInfo].getRight)
        .send(sttp)
        .map(_.body)
    }

    clusterInfo
  }
}
| EventStore/EventStore.JVM | client/src/main/scala/eventstore/akka/cluster/ClusterInfoOf.scala | Scala | bsd-3-clause | 1,026 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import java.nio.ByteBuffer
import scala.collection.JavaConverters._
import scala.concurrent.{Future, Promise}
import scala.reflect.ClassTag
import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.network._
import org.apache.spark.network.buffer.ManagedBuffer
import org.apache.spark.network.client.{RpcResponseCallback, TransportClientBootstrap, TransportClientFactory}
import org.apache.spark.network.crypto.{AuthClientBootstrap, AuthServerBootstrap}
import org.apache.spark.network.server._
import org.apache.spark.network.shuffle.{BlockFetchingListener, OneForOneBlockFetcher, RetryingBlockFetcher}
import org.apache.spark.network.shuffle.protocol.UploadBlock
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.storage.{BlockId, StorageLevel}
import org.apache.spark.util.Utils
/**
* A BlockTransferService that uses Netty to fetch a set of blocks at time.
*/
/**
 * A BlockTransferService that uses Netty to fetch a set of blocks at time.
 */
private[spark] class NettyBlockTransferService(
    conf: SparkConf,
    securityManager: SecurityManager,
    bindAddress: String,
    override val hostName: String,
    _port: Int,
    numCores: Int)
  extends BlockTransferService {

  // TODO: Don't use Java serialization, use a more cross-version compatible serialization format.
  private val serializer = new JavaSerializer(conf)
  private val authEnabled = securityManager.isAuthenticationEnabled()
  private val transportConf = SparkTransportConf.fromSparkConf(conf, "shuffle", numCores)

  // All four fields below are populated by init(); they are unusable before that.
  private[this] var transportContext: TransportContext = _
  private[this] var server: TransportServer = _
  private[this] var clientFactory: TransportClientFactory = _
  private[this] var appId: String = _

  // Sets up the RPC handler, optional SASL/auth bootstraps, the client factory
  // and the listening server. Must be called exactly once before any transfer.
  override def init(blockDataManager: BlockDataManager): Unit = {
    val rpcHandler = new NettyBlockRpcServer(conf.getAppId, serializer, blockDataManager)
    var serverBootstrap: Option[TransportServerBootstrap] = None
    var clientBootstrap: Option[TransportClientBootstrap] = None
    if (authEnabled) {
      serverBootstrap = Some(new AuthServerBootstrap(transportConf, securityManager))
      clientBootstrap = Some(new AuthClientBootstrap(transportConf, conf.getAppId, securityManager))
    }
    transportContext = new TransportContext(transportConf, rpcHandler)
    clientFactory = transportContext.createClientFactory(clientBootstrap.toSeq.asJava)
    server = createServer(serverBootstrap.toList)
    appId = conf.getAppId
    logInfo(s"Server created on ${hostName}:${server.getPort}")
  }

  /** Creates and binds the TransportServer, possibly trying multiple ports. */
  private def createServer(bootstraps: List[TransportServerBootstrap]): TransportServer = {
    def startService(port: Int): (TransportServer, Int) = {
      val server = transportContext.createServer(bindAddress, port, bootstraps.asJava)
      (server, server.getPort)
    }

    Utils.startServiceOnPort(_port, startService, conf, getClass.getName)._1
  }

  // Asynchronously fetches a set of blocks from a remote executor, reporting
  // results (or failures) through `listener`. Retries are delegated to
  // RetryingBlockFetcher when spark's max IO retries is positive.
  override def fetchBlocks(
      host: String,
      port: Int,
      execId: String,
      blockIds: Array[String],
      listener: BlockFetchingListener): Unit = {
    logTrace(s"Fetch blocks from $host:$port (executor id $execId)")
    try {
      val blockFetchStarter = new RetryingBlockFetcher.BlockFetchStarter {
        override def createAndStart(blockIds: Array[String], listener: BlockFetchingListener) {
          val client = clientFactory.createClient(host, port)
          new OneForOneBlockFetcher(client, appId, execId, blockIds.toArray, listener).start()
        }
      }

      val maxRetries = transportConf.maxIORetries()
      if (maxRetries > 0) {
        // Note this Fetcher will correctly handle maxRetries == 0; we avoid it just in case there's
        // a bug in this code. We should remove the if statement once we're sure of the stability.
        new RetryingBlockFetcher(transportConf, blockFetchStarter, blockIds, listener).start()
      } else {
        blockFetchStarter.createAndStart(blockIds, listener)
      }
    } catch {
      case e: Exception =>
        logError("Exception while beginning fetchBlocks", e)
        // Fail every requested block so the caller is notified for each of them.
        blockIds.foreach(listener.onBlockFetchFailure(_, e))
    }
  }

  override def port: Int = server.getPort

  // Uploads a single block to a remote host; completes the returned Future
  // when the remote side acknowledges (or rejects) the upload.
  override def uploadBlock(
      hostname: String,
      port: Int,
      execId: String,
      blockId: BlockId,
      blockData: ManagedBuffer,
      level: StorageLevel,
      classTag: ClassTag[_]): Future[Unit] = {
    val result = Promise[Unit]()
    val client = clientFactory.createClient(hostname, port)

    // StorageLevel and ClassTag are serialized as bytes using our JavaSerializer.
    // Everything else is encoded using our binary protocol.
    val metadata = JavaUtils.bufferToArray(serializer.newInstance().serialize((level, classTag)))

    // Convert or copy nio buffer into array in order to serialize it.
    val array = JavaUtils.bufferToArray(blockData.nioByteBuffer())

    client.sendRpc(new UploadBlock(appId, execId, blockId.toString, metadata, array).toByteBuffer,
      new RpcResponseCallback {
        override def onSuccess(response: ByteBuffer): Unit = {
          logTrace(s"Successfully uploaded block $blockId")
          result.success((): Unit)
        }
        override def onFailure(e: Throwable): Unit = {
          logError(s"Error while uploading block $blockId", e)
          result.failure(e)
        }
      })

    result.future
  }

  // Shuts down the server and client factory; safe to call before init()
  // (both references are null-checked).
  override def close(): Unit = {
    if (server != null) {
      server.close()
    }
    if (clientFactory != null) {
      clientFactory.close()
    }
  }
}
| MLnick/spark | core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala | Scala | apache-2.0 | 6,414 |
package org.phillipgreenii.codedependencytracker
import java.net.URI
object UriEntity {
  /**
   * Builds a [[UriEntity]] from a raw URI string.
   * The string is parsed with java.net.URI and normalized ("." and ".."
   * segments resolved), so equivalent URIs yield the same entity id.
   */
  def fromUriString(uriString: String): UriEntity = {
    new UriEntity(URI.create(uriString).normalize())
  }
}
/** An [[Entity]] identified by a URI; the entity id is the URI's string form. */
class UriEntity(val uri: URI) extends Entity {
  type Self = UriEntity
  def id(): String = uri.toString
}
| phillipgreenii/code-dependency-tracker | src/main/scala/org/phillipgreenii/codedependencytracker/UriEntity.scala | Scala | mit | 312 |
package com.github.tminglei.slickpg
package array
import scala.reflect.ClassTag
import slick.driver.{PostgresDriver, JdbcTypesComponent}
import slick.profile.RelationalProfile.ColumnOption.Length
import java.sql.{ResultSet, PreparedStatement}
trait PgArrayJdbcTypes extends JdbcTypesComponent { driver: PostgresDriver =>
@deprecated(message = "use 'new SimpleArrayJdbcType[T](..).to[SEQ[T]](..)' instead", since = "0.7.1")
class SimpleArrayListJdbcType[T](sqlBaseType: String)(
implicit override val classTag: ClassTag[List[T]], tag: ClassTag[T])
extends WrappedConvArrayJdbcType[T, List](
new SimpleArrayJdbcType(sqlBaseType), _.toList) {
def basedOn[U](tmap: T => U, tcomap: U => T): DriverJdbcType[List[T]] =
delegate.asInstanceOf[SimpleArrayJdbcType[T]].basedOn(tmap, tcomap).to(_.toList)
}
//
class SimpleArrayJdbcType[T](sqlBaseType: String)(
implicit override val classTag: ClassTag[Seq[T]], tag: ClassTag[T])
extends DriverJdbcType[Seq[T]] {
override def sqlType: Int = java.sql.Types.ARRAY
override def sqlTypeName(size: Option[Length]): String = s"$sqlBaseType ARRAY"
override def getValue(r: ResultSet, idx: Int): Seq[T] = {
val value = r.getArray(idx)
if (r.wasNull) null else value.getArray.asInstanceOf[Array[Any]].map(_.asInstanceOf[T])
}
override def setValue(vList: Seq[T], p: PreparedStatement, idx: Int): Unit = p.setArray(idx, mkArray(vList))
override def updateValue(vList: Seq[T], r: ResultSet, idx: Int): Unit = r.updateArray(idx, mkArray(vList))
override def hasLiteralForm: Boolean = false
override def valueToSQLLiteral(vList: Seq[T]) = if(vList eq null) "NULL" else s"'${buildArrayStr(vList)}'"
//--
private def mkArray(v: Seq[T]): java.sql.Array = utils.SimpleArrayUtils.mkArray(buildArrayStr)(sqlBaseType, v)
protected def buildArrayStr(vList: Seq[Any]): String = utils.SimpleArrayUtils.mkString[Any](_.toString)(vList)
///
def basedOn[U](tmap: T => U, tcomap: U => T): SimpleArrayJdbcType[T] =
new SimpleArrayJdbcType[T](sqlBaseType) {
override def getValue(r: ResultSet, idx: Int): Seq[T] = {
val value = r.getArray(idx)
if (r.wasNull) null else value.getArray.asInstanceOf[Array[Any]]
.map(e => tcomap(e.asInstanceOf[U]))
}
//--
override protected def buildArrayStr(v: Seq[Any]): String = super.buildArrayStr(v.map(e => tmap(e.asInstanceOf[T])))
}
def to[SEQ[T] <: Seq[T]](conv: Seq[T] => SEQ[T])(implicit classTag: ClassTag[SEQ[T]]): DriverJdbcType[SEQ[T]] =
new WrappedConvArrayJdbcType[T, SEQ](this, conv)
}
/* alias, added for back compatible */
@deprecated(message = "use AdvancedArrayListJdbcType instead", since = "0.6.5")
type NestedArrayListJdbcType[T] = AdvancedArrayListJdbcType[T]
///-- can be used to map complex composite/nested array
@deprecated(message = "use 'new AdvancedArrayJdbcType[T](..).to[SEQ[T]](..)' instead", since = "0.7.1")
class AdvancedArrayListJdbcType[T](sqlBaseType: String,
fromString: (String => List[T]),
mkString: (List[T] => String))(
implicit override val classTag: ClassTag[List[T]], tag: ClassTag[T])
extends WrappedConvArrayJdbcType[T, List](
new AdvancedArrayJdbcType(sqlBaseType, fromString, v => mkString(v.toList)), _.toList)
//
class AdvancedArrayJdbcType[T](sqlBaseType: String,
fromString: (String => Seq[T]),
mkString: (Seq[T] => String))(
implicit override val classTag: ClassTag[Seq[T]], tag: ClassTag[T])
extends DriverJdbcType[Seq[T]] {
override def sqlType: Int = java.sql.Types.ARRAY
override def sqlTypeName(size: Option[Length]): String = s"$sqlBaseType ARRAY"
override def getValue(r: ResultSet, idx: Int): Seq[T] = {
val value = r.getString(idx)
if (r.wasNull) null else fromString(value)
}
override def setValue(vList: Seq[T], p: PreparedStatement, idx: Int): Unit = p.setArray(idx, mkArray(vList))
override def updateValue(vList: Seq[T], r: ResultSet, idx: Int): Unit = r.updateArray(idx, mkArray(vList))
override def hasLiteralForm: Boolean = false
override def valueToSQLLiteral(vList: Seq[T]) = if(vList eq null) "NULL" else s"'${mkString(vList)}'"
//--
private def mkArray(v: Seq[T]): java.sql.Array = utils.SimpleArrayUtils.mkArray(mkString)(sqlBaseType, v)
def to[SEQ[T] <: Seq[T]](conv: Seq[T] => SEQ[T])(implicit classTag: ClassTag[SEQ[T]]): DriverJdbcType[SEQ[T]] =
new WrappedConvArrayJdbcType[T, SEQ](this, conv)
}
/////////////////////////////////////////////////////////////////////////////////////////////
  /** Adapts a Seq-based array JdbcType to a specific Seq subtype SEQ by
    * applying `conv` on read; all other operations delegate unchanged
    * (Seq is covariant, so SEQ values pass straight through on write). */
  private[array] class WrappedConvArrayJdbcType[T, SEQ[T] <: Seq[T]](val delegate: DriverJdbcType[Seq[T]], val conv: Seq[T] => SEQ[T])(
            implicit override val classTag: ClassTag[SEQ[T]], tag: ClassTag[T]) extends DriverJdbcType[SEQ[T]] {

    override def sqlType: Int = delegate.sqlType
    override def sqlTypeName(size: Option[Length]): String = delegate.sqlTypeName(size)
    // Option(...) preserves the delegate's null-for-SQL-NULL convention.
    override def getValue(r: ResultSet, idx: Int): SEQ[T] = Option(delegate.getValue(r, idx)).map(conv).getOrElse(null.asInstanceOf[SEQ[T]])
    override def setValue(vList: SEQ[T], p: PreparedStatement, idx: Int): Unit = delegate.setValue(vList, p, idx)
    override def updateValue(vList: SEQ[T], r: ResultSet, idx: Int): Unit = delegate.updateValue(vList, r, idx)
    override def hasLiteralForm: Boolean = delegate.hasLiteralForm
    override def valueToSQLLiteral(vList: SEQ[T]) = delegate.valueToSQLLiteral(Option(vList).orNull)
  }
}
| rajbharath/slick-pg | core/src/main/scala/com/github/tminglei/slickpg/array/PgArrayJdbcTypes.scala | Scala | bsd-2-clause | 5,844 |
package tmvault.util
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.util.zip._
import scala.annotation.tailrec
// NOTE(review): despite the name, nothing here is thread-local; fresh streams
// are created per call, which already makes every method thread-safe.
object ThreadLocalCompressionUtil {

  /** Compresses `data` using the GZIP framing format. */
  def gzip(data: Array[Byte]): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val compressor = new GZIPOutputStream(out)
    compressor.write(data)
    compressor.close()
    out.toByteArray()
  }

  /** Decompresses GZIP-framed `data`. */
  def gunzip(data: Array[Byte]): Array[Byte] =
    drainAll(new GZIPInputStream(new ByteArrayInputStream(data)))

  /** Compresses `data` using the zlib/DEFLATE format. */
  def deflate(data: Array[Byte]): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val compressor = new DeflaterOutputStream(out)
    compressor.write(data)
    compressor.close()
    out.toByteArray()
  }

  /** Decompresses zlib/DEFLATE-framed `data`. */
  def inflate(data: Array[Byte]): Array[Byte] =
    drainAll(new InflaterInputStream(new ByteArrayInputStream(data)))

  // Shared read loop for both decompression paths (previously duplicated in
  // gunzip and inflate): drains the stream into a byte array, always closing
  // the stream even if a read throws.
  private def drainAll(decompressor: java.io.InputStream): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val buffer = new Array[Byte](1024)
    @tailrec
    def copy(): Unit = {
      val n = decompressor.read(buffer)
      if (n >= 0) {
        out.write(buffer, 0, n)
        copy()
      }
    }
    try copy()
    finally decompressor.close()
    out.toByteArray()
  }
}
package com.rgoarchitects.example
import java.util.Calendar
/**
* User: arnonrgo
* Date: 12/7/14
* Time: 10:36 AM
*/
object CrmCall{
  /**
   * Parses a single tab-separated CRM-call record into a [[CrmCall]].
   *
   * Column layout: 0 userId, 1 callTime (epoch millis), 2 duration,
   * 3 issue, 4 experience, 5 team, 6 rep, 7 resolved, 8 eventType,
   * 9 comments. The year/month/day/hour fields are denormalized from
   * callTime using the JVM default time zone; month is 1-based.
   *
   * Throws NumberFormatException / ArrayIndexOutOfBoundsException on
   * malformed or truncated lines (no validation is performed).
   */
  def apply(line: String) :CrmCall ={
    // FIX(review): was line.split("/t"), which splits on the literal two
    // characters '/' 't' rather than on a tab. Assuming the input is
    // genuinely tab-separated — confirm against the data generator.
    val values = line.split("\t")
    val userId: Long = values(0).toLong
    val callTime: Long = values(1).toLong
    val duration: Long = values(2).toLong
    val issue = values(3)
    val experience = values(4).toDouble
    val team = values(5).toInt
    val rep = values(6).toInt
    val resolved = values(7).toBoolean
    val eventType: String = values(8)
    val comments: String = values(9)

    val cal = Calendar.getInstance()
    cal.setTimeInMillis(callTime)
    val year: Int = cal.get(Calendar.YEAR)
    val month: Int = cal.get(Calendar.MONTH) + 1 // Calendar.MONTH is 0-based
    val day: Int = cal.get(Calendar.DAY_OF_MONTH)
    val hour: Int = cal.get(Calendar.HOUR_OF_DAY)

    CrmCall(userId, callTime, duration,issue,experience,team,rep,resolved,eventType, comments, year, month, day, hour)
  }
}
/** One parsed CRM call record (see CrmCall.apply for the column layout).
  * year/month/day/hour are denormalized from callTime in the JVM default
  * time zone; month is 1-based. */
case class CrmCall(userId : Long,
                    callTime:Long,
                    duration : Long,
                    issue : String,
                    experience : Double,
                    team : Int,
                    rep : Int,
                    resolved : Boolean,
                    eventType: String,
                    comments: String,
                    year : Int,
                    month : Int,
                    day : Int,
                    hour : Int
                    ) extends Event {}
| Arnonrgo/spark-parquet-example | src/main/scala/com/rgoarchitects/example/CrmCall.scala | Scala | apache-2.0 | 1,517 |
package fpinscala.state
/** A purely functional pseudo-random generator: each call yields the next
  * random `Int` together with the successor generator state. */
trait RNG {
  def nextInt: (Int, RNG) // Should generate a random `Int`. We'll later define other functions in terms of `nextInt`.
}
/** Combinators over [[RNG]]: generator state is threaded explicitly, so each
  * `Rand[A]` is simply a function `RNG => (A, RNG)`. */
object RNG {
  // NB - this was called SimpleRNG in the book text
  case class Simple(seed: Long) extends RNG {
    def nextInt: (Int, RNG) = {
      val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL // `&` is bitwise AND. We use the current seed to generate a new seed.
      val nextRNG = Simple(newSeed) // The next state, which is an `RNG` instance created from the new seed.
      val n = (newSeed >>> 16).toInt // `>>>` is right binary shift with zero fill. The value `n` is our new pseudo-random integer.
      (n, nextRNG) // The return value is a tuple containing both a pseudo-random integer and the next `RNG` state.
    }
  }

  // A generator of `A`s expressed as a state transition over RNG.
  type Rand[+A] = RNG => (A, RNG)

  val int: Rand[Int] = _.nextInt

  // Lifts a constant into Rand without consuming any randomness.
  def unit[A](a: A): Rand[A] =
    rng => (a, rng)

  def map[A,B](s: Rand[A])(f: A => B): Rand[B] =
    rng => {
      val (a, rng2) = s(rng)
      (f(a), rng2)
    }

  // ==== My code starts here ====

  // Maps the full Int range onto [0, Int.MaxValue]: for negative x,
  // x - Int.MinValue == x + 2^31, so Int.MinValue maps to 0 and -1 maps
  // to Int.MaxValue.
  def nonNegativeInt(rng: RNG): (Int, RNG) = {
    val (x, rng2) = rng.nextInt
    if (x >= 0) (x, rng2)
    else (x - Int.MinValue, rng2)
  }

  // Scales a non-negative Int into [0.0, 1.0]. NOTE(review): the upper
  // bound is inclusive (x == Int.MaxValue yields exactly 1.0); confirm the
  // conventional half-open range [0, 1) was not intended.
  def double(rng: RNG): (Double, RNG) = {
    val (x, rng2) = nonNegativeInt(rng)
    (x.toDouble / Int.MaxValue, rng2)
  }

  // Generates an (Int, Double) pair, threading the state through both.
  def intDouble(rng: RNG): ((Int,Double), RNG) = {
    val (xi, rng2) = rng.nextInt
    val (xd, rng3) = double(rng2)
    ((xi, xd), rng3)
  }

  // As intDouble, but the Double is generated first.
  def doubleInt(rng: RNG): ((Double,Int), RNG) = {
    val (xd, rng2) = double(rng)
    val (xi, rng3) = rng2.nextInt
    ((xd, xi), rng3)
  }

  // Three consecutive doubles from the same generator chain.
  def double3(rng: RNG): ((Double,Double,Double), RNG) = {
    val (x1, rng2) = double(rng)
    val (x2, rng3) = double(rng2)
    val (x3, rng4) = double(rng3)
    ((x1, x2, x3), rng4)
  }

  // Generates `count` random ints tail-recursively. Results are prepended,
  // so the returned list is in REVERSE generation order.
  def ints(count: Int)(rng: RNG): (List[Int], RNG) = {
    def recur(rrng: RNG, l: List[Int], c: Int): (List[Int], RNG) =
      if (c > 0) {
        val (x, nextRng) = rrng.nextInt
        recur(nextRng, x :: l, c -1)
      } else (l, rrng)
    recur(rng, Nil, count)
  }

  // Same as `double`, re-expressed with map (exercise variant).
  def doubleByMap: Rand[Double] =
    map(nonNegativeInt)(x => x.toDouble / Int.MaxValue)

  // Combines two generators, running `ra` first, then `rb`.
  def map2[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
    rng => {
      val (a, rng2) = ra(rng)
      val (b, rng3) = rb(rng2)
      (f(a, b), rng3)
    }

  // Runs the generators left to right, prepending each result.
  // NOTE(review): the output list is therefore in REVERSE input order; the
  // usual `sequence` contract preserves order — confirm callers don't rely
  // on the ordering.
  def sequence[A](fs: List[Rand[A]]): Rand[List[A]] = {
    def recur(li: List[Rand[A]], lo: List[A], rng: RNG): (List[A], RNG) =
      li match {
        case Nil => (lo, rng)
        case h :: t => {
          val (x, nextRng) = h(rng)
          recur(t, x :: lo, nextRng)
        }
      }
    rng => recur(fs, Nil, rng)
  }

  // `ints` re-expressed via sequence (exercise variant).
  def intsBySequence(count: Int)(rng: RNG): (List[Int], RNG) =
    sequence(List.fill(count)(RNG.int)).apply(rng)

  // Monadic bind: runs `f`, then feeds its value into `g`.
  def flatMap[A,B](f: Rand[A])(g: A => Rand[B]): Rand[B] =
    rng => {
      val (a, rng2) = f(rng)
      val fb = g(a)
      fb(rng2)
    }

  // Uniform value in [0, n) by rejection sampling: retry when `i` falls in
  // the biased tail where i + (n-1) overflows.
  // NOTE(review): the book's version tests `>= 0`; with `> 0` the exact
  // boundary case also retries — harmless but slightly wasteful.
  def positiveLessThan(n: Int): Rand[Int] =
    flatMap(nonNegativeInt){ i =>
      val mod = i % n
      if (i + (n-1) - mod > 0) rng => (mod, rng)
      else positiveLessThan(n)
    }

  // `map` re-expressed via flatMap (exercise variant).
  def mapByFlatm[A,B](s: Rand[A])(f: A => B): Rand[B] =
    flatMap(s)(a => rng => (f(a), rng))

  // `map2` re-expressed via flatMap (exercise variant).
  def map2ByFlatm[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
    flatMap(ra)(a => rng => {
      val (b, rng2) = rb(rng)
      (f(a, b), rng2)
    })
}
/** A stateful computation: a function from an initial state `S` to a result
  * `A` paired with the successor state. */
case class State[S,+A](run: S => (A, S)) {
  /** Transforms the produced value with `f`; the state is threaded unchanged. */
  def map[B](f: A => B): State[S, B] =
    flatMap(a => State(s => (f(a), s)))

  /** Runs this computation, then `sb`, combining their results with `f`. */
  def map2[B,C](sb: State[S, B])(f: (A, B) => C): State[S, C] =
    flatMap(a => sb.map(b => f(a, b)))

  /** Monadic bind: feeds this computation's value into `f` and continues
    * with the state it produced. */
  def flatMap[B](f: A => State[S, B]): State[S, B] =
    State { s0 =>
      val (a, s1) = run(s0)
      f(a).run(s1)
    }
}
object State {
  type Rand[A] = State[RNG, A]

  /** Runs the computations left to right, threading the state through, and
    * collects their results.
    *
    * FIX(review): the accumulator is built by prepending, which yielded the
    * results in reverse input order; the final reverse restores the usual
    * `sequence` contract (output order == input order) while keeping the
    * run tail-recursive. */
  def sequence[S,B](ls: List[State[S,B]]): State[S,List[B]] = {
    @annotation.tailrec
    def recur(li: List[State[S,B]], lo: List[B], s: S): (List[B], S) =
      li match {
        case Nil => (lo.reverse, s) // restore input order
        case h :: t => {
          val (b, s2) = h.run(s)
          recur(t, b :: lo, s2)
        }
      }
    State(s => recur(ls, Nil, s))
  }

  /** Lifts a constant into State without touching the state. */
  def unit[S,A](a: A): State[S,A] = State(s => (a, s))

  /** Exposes the current state as the result. */
  def get[S]: State[S,S] = State(s => (s, s))

  /** Replaces the state, producing Unit. */
  def set[S](s: S): State[S,Unit] = State(_ => ((), s))

  /** Transforms the state with `f` (get-then-set). */
  def modify[S](f: S => S): State[S,Unit] = for {
    s <- get
    _ <- set(f(s))
  } yield ()
}
// Inputs accepted by the candy-dispensing machine.
sealed trait Input
case object Coin extends Input
case object Turn extends Input

// locked: whether a coin must be inserted before the knob works;
// candies/coins: current inventory.
case class Machine(locked: Boolean, candies: Int, coins: Int)
object Machine {

  /** One transition of the candy machine:
    *  - a machine with no candy ignores every input;
    *  - a coin in a locked machine unlocks it and banks the coin;
    *  - a turn on an unlocked machine dispenses a candy and relocks;
    *  - every other input leaves the machine unchanged. */
  def updateState(input: Input)(s: Machine): Machine =
    if (s.candies == 0) s
    else input match {
      case Coin =>
        if (s.locked) Machine(locked = false, candies = s.candies, coins = s.coins + 1)
        else s
      case Turn =>
        if (s.locked) s
        else Machine(locked = true, candies = s.candies - 1, coins = s.coins)
    }

  /** Feeds all `inputs` through the machine and yields the final
    * (coins, candies) pair. */
  def simulateMachine(inputs: List[Input]): State[Machine, (Int, Int)] = {
    val steps = inputs.map(i => State.modify[Machine](updateState(i)))
    State.sequence(steps).flatMap { _ =>
      // "get" is used as a function of a flatMap call.
      State.get.map(m => (m.coins, m.candies))
    }
  }
}
| boechat107/fpinscala | exercises/src/main/scala/fpinscala/state/State.scala | Scala | mit | 5,404 |
object conv { // holder for the implicit view under test (compiler NEG test: keep line positions stable)
  implicit def i2s(i: Int): String = "" // an implicit conversion Int => String, in scope below
}
import conv._ // the view is imported, but must NOT be applied to the annotation argument
class annot(value: String) extends annotation.ConstantAnnotation // annotation requiring a String constant
@annot(101) class C
| scala/scala | test/files/neg/t6083.scala | Scala | apache-2.0 | 156 |
package com.socrata.soda.server.wiremodels
import scala.{collection => sc}
import com.rojoma.json.v3.ast.{JObject, JValue}
import com.rojoma.json.v3.codec._
import com.rojoma.json.v3.codec.DecodeError.{InvalidField, InvalidType}
import com.rojoma.json.v3.codec.JsonDecode.DecodeResult
import com.rojoma.json.v3.util.{AutomaticJsonCodecBuilder, JsonKeyStrategy, Strategy}
import com.socrata.soda.server.copy.Stage
import com.socrata.soda.server.responses.DatasetSpecMaltyped
import com.socrata.soda.server.id.ResourceName
import com.socrata.soda.server.wiremodels.InputUtils._
import com.socrata.soda.server.util.AdditionalJsonCodecs._
import com.socrata.soql.environment.ColumnName
// Minimal projection of a dataset spec carrying only the resource name.
case class DatasetSpecSubSet(resourceName: ResourceName)
object DatasetSpecSubSet{
  implicit val jsonCodec = AutomaticJsonCodecBuilder[DatasetSpecSubSet]
}
// Fully-resolved dataset specification; serialized with snake_case JSON keys
// (Strategy.Underscore). `columns` is keyed by column name.
@JsonKeyStrategy(Strategy.Underscore)
case class DatasetSpec(resourceName: ResourceName,
                       name:String,
                       description: String,
                       rowIdentifier: ColumnName,
                       locale:String,
                       stage: Option[Stage],
                       columns:Map[ColumnName, ColumnSpec])
object DatasetSpec {
  // Custom codec for the column map: column names become the JSON object's
  // keys rather than a list of pairs.
  private implicit val columnMapCodec = new JsonEncode[Map[ColumnName, ColumnSpec]] with JsonDecode[Map[ColumnName, ColumnSpec]]{
    def encode(x: Map[ColumnName, ColumnSpec]): JValue =
      JObject(x.map { case (k,v) => k.name -> JsonEncode.toJValue(v) })
    def decode(x: JValue): DecodeResult[Map[ColumnName, ColumnSpec]] = x match {
      case JObject(fields) =>
        val r = Map.newBuilder[ColumnName, ColumnSpec]
        fields foreach { case (k, v) =>
          JsonDecode.fromJValue[ColumnSpec](v) match {
            case Right(col) => r += new ColumnName(k) -> col
            // Nonlocal `return` inside foreach: aborts the whole decode on
            // the first malformed column, reporting its field name.
            case Left(_) => return Left(InvalidField(k.toString))
          }
        }
        Right(r.result())
      case u =>
        Left(InvalidType(JObject, u.jsonType))
    }
  }
  implicit val jsonCodec = AutomaticJsonCodecBuilder[DatasetSpec]
}
// Partially-specified dataset spec as supplied by a user; every field is
// optional so callers can patch individual attributes.
// NOTE(review): there is no `stage` field here even though the extractor in
// the companion defines one — confirm whether stage was meant to be settable.
case class UserProvidedDatasetSpec(resourceName: Option[ResourceName],
                                   name: Option[String],
                                   description: Option[String],
                                   rowIdentifier: Option[ColumnName],
                                   locale: Option[Option[String]],
                                   columns: Option[Seq[UserProvidedColumnSpec]])
object UserProvidedDatasetSpec extends UserProvidedSpec[UserProvidedDatasetSpec] {
  /** Extracts a user-provided dataset spec from a JSON object; the first
    * malformed field short-circuits the for-comprehension with a typed
    * DatasetSpecMaltyped error. */
  def fromObject(obj: JObject): ExtractResult[UserProvidedDatasetSpec] = {
    val dex = new DatasetExtractor(obj.fields)
    for {
      resourceName <- dex.resourceName
      name <- dex.name
      desc <- dex.description
      locale <- dex.locale
      rowId <- dex.rowId
      columns <- dex.columns
    } yield {
      UserProvidedDatasetSpec(resourceName, name, desc, rowId, locale, columns)
    }
  }

  // Using this class instead of AutomaticJsonCodecBuilder allows us to
  // return a specific SodaResponse citing what part of the extraction failed.
  private class DatasetExtractor(map: sc.Map[String, JValue]) {
    val context = new ExtractContext(DatasetSpecMaltyped)
    import context._
    // Pulls an optional field of type T, recording a typed error on mismatch.
    private def e[T : Decoder](field: String): ExtractResult[Option[T]] =
      extract[T](map, field)
    def resourceName= e[ResourceName]("resource_name")
    def name = e[String]("name")
    def description = e[String]("description")
    def rowId = e[ColumnName]("row_identifier")
    def locale = e[Option[String]]("locale")
    // NOTE(review): `stage` is never consumed by fromObject above — confirm
    // whether stage extraction was meant to be wired through.
    def stage = e[Option[Stage]]("stage")
    def columns: ExtractResult[Option[Seq[UserProvidedColumnSpec]]] =
      e[Seq[JObject]]("columns") flatMap {
        case Some(arr) => ExtractResult.sequence(arr.map(UserProvidedColumnSpec.fromObject)).map(Some(_))
        case None => Extracted(None)
      }
  }
}
| socrata-platform/soda-fountain | soda-fountain-lib/src/main/scala/com/socrata/soda/server/wiremodels/DatasetSpec.scala | Scala | apache-2.0 | 3,909 |
/*
* Copyright 2014 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import anorm._
import anorm.SqlParser._
import java.sql.{Blob, Connection, DriverManager, SQLException}
import com.twitter.util.{Try, Return, Throw}
import com.twitter.util.{Await, Duration, Future, FuturePool, Time, Try}
import java.util.concurrent.{ TimeUnit, ArrayBlockingQueue }
import java.util.concurrent.atomic.{ AtomicInteger, AtomicBoolean }
import collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import com.twitter.logging.Logger
/** Handle for a leased JDBC connection: `release()` returns it to the pool
  * for reuse, `close()` discards it permanently. */
case class PooledConnection( val connection : Connection, pool : ConnectionPool ) {
  def release() {
    pool.release(connection)
  }
  def close() {
    pool.close(connection)
  }
}
/** Bounded JDBC connection pool. `connections` holds idle connections;
  * `leasedConnectionsCount` counts connections currently handed out. */
class ConnectionPool(db : SpanStoreDB, maxSize : Int) {
  private[this] val connections : ArrayBlockingQueue[Connection] = new ArrayBlockingQueue[Connection](maxSize)
  private[this] val isClosed = new AtomicBoolean(false)
  private[this] val leasedConnectionsCount = new AtomicInteger(0)

  /** Leases a connection: reuses an idle one, creates a new one while below
    * capacity, otherwise waits up to 1s for a release; None when closed or
    * exhausted.
    * NOTE(review): the capacity check and the increment are separate atomic
    * operations (check-then-act), so concurrent callers can transiently
    * exceed maxSize — confirm this is acceptable. */
  def get() = {
    if (isClosed.get){
      None
    } else {
      // Below capacity: don't block, we can create a fresh connection instead.
      val conn = if (leasedConnectionsCount.get() < maxSize){
        connections.poll
      } else {
        connections.poll(1, TimeUnit.SECONDS)
      }
      if (conn == null){
        if (leasedConnectionsCount.get() < maxSize){
          val freshConnection = db.getConnection()
          leasedConnectionsCount.incrementAndGet()
          Some(PooledConnection(freshConnection, this))
        } else {
          None
        }
      } else {
        leasedConnectionsCount.incrementAndGet()
        Some(PooledConnection(conn, this))
      }
    }
  }

  /** Returns a leased connection to the idle queue; closes it if the pool is
    * closed or the queue is full. */
  def release(conn : Connection){
    leasedConnectionsCount.decrementAndGet()
    if (isClosed.get || !connections.offer(conn)){
      conn.close()
    }
  }

  /** Permanently discards a leased connection (e.g. after an error), making
    * sure it is not left in the idle queue. */
  def close(conn : Connection){
    leasedConnectionsCount.decrementAndGet()
    try {
      if (!conn.isClosed())
        conn.close()
    } finally {
      connections.remove(conn)
    }
  }

  /** Shuts the pool down: no new leases, and all idle connections are closed.
    * Still-leased connections are closed as they come back via release(). */
  def close() {
    isClosed.set(true)
    val target = ArrayBuffer[Connection]()
    connections.drainTo(target)
    target.map( _.close() )
  }
}
/** Anorm/JDBC wrapper for the zipkin span store. `location` is a JDBC
  * location suffix such as "mysql://host/db"; the prefix selects the
  * driver, blob column type and auto-increment syntax. All pooled work is
  * dispatched on the supplied FuturePool. */
case class SpanStoreDB(location: String,
                       connectionPoolSize : Int = 1,
                       pool: FuturePool = FuturePool.unboundedPool,
                       log : Logger = Logger.get("SpanStoreDB")) {

  // Dispatch on the scheme portion of `location` ("sqlite:...", "mysql:...").
  val (driver, blobType, autoIncrementSql) = location.split(":").toList match {
    case "sqlite" :: _ =>
      ("org.sqlite.JDBC", "BLOB", "INTEGER PRIMARY KEY AUTOINCREMENT")
    case "h2" :: _ =>
      ("org.h2.Driver", "BLOB", "BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY")
    case "postgresql" :: _ =>
      ("org.postgresql.Driver", "BYTEA", "BIGSERIAL PRIMARY KEY")
    case "mysql" :: _ =>
      ("com.mysql.jdbc.Driver", "MEDIUMBLOB", "BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY")
    case db :: _ =>
      throw new IllegalArgumentException("Unsupported DB: %s".format(db))
    case _ =>
      throw new IllegalArgumentException("Unknown DB location: %s".format(location))
  }

  val connectionPool = new ConnectionPool(this, connectionPoolSize)

  // Load the driver
  Class.forName(driver)

  /**
   * Gets an unmanaged java.sql.Connection to the SQL database.
   *
   * Example usage:
   *
   * implicit val conn: Connection = (new SpanStoreDB()).getConnection()
   * // Do database updates
   * conn.close()
   */
  def getConnection() = {
    DriverManager.getConnection("jdbc:" + location)
  }

  /**
   * shutdown connection pool
   */
  def close() = {
    connectionPool.close()
  }

  /**
   * Gets a managed java.sql.Connection to the SQL database. Call release() when done or close() to remove from pool
   */
  def getPooledConnection() : Option[PooledConnection] = {
    connectionPool.get
  }

  /** Runs `f` with a pooled connection on the FuturePool; the connection is
    * released on success and discarded (closed) if `f` throws. Throws
    * immediately (not inside the Future) when the pool is exhausted. */
  def withConnection[T](f: (Connection) => T ) = {
    connectionPool.get match {
      case Some(conn) =>
        pool {
          try {
            val resp = f(conn.connection)
            conn.release()
            resp
          } catch {
            case t : Throwable =>
              conn.close()
              throw t
          }
        }
      case None =>
        throw new RuntimeException("No db connections available")
    }
  }

  /**
   * Execute SQL in a transaction.
   */
  // Commits on success, rolls back on any throwable, and always restores the
  // connection's previous autoCommit setting.
  def withTransaction[A](code: Connection => A)(implicit conn : Connection): Try[A] = {
    val autoCommit = conn.getAutoCommit
    try {
      conn.setAutoCommit(false)
      val result = code(conn)
      conn.commit()
      Return(result)
    }
    catch {
      case e: Throwable => {
        conn.rollback()
        Throw(e)
      }
    }
    finally {
      conn.setAutoCommit(autoCommit)
    }
  }

  /**
   * Attempt to convert a SQL value into a byte array.
   */
  protected def valueToByteArrayOption(value: Any): Option[Array[Byte]] = {
    value match {
      case bytes: Array[Byte] => Some(bytes)
      case blob: Blob =>
        try Some(blob.getBytes(1, blob.length.asInstanceOf[Int])) catch {
          case e: SQLException => None
        }
      case _ => None
    }
  }

  /**
   * Implicitly convert an Anorm row to a byte array.
   */
  def rowToByteArray: Column[Array[Byte]] = {
    Column.nonNull[Array[Byte]] { (value, meta) =>
      val MetaDataItem(qualified, nullable, clazz) = meta
      valueToByteArrayOption(value) match {
        case Some(bytes) => Right(bytes)
        case _ => Left(TypeDoesNotMatch(
          "Cannot convert %s:%s to Byte Array for column %s".format(
            value, value.asInstanceOf[AnyRef].getClass, qualified)))
      }
    }
  }

  /**
   * Build a RowParser factory for a byte array column.
   */
  def bytes(columnName: String): RowParser[Array[Byte]] = {
    get[Array[Byte]](columnName)(rowToByteArray)
  }

  /**
   * Set up the database tables.
   *
   */
  // Blocks (Await) until the DDL has run. `clear = true` drops and
  // recreates every table.
  def install(clear: Boolean = false) : Unit = {
    val installJob = withConnection {
      implicit conn : Connection =>
      if (clear) SQL("DROP TABLE IF EXISTS zipkin_spans").execute()
      SQL(
        """CREATE TABLE IF NOT EXISTS zipkin_spans (
          |  span_id BIGINT NOT NULL,
          |  parent_id BIGINT,
          |  trace_id BIGINT NOT NULL,
          |  span_name VARCHAR(255) NOT NULL,
          |  debug SMALLINT NOT NULL,
          |  duration BIGINT,
          |  created_ts BIGINT
          |)
        """.stripMargin).execute()
      if (clear) SQL("DROP TABLE IF EXISTS zipkin_annotations").execute()
      SQL(
        """CREATE TABLE IF NOT EXISTS zipkin_annotations (
          |  span_id BIGINT NOT NULL,
          |  trace_id BIGINT NOT NULL,
          |  span_name VARCHAR(255) NOT NULL,
          |  service_name VARCHAR(255) NOT NULL,
          |  value TEXT,
          |  ipv4 INT,
          |  port INT,
          |  a_timestamp BIGINT NOT NULL,
          |  duration BIGINT
          |)
        """.stripMargin).execute()
      if (clear) SQL("DROP TABLE IF EXISTS zipkin_binary_annotations").execute()
      SQL(
        """CREATE TABLE IF NOT EXISTS zipkin_binary_annotations (
          |  span_id BIGINT NOT NULL,
          |  trace_id BIGINT NOT NULL,
          |  span_name VARCHAR(255) NOT NULL,
          |  service_name VARCHAR(255) NOT NULL,
          |  annotation_key VARCHAR(255) NOT NULL,
          |  annotation_value %s,
          |  annotation_type_value INT NOT NULL,
          |  ipv4 INT,
          |  port INT
          |)
        """.stripMargin.format(blobType)).execute()
      if (clear) SQL("DROP TABLE IF EXISTS zipkin_dependencies").execute()
      SQL(
        """CREATE TABLE IF NOT EXISTS zipkin_dependencies (
          |  dlid %s,
          |  start_ts BIGINT NOT NULL,
          |  end_ts BIGINT NOT NULL
          |)
        """.stripMargin.format(autoIncrementSql)).execute()
      if (clear) SQL("DROP TABLE IF EXISTS zipkin_dependency_links").execute()
      SQL(
        """CREATE TABLE IF NOT EXISTS zipkin_dependency_links (
          |  dlid BIGINT NOT NULL,
          |  parent VARCHAR(255) NOT NULL,
          |  child VARCHAR(255) NOT NULL,
          |  m0 BIGINT NOT NULL,
          |  m1 DOUBLE PRECISION NOT NULL,
          |  m2 DOUBLE PRECISION NOT NULL,
          |  m3 DOUBLE PRECISION NOT NULL,
          |  m4 DOUBLE PRECISION NOT NULL
          |)
        """.stripMargin).execute()
    }
    Await.result(installJob)
    // NOTE(review): SHOW INDEX is MySQL syntax; on the other supported
    // backends these statements throw and index creation is skipped with
    // only a warning — confirm that is intended.
    val installIndexesJob = withConnection {
      implicit conn : Connection =>
        try {
          if (SQL("SHOW INDEX from zipkin_spans WHERE key_name='span_spanid_idx'")().isEmpty()){
            SQL("CREATE INDEX span_spanid_idx ON zipkin_spans (span_id)").execute()
          }
          if (SQL("SHOW INDEX from zipkin_spans WHERE key_name='span_parentid_idx'")().isEmpty()){
            SQL("CREATE INDEX span_parentid_idx ON zipkin_spans (parent_id)").execute()
          }
          if (SQL("SHOW INDEX from zipkin_annotations WHERE key_name='anno_span_idx'")().isEmpty()){
            SQL("CREATE INDEX anno_span_idx ON zipkin_annotations(span_id)").execute()
          }
        } catch {
          case e : Throwable => log.warning("Unable to create indexes on span store db - continuing anyway")
        }
    }
    Await.result(installIndexesJob);
  }
}
| bbc/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/SpanStoreDB.scala | Scala | apache-2.0 | 9,673 |
/*
* This file is part of Apparat.
*
* Copyright (C) 2010 Joa Ebert
* http://www.joa-ebert.com/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package apparat.taas.backend.jbc
import java.lang.String
/**
* @author Joa Ebert
*/
class JbcClassLoader(map: Map[String, Array[Byte]], parent: ClassLoader) extends ClassLoader(parent) {
	/** Defines the class from the in-memory bytecode table when present,
	  * otherwise delegates resolution to the parent loader. */
	override protected def findClass(name: String): Class[_] =
		map.get(name) match {
			case Some(bytecode) => defineClass(name, bytecode, 0, bytecode.length)
			case None           => parent.loadClass(name)
		}
}
| joa/apparat | apparat-taas/src/main/scala/apparat/taas/backend/jbc/JbcClassLoader.scala | Scala | lgpl-2.1 | 1,242 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze
import edu.latrobe._
import org.json4s.JsonAST._
object RuntimeStatus {

  /** Snapshots the runtime status of every registered plugin as a JSON
    * object under the "plugins" key. A plugin whose status collection
    * throws contributes an "ERROR: ..." string instead of failing the
    * whole snapshot. */
  final def collect()
  : JObject = {
    logger.trace("Collecting runtime status for edu.latrobe.blaze")
    val pluginFields = Plugin.plugins.map(plugin =>
      try {
        Json.field(plugin.name, plugin.collectRuntimeStatus())
      }
      catch {
        case e: Exception =>
          Json.field(plugin.name, s"ERROR: $e")
      }
    ).toList
    JObject(List(Json.field("plugins", JObject(pluginFields))))
  }
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/RuntimeStatus.scala | Scala | apache-2.0 | 1,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core
import org.apache.commons.configuration.BaseConfiguration
import org.apache.s2graph.core.Management.JsonModel.Prop
import org.apache.s2graph.core.S2Graph.{DefaultColumnName, DefaultServiceName}
import org.apache.s2graph.core.mysqls.{ColumnMeta, ServiceColumn}
import org.apache.s2graph.core.types.HBaseType
import org.apache.tinkerpop.gremlin.structure.T
object S2GraphFactory {
def generateClassic(g: S2Graph): Unit = {
val marko = g.addVertex(T.id, Int.box(1), "name", "marko", "age", Int.box(29))
val vadas = g.addVertex(T.id, Int.box(2), "name", "vadas", "age", Int.box(27))
val lop = g.addVertex(T.id, Int.box(3), "name", "lop", "lang", "java")
val josh = g.addVertex(T.id, Int.box(4), "name", "josh", "age", Int.box(32))
val ripple = g.addVertex(T.id, Int.box(5), "name", "ripple", "lang", "java")
val peter = g.addVertex(T.id, Int.box(6), "name", "peter", "age", Int.box(35))
marko.addEdge("knows", vadas, T.id, Int.box(7), "weight", Float.box(0.5f))
marko.addEdge("knows", josh, T.id, Int.box(8), "weight", Float.box(1.0f))
marko.addEdge("created", lop, T.id, Int.box(9), "weight", Float.box(0.4f))
josh.addEdge("created", ripple, T.id, Int.box(10), "weight", Float.box(1.0f))
josh.addEdge("created", lop, T.id, Int.box(11), "weight", Float.box(0.4f))
peter.addEdge("created", lop, T.id, Int.box(12), "weight", Float.box(0.2f))
}
def generateModern(g: S2Graph): Unit = {
val marko = g.addVertex(T.id, Int.box(1), T.label, "person", "name", "marko", "age", Int.box(29))
val vadas = g.addVertex(T.id, Int.box(2), T.label, "person", "name", "vadas", "age", Int.box(27))
val lop = g.addVertex(T.id, Int.box(3), T.label, "software", "name", "lop", "lang", "java")
val josh = g.addVertex(T.id, Int.box(4), T.label, "person", "name", "josh", "age", Int.box(32))
val ripple = g.addVertex(T.id, Int.box(5), T.label, "software", "name", "ripple", "lang", "java")
val peter = g.addVertex(T.id, Int.box(6), T.label, "person", "name", "peter", "age", Int.box(35))
marko.addEdge("knows", vadas, T.id, Int.box(7), "weight", Double.box(0.5d))
marko.addEdge("knows", josh, T.id, Int.box(8), "weight", Double.box(1.0d))
marko.addEdge("created", lop, T.id, Int.box(9), "weight", Double.box(0.4d))
josh.addEdge("created", ripple, T.id, Int.box(10), "weight", Double.box(1.0d))
josh.addEdge("created", lop, T.id, Int.box(11), "weight", Double.box(0.4d))
peter.addEdge("created", lop, T.id, Int.box(12), "weight", Double.box(0.2d))
}
def initDefaultSchema(graph: S2Graph): Unit = {
val management = graph.management
// Management.deleteService(DefaultServiceName)
val DefaultService = management.createService(DefaultServiceName, "localhost", "s2graph", 0, None).get
// Management.deleteColumn(DefaultServiceName, DefaultColumnName)
val DefaultColumn = ServiceColumn.findOrInsert(DefaultService.id.get, DefaultColumnName, Some("integer"), HBaseType.DEFAULT_VERSION, useCache = false)
val DefaultColumnMetas = {
ColumnMeta.findOrInsert(DefaultColumn.id.get, "test", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "name", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "age", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "lang", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "oid", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "communityIndex", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "testing", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "string", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "boolean", "boolean", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "long", "long", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "float", "float", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "double", "double", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "integer", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "aKey", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "x", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "y", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "location", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "status", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "myId", "integer", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "acl", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "some", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "this", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "that", "string", useCache = false)
ColumnMeta.findOrInsert(DefaultColumn.id.get, "any", "string", useCache = false)
}
// Management.deleteLabel("_s2graph")
val DefaultLabel = management.createLabel("_s2graph", DefaultService.serviceName, DefaultColumn.columnName, DefaultColumn.columnType,
DefaultService.serviceName, DefaultColumn.columnName, DefaultColumn.columnType, true, DefaultService.serviceName, Nil, Nil, "weak", None, None,
options = Option("""{"skipReverse": false}""")
)
}
def initModernSchema(g: S2Graph): Unit = {
val mnt = g.management
val softwareColumn = Management.createServiceColumn(S2Graph.DefaultServiceName, "software", "integer", Seq(Prop(T.id.toString, "-1", "integer"), Prop("name", "-", "string"), Prop("lang", "-", "string")))
val personColumn = Management.createServiceColumn(S2Graph.DefaultServiceName, "person", "integer",
Seq(Prop(T.id.toString, "-1", "integer"), Prop("name", "-", "string"), Prop("age", "0", "integer"), Prop("location", "-", "string")))
val knows = mnt.createLabel("knows",
S2Graph.DefaultServiceName, "person", "integer",
S2Graph.DefaultServiceName, "person", "integer",
true, S2Graph.DefaultServiceName, Nil, Seq(Prop("weight", "0.0", "double"), Prop("year", "0", "integer")), consistencyLevel = "strong", None, None)
val created = mnt.createLabel("created",
S2Graph.DefaultServiceName, "person", "integer",
S2Graph.DefaultServiceName, "software", "integer",
true, S2Graph.DefaultServiceName, Nil, Seq(Prop("weight", "0.0", "double")), "strong", None, None)
}
def cleanupDefaultSchema(): Unit = {
val columnNames = Set(S2Graph.DefaultColumnName, "person", "software", "product", "dog",
"animal", "song", "artist", "STEPHEN")
val labelNames = Set(S2Graph.DefaultLabelName, "knows", "created", "bought", "test", "self", "friends", "friend", "hate", "collaborator",
"test1", "test2", "test3", "pets", "walks", "hates", "link",
"codeveloper", "createdBy", "existsWith", "writtenBy", "sungBy", "followedBy", "uses", "likes", "foo", "bar")
columnNames.foreach { columnName =>
Management.deleteColumn(S2Graph.DefaultServiceName, columnName)
}
labelNames.foreach { labelName =>
Management.deleteLabel(labelName)
}
}
}
| SteamShon/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/S2GraphFactory.scala | Scala | apache-2.0 | 8,233 |
package core.parser
import core.BaseSpec
import core.protocol.{Command, SourceCommandParser, LogCommandParser}
/** Specs for LogCommandParser / SourceCommandParser: well-formed commands
  * parse to typed Command values; anything else yields Command.Unknown
  * carrying the raw input. */
class CommandParserSpec extends BaseSpec {

  val logParser = new LogCommandParser
  val sourceParser = new SourceCommandParser

  "Commands can parse" should {
    // `l`/`L` is the "list" command; the numeric arg is the session/source id.
    "list command `l` or `L`" in {
      val parsedLowercase = sourceParser.parse(0, "l")
      val parsedUpperCase = sourceParser.parse(1, "L")

      parsedLowercase shouldBe Command.List(0)
      parsedUpperCase shouldBe Command.List(1)
    }

    // `f s=<id> l=<level>` subscribes to logs from source <id> at <level>.
    "from command `f s=1 l=DEBUG` or `F ...`" in {
      val parsedLowercase = logParser.parse(0, "f s=1 l=DEBUG")
      val parsedUpperCase = logParser.parse(1, "F s=2 l=DEBUG")

      parsedLowercase shouldBe Command.From(0, 1, "DEBUG")
      parsedUpperCase shouldBe Command.From(1, 2, "DEBUG")
    }
  }

  "commands should return Unknown" should {
    "when the first character is invalid" in {
      val parsedLog = logParser.parse(0, "asd")
      val parsedSource = sourceParser.parse(0, "asd")

      parsedLog shouldBe Command.Unknown("asd")
      parsedSource shouldBe Command.Unknown("asd")
    }

    // A `from` without a parseable source id is rejected whole.
    "when from doesn't contain a source id" in {
      val missingId = logParser.parse(0, "f s l=DEBUG")
      val missingBoth = logParser.parse(0, "f l=DEBUG")

      missingId shouldBe Command.Unknown("f s l=DEBUG")
      missingBoth shouldBe Command.Unknown("f l=DEBUG")
    }
  }
}
| muratozkan/reactive-logs | core/src/test/scala/core/parser/CommandParserSpec.scala | Scala | mit | 1,417 |
package eventstore
package core
package operations
import OperationError._
import Inspection.Decision._
private[eventstore] final case class TransactionStartInspection(out: TransactionStart)
  extends ErrorInspection[TransactionStartCompleted, OperationError] {

  def streamId = out.streamId
  def expectedVersion = out.expectedVersion

  /** Maps each server-reported error to a retry-or-fail decision. */
  def decision(error: OperationError) = error match {
    // All timeouts are transient: the transaction start may simply be retried.
    case PrepareTimeout | CommitTimeout | ForwardTimeout => Retry
    case WrongExpectedVersion => Fail(wrongExpectedVersion)
    case StreamDeleted        => Fail(streamDeletedException)
    case InvalidTransaction   => Fail(InvalidTransactionException)
    case AccessDenied         => Fail(AccessDeniedException(s"Write access denied for $streamId"))
  }

  def wrongExpectedVersion = {
    val msg = s"Transaction start failed due to WrongExpectedVersion: $streamId, $expectedVersion"
    new WrongExpectedVersionException(msg)
  }

  def streamDeletedException =
    new StreamDeletedException(s"Transaction start failed due to $streamId has been deleted")
}
} | EventStore/EventStore.JVM | core/src/main/scala/eventstore/core/operations/TransactionStartInspection.scala | Scala | bsd-3-clause | 1,149 |
package com.twitter.io
import java.nio.ReadOnlyBufferException
import java.nio.charset.{Charset, StandardCharsets => JChar}
import scala.collection.immutable.VectorBuilder
/**
* Buf represents a fixed, immutable byte buffer with efficient
* positional access. Buffers may be sliced and concatenated,
* and thus be used to implement bytestreams.
*
* @see [[com.twitter.io.Buf.ByteArray]] for an `Array[Byte]` backed
* implementation.
* @see [[com.twitter.io.Buf.ByteBuffer]] for an `nio.ByteBuffer` backed
* implementation.
* @see [[com.twitter.io.Buf.apply]] for creating a `Buf` from other `Bufs`
* @see [[com.twitter.io.Buf.Empty]] for an empty `Buf`.
*/
abstract class Buf { outer =>

  // Cached hash code for the Buf.
  // Note: there is an opportunity for a race in computing cachedHashCode
  // but since the computed hash code is deterministic the worst case
  // scenario is duplication of work.
  private[this] var cachedHashCode = 0

  /**
   * Write the entire contents of this `Buf` into the given array at
   * the given offset. Partial writes aren't supported directly
   * through this API; they can be accomplished by first [[slice slicing]] the
   * buffer.
   * This method should be preferred over the `Buf.ByteArray.extract`, `Buf.ByteArray.Owned`,
   * etc family of functions when you want to control the destination of the data, as
   * opposed to letting the `Buf` implementation manage the destination. For example, if you
   * want to manually set the first bytes of an `Array[Byte]` and then efficiently copy the
   * contents of this `Buf` to the remaining space.
   *
   * @see [[write(ByteBuffer)]] for writing to nio buffers.
   *
   * @throws IllegalArgumentException when `output` is too small to
   * contain all the data.
   *
   * @note [[Buf]] implementors should use the helper [[checkWriteArgs]].
   */
  @throws(classOf[IllegalArgumentException])
  def write(output: Array[Byte], off: Int): Unit

  /**
   * Write the entire contents of this `Buf` into the given nio buffer.
   * Partial writes aren't supported directly through this API; they can be
   * accomplished by first [[slice slicing]] the buffer.
   * This method should be preferred over `Buf.ByteBuffer.extract`, `Buf.ByteArray.Owned`,
   * etc family of functions when you want to control the destination of the data, as
   * opposed to letting the `Buf` implementation manage the destination. For example, if
   * the data is destined for an IO operation, it may be preferable to provide a direct
   * nio `ByteBuffer` to ensure the avoidance of intermediate heap-based representations.
   *
   * @see [[write(Array[Byte], Int)]] for writing to byte arrays.
   *
   * @throws java.lang.IllegalArgumentException when `output` doesn't have enough
   * space as defined by `ByteBuffer.remaining()` to hold the contents of this `Buf`.
   *
   * @throws ReadOnlyBufferException if the provided buffer is read-only.
   *
   * @note [[Buf]] implementors should use the helper [[checkWriteArgs]].
   */
  @throws(classOf[IllegalArgumentException])
  @throws(classOf[ReadOnlyBufferException])
  def write(output: java.nio.ByteBuffer): Unit

  /**
   * The number of bytes in the buffer
   */
  def length: Int

  /**
   * Returns a new buffer representing a slice of this buffer, delimited
   * by the indices `from` inclusive and `until` exclusive: `[from, until)`.
   * Out of bounds indices are truncated. Negative indices are not accepted.
   *
   * @note [[Buf]] implementors should use the helpers [[checkSliceArgs]],
   * [[isSliceEmpty]], and [[isSliceIdentity]].
   */
  def slice(from: Int, until: Int): Buf

  /**
   * Concatenate this buffer with the given buffer.
   *
   * Composites are flattened on both sides so the result never contains
   * a nested [[Buf.Composite]].
   */
  final def concat(right: Buf): Buf = {
    if (this.isEmpty) right
    else if (right.isEmpty) this
    else this match {
      // This could be much cleaner as a Tuple match, but we want to avoid the allocation.
      case left: Buf.Composite.Impl =>
        right match {
          case right: Buf.Composite.Impl => // (Composite, Composite)
            new Buf.Composite.Impl(Buf.fastConcat(left.bs, right.bs), left.length + right.length)
          case _ => // (Composite, Buf)
            new Buf.Composite.Impl(left.bs :+ right, left.length + right.length)
        }
      case left =>
        right match {
          case right: Buf.Composite.Impl => // (Buf, Composite)
            new Buf.Composite.Impl(left +: right.bs, left.length + right.length)
          case _ => // (Buf, Buf)
            new Buf.Composite.Impl(Vector(left, right), left.length + right.length)
        }
    }
  }

  /**
   * Returns the byte at the given index.
   */
  def get(index: Int): Byte

  /**
   * Process the `Buf` 1-byte at a time using the given
   * [[Buf.Processor]], starting at index `0` until
   * index [[length]]. Processing will halt if the processor
   * returns `false` or after processing the final byte.
   *
   * @return -1 if the processor processed all bytes or
   *         the last processed index if the processor returns
   *         `false`.
   *
   * @note this mimics the design of Netty 4's
   *       `io.netty.buffer.ByteBuf.forEachByte`
   */
  def process(processor: Buf.Processor): Int =
    process(0, length, processor)

  /**
   * Process the `Buf` 1-byte at a time using the given
   * [[Buf.Processor]], starting at index `from` until
   * index `until`. Processing will halt if the processor
   * returns `false` or after processing the final byte.
   *
   * @return -1 if the processor processed all bytes or
   *         the last processed index if the processor returns
   *         `false`.
   *         Will return -1 if `from` is greater than or equal to
   *         `until` or [[length]].
   *         Will return -1 if `until` is greater than or equal to
   *         [[length]].
   *
   * @param from the starting index, inclusive. Must be non-negative.
   *
   * @param until the ending index, exclusive. Must be non-negative.
   *
   * @note this mimics the design of Netty 4's
   *       `io.netty.buffer.ByteBuf.forEachByte`
   */
  def process(from: Int, until: Int, processor: Buf.Processor): Int

  // Byte-wise equality; delegates to [[Buf.equals]] which compares content,
  // not representation (e.g. a ByteArray can equal a Composite).
  override def equals(other: Any): Boolean = other match {
    case other: Buf => Buf.equals(this, other)
    case _ => false
  }

  final override def hashCode: Int = {
    // A magic number of 0 signifies that the hash code has not been computed.
    // Note: 0 may be the legitimate hash code of a Buf, in which case the
    // hash code will have extra collisions with -1.
    if (cachedHashCode == 0) {
      val computed = computeHashCode
      cachedHashCode = if (computed == 0) -1 else computed
    }
    cachedHashCode
  }

  /** `true` when this buffer contains no bytes. */
  def isEmpty: Boolean = length == 0

  /** Helper to support 0-copy coercion to Buf.ByteArray. */
  protected def unsafeByteArrayBuf: Option[Buf.ByteArray]

  /** May require copying. */
  protected def unsafeByteArray: Array[Byte] = unsafeByteArrayBuf match {
    case Some(Buf.ByteArray.Owned(bytes, 0, end)) if end == bytes.length =>
      bytes
    case _ =>
      copiedByteArray
  }

  /** Compute the 32-bit FNV-1a hash code of this buf (see [[Buf.hash]]). */
  protected def computeHashCode: Int = Buf.hash(this)

  /** Definitely requires copying. */
  protected def copiedByteArray: Array[Byte] = {
    val bytes = new Array[Byte](length)
    write(bytes, 0)
    bytes
  }

  /** Helps implementations validate the arguments to [[slice]]. */
  protected[this] def checkSliceArgs(from: Int, until: Int): Unit = {
    if (from < 0)
      throw new IllegalArgumentException(s"'from' must be non-negative: $from")
    if (until < 0)
      throw new IllegalArgumentException(s"'until' must be non-negative: $until")
  }

  /** Helps implementations of [[slice]]: `true` when `[from, until)` selects no bytes. */
  protected[this] def isSliceEmpty(from: Int, until: Int): Boolean =
    until <= from || from >= length

  /** Helps implementations of [[slice]]: `true` when `[from, until)` covers the whole buffer. */
  protected[this] def isSliceIdentity(from: Int, until: Int): Boolean =
    from == 0 && until >= length

  /** Helps implementations validate the arguments to [[write]]:
   *  the space after `outputOff` must fit all [[length]] bytes. */
  protected[this] def checkWriteArgs(
    outputLen: Int,
    outputOff: Int
  ): Unit = {
    if (outputOff < 0)
      throw new IllegalArgumentException(s"offset must be non-negative: $outputOff")
    val len = length
    if (len > outputLen - outputOff)
      throw new IllegalArgumentException(
        s"Output too small, capacity=${outputLen-outputOff}, need=$len")
  }
}
/**
* Buf wrapper-types (like Buf.ByteArray and Buf.ByteBuffer) provide Shared and
* Owned APIs, each of which with construction & extraction utilities.
*
* The Owned APIs may provide direct access to a Buf's underlying
* implementation; and so mutating the data structure invalidates a Buf's
* immutability constraint. Users must take care to handle this data
* immutably.
*
* The Shared variants, on the other hand, ensure that the Buf shares no state
* with the caller (at the cost of additional allocation).
*
* Note: There are Java-friendly APIs for this object at `com.twitter.io.Bufs`.
*/
object Buf {
/**
* @return `true` if the processor would like to continue processing
* more bytes and `false` otherwise.
*
* @see [[Buf.process]]
*
* @note this is not a `Function1[Byte, Boolean]` despite very
* much fitting that interface. This was done to avoiding boxing
* of the `Bytes` which was quite squirrely and had an impact on
* performance.
*/
  abstract class Processor {
    /** @return `true` to keep processing, `false` to halt at the current byte. */
    def apply(byte: Byte): Boolean
  }
  // The single implementation backing [[Buf.Empty]]: a zero-length buffer
  // for which every operation is a no-op (or throws, for `get`).
  private class NoopBuf extends Buf {
    def write(buf: Array[Byte], off: Int): Unit =
      checkWriteArgs(buf.length, off)
    // Writing zero bytes into an nio buffer requires no work and no validation.
    def write(buffer: java.nio.ByteBuffer): Unit = ()
    override val isEmpty = true
    def length: Int = 0
    def slice(from: Int, until: Int): Buf = {
      checkSliceArgs(from, until)
      this
    }
    protected def unsafeByteArrayBuf: Option[Buf.ByteArray] = None
    def get(index: Int): Byte =
      throw new IndexOutOfBoundsException(s"Index out of bounds: $index")
    def process(from: Int, until: Int, processor: Processor): Int = {
      checkSliceArgs(from, until)
      // No bytes to visit, so the processor trivially "processed all bytes".
      -1
    }
  }

  /**
   * An empty buffer.
   */
  val Empty: Buf = new NoopBuf
  /**
   * Create a `Buf` out of the given `Bufs`.
   *
   * Empty components are dropped and nested composites are flattened,
   * so the result is either [[Buf.Empty]], a single non-empty component,
   * or a flat [[Composite]] of 2+ non-empty components.
   */
  def apply(bufs: Iterable[Buf]): Buf = {
    val builder = Vector.newBuilder[Buf]
    // Running total of bytes across all retained components.
    var length = 0
    bufs.foreach {
      case b: Composite.Impl =>
        // Guaranteed to be non-empty by construction
        length += b.length
        builder ++= b.bs
      case b =>
        val len = b.length
        if (len > 0) {
          length += len
          builder += b
        }
    }
    val filtered = builder.result()
    if (length == 0)
      Buf.Empty
    else if (filtered.size == 1)
      filtered.head
    else
      new Composite.Impl(filtered, length)
  }
/**
* A `Buf` which is composed of other `Bufs`.
*
* @see [[Buf.apply]] for creating new instances.
*/
  sealed abstract class Composite extends Buf {
    // The non-empty component buffers, in order.
    def bufs: IndexedSeq[Buf]
    // Precomputed sum of component lengths.
    protected def computedLength: Int
    // the factory method requires non-empty Bufs
    override def isEmpty: Boolean = false
    def length: Int = computedLength
    override def toString: String = s"Buf.Composite(length=$length)"
    def write(output: Array[Byte], off: Int): Unit = {
      checkWriteArgs(output.length, off)
      // Write each component in order, advancing the destination offset.
      var offset = off
      var i = 0
      while (i < bufs.length) {
        val buf = bufs(i)
        buf.write(output, offset)
        offset += buf.length
        i += 1
      }
    }
    def write(buffer: java.nio.ByteBuffer): Unit = {
      checkWriteArgs(buffer.remaining, 0)
      // The nio buffer's position advances as each component writes itself.
      var i = 0
      while (i < bufs.length) {
        bufs(i).write(buffer)
        i += 1
      }
    }
    def slice(from: Int, until: Int): Buf = {
      checkSliceArgs(from, until)
      if (isSliceEmpty(from, until)) return Buf.Empty
      else if (isSliceIdentity(from, until)) return this
      // `begin`/`end` are the slice bounds rebased as we walk past each
      // component. `start`/`finish` become the indexes into `bufs` of the
      // first/last overlapping component; `startBegin`/`startEnd` and
      // `finishBegin`/`finishEnd` are the slice bounds relative to those
      // components. `finish == -1` after the loop means the slice runs to
      // the end of the composite.
      var begin = from
      var end = until
      var start, startBegin, startEnd, finish, finishBegin, finishEnd = -1
      var cur = 0
      while (cur < bufs.length && finish == -1) {
        val buf = bufs(cur)
        val len = buf.length
        if (begin >= 0 && begin < len) {
          start = cur
          startBegin = begin
          startEnd = end
        }
        if (end <= len) {
          finish = cur
          finishBegin = math.max(0, begin)
          finishEnd = end
        }
        begin -= len
        end -= len
        cur += 1
      }
      if (start == -1) Buf.Empty
      else if (start == finish || (start == (cur - 1) && finish == -1)) {
        // Slice falls entirely within a single component.
        bufs(start).slice(startBegin, startEnd)
      } else if (finish == -1) {
        // Slice extends to the end of the composite.
        // NOTE(review): `length` below is the total *byte* length, not
        // `bufs.length`; since every component is non-empty it is always
        // >= bufs.length and `Vector.slice` clamps, so this takes all
        // remaining components — confirm this is intentional.
        val untrimmedFirst = bufs(start)
        val first: Buf =
          if (startBegin == 0 && startEnd >= untrimmedFirst.length) null
          else untrimmedFirst.slice(startBegin, startEnd)
        Buf(
          if (first == null) bufs.slice(start, length)
          else first +: bufs.slice(start + 1, length))
      } else {
        // General case: possibly-trimmed first and last components with
        // whole components in between. `null` marks "keep untrimmed".
        val untrimmedFirst = bufs(start)
        val first: Buf =
          if (startBegin == 0 && startEnd >= untrimmedFirst.length) null
          else untrimmedFirst.slice(startBegin, startEnd)
        val untrimmedLast = bufs(finish)
        val last: Buf =
          if (finishBegin == 0 && finishEnd >= untrimmedLast.length) null
          else untrimmedLast.slice(finishBegin, finishEnd)
        Buf(
          if (first == null && last == null) bufs.slice(start, finish + 1)
          else if (first == null) bufs.slice(start, finish) :+ last
          else if (last == null) first +: bufs.slice(start + 1, finish + 1)
          else first +: bufs.slice(start + 1, finish) :+ last)
      }
    }
    protected def unsafeByteArrayBuf: Option[ByteArray] = None
    // Compare this composite byte-by-byte against any other Buf via `get`.
    // Precondition (enforced by `equals`): lengths already match.
    private[this] def equalsIndexed(other: Buf): Boolean = {
      var otherIdx = 0
      var bufIdx = 0
      while (bufIdx < bufs.length) {
        val buf = bufs(bufIdx)
        val bufLen = buf.length
        var byteIdx = 0
        while (otherIdx < length && byteIdx < bufLen) {
          if (other.get(otherIdx) != buf.get(byteIdx))
            return false
          byteIdx += 1
          otherIdx += 1
        }
        bufIdx += 1
      }
      true
    }
    override def equals(other: Any): Boolean = other match {
      case otherBuf: Buf if length == otherBuf.length =>
        otherBuf match {
          case Composite(otherBufs) =>
            // this is 2 nested loops, with the outer loop tracking which
            // Buf's they are on. The inner loop compares individual bytes across
            // the Bufs "segments".
            var otherBufIdx = 0
            var bufIdx = 0
            var byteIdx = 0
            var otherByteIdx = 0
            while (bufIdx < bufs.length && otherBufIdx < otherBufs.length) {
              val buf = bufs(bufIdx)
              val otherB = otherBufs(otherBufIdx)
              while (byteIdx < buf.length && otherByteIdx < otherB.length) {
                if (buf.get(byteIdx) != otherB.get(otherByteIdx))
                  return false
                byteIdx += 1
                otherByteIdx += 1
              }
              if (byteIdx == buf.length) {
                byteIdx = 0
                bufIdx += 1
              }
              if (otherByteIdx == otherB.length) {
                otherByteIdx = 0
                otherBufIdx += 1
              }
            }
            true
          case _ =>
            // Prefer indexed access to a backing array when available.
            otherBuf.unsafeByteArrayBuf match {
              case Some(otherBab) =>
                equalsIndexed(otherBab)
              case None =>
                equalsIndexed(otherBuf)
            }
        }
      case _ =>
        false
    }
  }
  object Composite {
    def unapply(buf: Composite): Option[IndexedSeq[Buf]] =
      Some(buf.bufs)

    /** Basic implementation of a [[Buf]] created from n-`Bufs`. */
    private[Buf] final class Impl(
      val bs: Vector[Buf],
      protected val computedLength: Int)
      extends Buf.Composite {

      // ensure there is a need for a `Composite`
      if (bs.length <= 1)
        throw new IllegalArgumentException(s"Must have 2 or more bufs: $bs")
      if (computedLength <= 0)
        throw new IllegalArgumentException(s"Length must be positive: $computedLength")

      def bufs: IndexedSeq[Buf] = bs

      // the factory method requires non-empty Bufs
      override def isEmpty: Boolean = false

      // Walk components until the one containing `index` is found;
      // `byteIdx` is the absolute index of the current component's first byte.
      def get(index: Int): Byte = {
        var bufIdx = 0
        var byteIdx = 0
        while (bufIdx < bs.length) {
          val buf = bs(bufIdx)
          val bufLen = buf.length
          if (index < byteIdx + bufLen) {
            return buf.get(index - byteIdx)
          } else {
            byteIdx += bufLen
          }
          bufIdx += 1
        }
        throw new IndexOutOfBoundsException(s"Index out of bounds: $index")
      }

      def process(from: Int, until: Int, processor: Processor): Int = {
        checkSliceArgs(from, until)
        if (isSliceEmpty(from, until)) return -1
        // `i` is the absolute index of the current component's first byte
        // (then, after processing, of the last byte visited plus one).
        var i = 0
        var bufIdx = 0
        var continue = true
        while (continue && i < until && bufIdx < bs.length) {
          val buf = bs(bufIdx)
          val bufLen = buf.length
          if (i + bufLen < from) {
            // skip ahead to the right Buf for `from`
            bufIdx += 1
            i += bufLen
          } else {
            // ensure we are positioned correctly in the first Buf
            var byteIdx =
              if (i >= from) 0
              else from - i
            val endAt = math.min(bufLen, until - i)
            while (continue && byteIdx < endAt) {
              val byte = buf.get(byteIdx)
              if (processor(byte)) {
                byteIdx += 1
              } else {
                continue = false
              }
            }
            bufIdx += 1
            i += byteIdx
          }
        }
        // -1 signals "all requested bytes processed"; otherwise the
        // absolute index at which the processor returned false.
        if (continue) -1
        else i
      }
    }
  }
/**
* A buffer representing an array of bytes.
*/
  // A view over `bytes` spanning indices `[begin, end)`; the array is
  // shared (not copied), so callers must treat it as immutable.
  class ByteArray(
    private[Buf] val bytes: Array[Byte],
    private[Buf] val begin: Int,
    private[Buf] val end: Int)
    extends Buf {

    def get(index: Int): Byte = {
      val off = begin + index
      // NOTE(review): a negative `index` is not range-checked here and
      // would read before `begin` — confirm callers never pass one.
      if (off >= end)
        throw new IndexOutOfBoundsException(s"Index out of bounds: $index")
      bytes(off)
    }

    def process(from: Int, until: Int, processor: Processor): Int = {
      checkSliceArgs(from, until)
      if (isSliceEmpty(from, until)) return -1
      var i = from
      var continue = true
      val endAt = math.min(until, length)
      while (continue && i < endAt) {
        val byte = bytes(begin + i)
        if (processor(byte))
          i += 1
        else
          continue = false
      }
      // -1 means all bytes in range were processed.
      if (continue) -1
      else i
    }

    def write(buf: Array[Byte], off: Int): Unit = {
      checkWriteArgs(buf.length, off)
      System.arraycopy(bytes, begin, buf, off, length)
    }

    def write(buffer: java.nio.ByteBuffer): Unit = {
      checkWriteArgs(buffer.remaining, 0)
      buffer.put(bytes, begin, length)
    }

    def slice(from: Int, until: Int): Buf = {
      checkSliceArgs(from, until)
      if (isSliceEmpty(from, until)) Buf.Empty
      else if (isSliceIdentity(from, until)) this
      else {
        // Share the backing array; only the [begin, end) window narrows.
        val cap = math.min(until, length)
        ByteArray.Owned(bytes, begin+from, math.min(begin+cap, end))
      }
    }

    def length: Int = end - begin

    override def toString: String = s"ByteArray(length=$length)"

    // Precondition (enforced by callers): `other` has at least `length`
    // bytes available starting at `offset`.
    private[this] def equalsBytes(other: Array[Byte], offset: Int): Boolean = {
      var i = 0
      while (i < length) {
        if (bytes(begin+i) != other(offset+i)) {
          return false
        }
        i += 1
      }
      true
    }

    override def equals(other: Any): Boolean = other match {
      case c: Buf.Composite =>
        // Composite has the specialized cross-segment comparison; delegate.
        c == this
      case other: Buf if other.length == length =>
        other match {
          case ba: Buf.ByteArray =>
            equalsBytes(ba.bytes, ba.begin)
          case _ =>
            other.unsafeByteArrayBuf match {
              case Some(bs) =>
                equalsBytes(bs.bytes, bs.begin)
              case None =>
                // Fall back to streaming comparison via `process`.
                val processor = new Processor {
                  private[this] var pos = 0
                  def apply(b: Byte): Boolean = {
                    if (b == bytes(begin + pos)) {
                      pos += 1
                      true
                    } else {
                      false
                    }
                  }
                }
                other.process(processor) == -1
            }
        }
      case _ => false
    }

    protected def unsafeByteArrayBuf: Option[Buf.ByteArray] = Some(this)
  }
object ByteArray {
/**
* Construct a buffer representing the given bytes.
*/
def apply(bytes: Byte*): Buf = Owned(bytes.toArray)
/**
* Safely coerce a buffer to a Buf.ByteArray, potentially without copying its underlying
* data.
*/
def coerce(buf: Buf): Buf.ByteArray = buf match {
case buf: Buf.ByteArray => buf
case _ => buf.unsafeByteArrayBuf match {
case Some(b) => b
case None =>
val bytes = buf.copiedByteArray
new ByteArray(bytes, 0, bytes.length)
}
}
/** Owned non-copying constructors/extractors for Buf.ByteArray. */
object Owned {
/**
* Construct a buffer representing the provided array of bytes
* at the given offsets.
*/
def apply(bytes: Array[Byte], begin: Int, end: Int): Buf =
if (begin == end) Buf.Empty
else new ByteArray(bytes, begin, end)
/** Construct a buffer representing the provided array of bytes. */
def apply(bytes: Array[Byte]): Buf = apply(bytes, 0, bytes.length)
/** Extract the buffer's underlying offsets and array of bytes. */
def unapply(buf: ByteArray): Option[(Array[Byte], Int, Int)] =
Some((buf.bytes, buf.begin, buf.end))
/**
* Get a reference to a Buf's data as an array of bytes.
*
* A copy may be performed if necessary.
*/
def extract(buf: Buf): Array[Byte] = Buf.ByteArray.coerce(buf) match {
case Buf.ByteArray.Owned(bytes, 0, end) if end == bytes.length =>
bytes
case Buf.ByteArray.Shared(bytes) =>
// If the unsafe version included offsets, we need to create a new array
// containing only the relevant bytes.
bytes
}
}
/** Safe copying constructors / extractors for Buf.ByteArray. */
object Shared {
/** Construct a buffer representing a copy of an array of bytes at the given offsets. */
def apply(bytes: Array[Byte], begin: Int, end: Int): Buf =
if (begin == end) Buf.Empty
else {
val copy = java.util.Arrays.copyOfRange(bytes, begin, end-begin)
new ByteArray(copy, 0, end-begin)
}
/** Construct a buffer representing a copy of the entire byte array. */
def apply(bytes: Array[Byte]): Buf = apply(bytes, 0, bytes.length)
/** Extract a copy of the buffer's underlying array of bytes. */
def unapply(ba: ByteArray): Option[Array[Byte]] = Some(ba.copiedByteArray)
/** Get a copy of a a Buf's data as an array of bytes. */
def extract(buf: Buf): Array[Byte] = Buf.ByteArray.coerce(buf).copiedByteArray
}
}
/**
* A buffer representing the remaining bytes in the
* given ByteBuffer. The given buffer will not be
* affected.
*
* Modifications to the ByteBuffer's content will be
* visible to the resulting Buf. The ByteBuffer should
* be immutable in practice.
*/
  // Wraps an nio ByteBuffer without copying; the Buf's bytes are the
  // buffer's `[position, limit)` window, read via absolute `get` so the
  // underlying buffer's position is never mutated.
  class ByteBuffer(private[Buf] val underlying: java.nio.ByteBuffer) extends Buf {
    def length: Int = underlying.remaining

    def get(index: Int): Byte =
      underlying.get(underlying.position() + index)

    def process(from: Int, until: Int, processor: Processor): Int = {
      checkSliceArgs(from, until)
      if (isSliceEmpty(from, until)) return -1
      val pos = underlying.position()
      var i = from
      var continue = true
      val endAt = math.min(until, length)
      while (continue && i < endAt) {
        val byte = underlying.get(pos + i)
        if (processor(byte))
          i += 1
        else
          continue = false
      }
      // -1 means all bytes in range were processed.
      if (continue) -1
      else i
    }

    override def toString: String = s"ByteBuffer(length=$length)"

    def write(output: Array[Byte], off: Int): Unit = {
      checkWriteArgs(output.length, off)
      // `duplicate` shares content but has independent position/limit,
      // so reading does not disturb `underlying`.
      underlying.duplicate.get(output, off, length)
    }

    def write(buffer: java.nio.ByteBuffer): Unit = {
      checkWriteArgs(buffer.remaining, 0)
      buffer.put(underlying.duplicate)
    }

    def slice(from: Int, until: Int): Buf = {
      checkSliceArgs(from, until)
      if (isSliceEmpty(from, until)) Buf.Empty
      else if (isSliceIdentity(from, until)) this
      else {
        // Narrow a duplicate's limit/position; no bytes are copied.
        val dup = underlying.duplicate()
        val limit = dup.position + math.min(until, length)
        if (dup.limit > limit) dup.limit(limit)
        dup.position(dup.position + from)
        new ByteBuffer(dup)
      }
    }

    override def equals(other: Any): Boolean = other match {
      case ByteBuffer(otherBB) => underlying.equals(otherBB)
      case buf: Buf => Buf.equals(this, buf)
      case _ => false
    }

    protected def unsafeByteArrayBuf: Option[Buf.ByteArray] =
      if (underlying.hasArray) {
        val array = underlying.array
        val begin = underlying.arrayOffset + underlying.position
        val end = begin + underlying.remaining
        Some(new ByteArray(array, begin, end))
      } else None
  }
  object ByteBuffer {

    /** Extract a read-only view of the underlying [[java.nio.ByteBuffer]]. */
    def unapply(buf: ByteBuffer): Option[java.nio.ByteBuffer] =
      Some(buf.underlying.asReadOnlyBuffer)

    /** Coerce a generic buffer to a Buf.ByteBuffer, potentially without copying data. */
    def coerce(buf: Buf): ByteBuffer = buf match {
      case buf: ByteBuffer => buf
      case _ =>
        // `unsafeByteArrayBuf` is an Option[Buf.ByteArray], so the `Some`
        // case is fully covered by the Owned extractor (it matches any
        // ByteArray); `None` is the only other possibility.
        val bb = buf.unsafeByteArrayBuf match {
          case Some(ByteArray.Owned(bytes, begin, end)) =>
            java.nio.ByteBuffer.wrap(bytes, begin, end-begin)
          case None =>
            java.nio.ByteBuffer.wrap(buf.copiedByteArray)
        }
        new ByteBuffer(bb)
    }

    /** Owned non-copying constructors/extractors for Buf.ByteBuffer. */
    object Owned {
      // N.B. We cannot use ByteBuffer.asReadOnly to ensure correctness because
      // it prevents direct access to its underlying byte array.

      /**
       * Create a Buf.ByteBuffer by directly wrapping the provided [[java.nio.ByteBuffer]].
       */
      def apply(bb: java.nio.ByteBuffer): Buf =
        if (bb.remaining == 0) Buf.Empty
        else new ByteBuffer(bb)

      /** Extract the buffer's underlying [[java.nio.ByteBuffer]]. */
      def unapply(buf: ByteBuffer): Option[java.nio.ByteBuffer] = Some(buf.underlying)

      /**
       * Get a reference to a Buf's data as a ByteBuffer.
       *
       * A copy may be performed if necessary.
       */
      def extract(buf: Buf): java.nio.ByteBuffer = Buf.ByteBuffer.coerce(buf).underlying
    }

    /** Safe copying constructors/extractors for Buf.ByteBuffer. */
    object Shared {
      // Copy the remaining bytes of `orig` into a fresh heap buffer,
      // leaving `orig`'s position untouched (we read via a duplicate).
      private[this] def copy(orig: java.nio.ByteBuffer): java.nio.ByteBuffer = {
        val copy = java.nio.ByteBuffer.allocate(orig.remaining)
        copy.put(orig.duplicate)
        copy.flip()
        copy
      }

      def apply(bb: java.nio.ByteBuffer): Buf = Owned(copy(bb))

      def unapply(buf: ByteBuffer): Option[java.nio.ByteBuffer] = Owned.unapply(buf).map(copy)

      def extract(buf: Buf): java.nio.ByteBuffer = copy(Owned.extract(buf))
    }
  }
  /**
   * Byte equality between two buffers.
   */
  def equals(x: Buf, y: Buf): Boolean = {
    if (x eq y) return true

    val len = x.length
    if (len != y.length) return false

    // Prefer Composite's equals implementation to minimize overhead
    x match {
      case _: Composite => return x == y
      case _ => ()
    }
    y match {
      case _: Composite => return y == x
      case _ => ()
    }

    // Stream x's bytes and compare each against y's byte at the same index.
    val processor = new Processor {
      private[this] var pos = 0
      def apply(b: Byte): Boolean = {
        if (b == y.get(pos)) {
          pos += 1
          true
        } else {
          false
        }
      }
    }
    // -1 from `process` means no mismatch was found.
    x.process(processor) == -1
  }
  /** The 32-bit FNV-1a hash of a Buf (XOR the byte, then multiply by the prime). */
  def hash(buf: Buf): Int = finishHash(hashBuf(buf))

  // Adapted from util-hashing.
  private[this] val UintMax: Long = 0xFFFFFFFFL
  private[this] val Fnv1a32Prime: Int = 16777619
  private[this] val Fnv1a32Init: Long = 0x811c9dc5L

  // Truncate the 64-bit accumulator to its low 32 bits.
  private[this] def finishHash(hash: Long): Int = (hash & UintMax).toInt

  // Folds `buf`'s bytes into the running hash `init`; composites recurse
  // per component so the result equals hashing the flattened bytes.
  private[this] def hashBuf(buf: Buf, init: Long = Fnv1a32Init): Long = buf match {
    case b if b.isEmpty => init
    case c: Buf.Composite =>
      var i = 0
      var h = init
      while (i < c.bufs.length) {
        h = hashBuf(c.bufs(i), h)
        i += 1
      }
      h
    case _ =>
      // use an explicit class in order to have fast access to `hash`
      // without boxing.
      class HashingProcessor(var hash: Long) extends Processor {
        def apply(byte: Byte): Boolean = {
          hash = (hash ^ (byte & 0xff)) * Fnv1a32Prime
          true
        }
      }
      val processor = new HashingProcessor(init)
      buf.process(processor)
      processor.hash
  }
/**
* Return a string representing the buffer
* contents in hexadecimal.
*/
def slowHexString(buf: Buf): String = {
val len = buf.length
val digits = new StringBuilder(2 * len)
var i = 0
while (i < len) {
digits ++= f"${buf.get(i)}%02x"
i += 1
}
digits.toString
}
  /**
   * Create and deconstruct Utf-8 encoded buffers.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.UTF_8` for a Java-friendly API.
   */
  object Utf8 extends StringCoder(JChar.UTF_8)

  /**
   * Create and deconstruct 16-bit UTF buffers.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.UTF_16` for a Java-friendly API.
   */
  object Utf16 extends StringCoder(JChar.UTF_16)

  /**
   * Create and deconstruct buffers encoded by the 16-bit UTF charset
   * with big-endian byte order.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.UTF_16BE` for a Java-friendly API.
   */
  object Utf16BE extends StringCoder(JChar.UTF_16BE)

  /**
   * Create and deconstruct buffers encoded by the 16-bit UTF charset
   * with little-endian byte order.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.UTF_16LE` for a Java-friendly API.
   */
  object Utf16LE extends StringCoder(JChar.UTF_16LE)

  /**
   * Create and deconstruct buffers encoded by the
   * ISO Latin Alphabet No. 1 charset.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.ISO_8859_1` for a Java-friendly API.
   */
  object Iso8859_1 extends StringCoder(JChar.ISO_8859_1)

  /**
   * Create and deconstruct buffers encoded by the 7-bit ASCII,
   * also known as ISO646-US or the Basic Latin block of the
   * Unicode character set.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @note See `com.twitter.io.Bufs.US_ASCII` for a Java-friendly API.
   */
  object UsAscii extends StringCoder(JChar.US_ASCII)

  /**
   * A [[StringCoder]] for a given [[java.nio.charset.Charset]] provides an
   * [[apply(String) encoder]]: `String` to [[Buf]]
   * and an [[unapply(Buf) extractor]]: [[Buf]] to `Option[String]`.
   *
   * @note Malformed and unmappable input is silently replaced
   *       see [[java.nio.charset.CodingErrorAction.REPLACE]]
   *
   * @see [[Utf8]] for UTF-8 encoding and decoding, and `Bufs.UTF8` for
   *     Java users. Constants exist for other standard charsets as well.
   */
  abstract class StringCoder(charset: Charset) {

    /**
     * Encode the String to its Buf representation per the charset
     */
    def apply(s: String): Buf =
      // Note: this was faster than `String.getBytes(Charset)` in JMH tests
      Buf.ByteArray.Owned(s.getBytes(charset.name))

    /**
     * @return Some(String representation of the Buf)
     * @note This extractor does *not* return None to indicate a failed
     *       or impossible decoding. Malformed or unmappable bytes will
     *       instead be silently replaced by the replacement character
     *       ("\\uFFFD") in the returned String. This behavior may change
     *       in the future.
     */
    def unapply(buf: Buf): Option[String] =
      // `Owned.extract` avoids a copy when the Buf already owns a full array.
      Some(new String(Buf.ByteArray.Owned.extract(buf), charset.name))
  }
/**
* Create and deconstruct unsigned 32-bit
* big endian encoded buffers.
*
* Deconstructing will return the value
* as well as the remaining buffer.
*/
object U32BE {
def apply(i: Int): Buf = {
val arr = new Array[Byte](4)
arr(0) = ((i >> 24) & 0xff).toByte
arr(1) = ((i >> 16) & 0xff).toByte
arr(2) = ((i >> 8) & 0xff).toByte
arr(3) = ( i & 0xff).toByte
ByteArray.Owned(arr)
}
def unapply(buf: Buf): Option[(Int, Buf)] =
if (buf.length < 4) None else {
val arr = new Array[Byte](4)
buf.slice(0, 4).write(arr, 0)
val rem = buf.slice(4, buf.length)
val value =
((arr(0) & 0xff) << 24) |
((arr(1) & 0xff) << 16) |
((arr(2) & 0xff) << 8) |
(arr(3) & 0xff )
Some((value, rem))
}
}
/**
* Create and deconstruct unsigned 64-bit
* big endian encoded buffers.
*
* Deconstructing will return the value
* as well as the remaining buffer.
*/
object U64BE {
def apply(l: Long): Buf = {
val arr = new Array[Byte](8)
arr(0) = ((l >> 56) & 0xff).toByte
arr(1) = ((l >> 48) & 0xff).toByte
arr(2) = ((l >> 40) & 0xff).toByte
arr(3) = ((l >> 32) & 0xff).toByte
arr(4) = ((l >> 24) & 0xff).toByte
arr(5) = ((l >> 16) & 0xff).toByte
arr(6) = ((l >> 8) & 0xff).toByte
arr(7) = ( l & 0xff).toByte
ByteArray.Owned(arr)
}
def unapply(buf: Buf): Option[(Long, Buf)] =
if (buf.length < 8) None else {
val arr = new Array[Byte](8)
buf.slice(0, 8).write(arr, 0)
val rem = buf.slice(8, buf.length)
val value =
((arr(0) & 0xff).toLong << 56) |
((arr(1) & 0xff).toLong << 48) |
((arr(2) & 0xff).toLong << 40) |
((arr(3) & 0xff).toLong << 32) |
((arr(4) & 0xff).toLong << 24) |
((arr(5) & 0xff).toLong << 16) |
((arr(6) & 0xff).toLong << 8) |
(arr(7) & 0xff).toLong
Some((value, rem))
}
}
/**
* Create and deconstruct unsigned 32-bit
* little endian encoded buffers.
*
* Deconstructing will return the value
* as well as the remaining buffer.
*/
object U32LE {
def apply(i: Int): Buf = {
val arr = new Array[Byte](4)
arr(0) = ( i & 0xff).toByte
arr(1) = ((i >> 8) & 0xff).toByte
arr(2) = ((i >> 16) & 0xff).toByte
arr(3) = ((i >> 24) & 0xff).toByte
ByteArray.Owned(arr)
}
def unapply(buf: Buf): Option[(Int, Buf)] =
if (buf.length < 4) None else {
val arr = new Array[Byte](4)
buf.slice(0, 4).write(arr, 0)
val rem = buf.slice(4, buf.length)
val value =
( arr(0) & 0xff ) |
((arr(1) & 0xff) << 8) |
((arr(2) & 0xff) << 16) |
((arr(3) & 0xff) << 24)
Some((value, rem))
}
}
/**
* Create and deconstruct unsigned 64-bit
* little endian encoded buffers.
*
* Deconstructing will return the value
* as well as the remaining buffer.
*/
object U64LE {
def apply(l: Long): Buf = {
val arr = new Array[Byte](8)
arr(0) = ( l & 0xff).toByte
arr(1) = ((l >> 8) & 0xff).toByte
arr(2) = ((l >> 16) & 0xff).toByte
arr(3) = ((l >> 24) & 0xff).toByte
arr(4) = ((l >> 32) & 0xff).toByte
arr(5) = ((l >> 40) & 0xff).toByte
arr(6) = ((l >> 48) & 0xff).toByte
arr(7) = ((l >> 56) & 0xff).toByte
ByteArray.Owned(arr)
}
def unapply(buf: Buf): Option[(Long, Buf)] =
if (buf.length < 8) None else {
val arr = new Array[Byte](8)
buf.slice(0, 8).write(arr, 0)
val rem = buf.slice(8, buf.length)
val value =
(arr(0) & 0xff).toLong |
((arr(1) & 0xff).toLong << 8) |
((arr(2) & 0xff).toLong << 16) |
((arr(3) & 0xff).toLong << 24) |
((arr(4) & 0xff).toLong << 32) |
((arr(5) & 0xff).toLong << 40) |
((arr(6) & 0xff).toLong << 48) |
((arr(7) & 0xff).toLong << 56)
Some((value, rem))
}
}
  // Modeled after the Vector.++ operator, but use indexes instead of `for`
  // comprehensions for the optimized paths to save allocations and time.
  // See Scala ticket SI-7725 for details.
  //
  // Strategy: when one side is tiny, or much smaller than the other
  // (by ConcatFasterFactor), push its elements one at a time onto the
  // larger vector via Vector's effectively-constant-time `:+` / `+:`;
  // otherwise rebuild both sides into a fresh Vector with a builder.
  private def fastConcat(head: Vector[Buf], tail: Vector[Buf]): Vector[Buf] = {
    val tlen = tail.length
    val hlen = head.length
    if (tlen <= TinyIsFaster || tlen < (hlen / ConcatFasterFactor)) {
      // `tail` is small relative to `head`: append its elements in order.
      var i = 0
      var acc = head
      while (i < tlen) {
        acc :+= tail(i)
        i += 1
      }
      acc
    } else if (hlen <= TinyIsFaster || hlen < (tlen / ConcatFasterFactor)) {
      // `head` is small relative to `tail`: prepend its elements, iterating
      // from the last backwards so the final ordering is preserved.
      var i = head.length - 1
      var acc = tail
      while (i >= 0) {
        acc = head(i) +: acc
        i -= 1
      }
      acc
    } else {
      // Comparable sizes: copy both sides into a fresh Vector via a builder.
      val builder = new VectorBuilder[Buf]
      val headIt = head.iterator
      while(headIt.hasNext) builder += headIt.next()
      val tailIt = tail.iterator
      while(tailIt.hasNext) builder += tailIt.next()
      builder.result()
    }
  }
  // Length at which eC append/prepend operations are always faster
  // ("eC" = effectively constant time, as documented for Vector ops).
  private[this] val TinyIsFaster = 2
  // Length ratio at which eC append/prepend operations are slower than making a new collection
  private[this] val ConcatFasterFactor = 32
}
| BuoyantIO/twitter-util | util-core/src/main/scala/com/twitter/io/Buf.scala | Scala | apache-2.0 | 38,782 |
package pw.anisimov.adverto.api
import java.util.UUID
import akka.actor.ActorRef
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers.{`Access-Control-Allow-Credentials`, `Access-Control-Allow-Headers`, `Access-Control-Max-Age`}
import akka.http.scaladsl.model.{HttpHeader, HttpResponse}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.ExceptionHandler
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
import pw.anisimov.adverto.data.CarAdvertsPersistor.{DeleteAdvert, GetAdvert, GetAdverts}
import pw.anisimov.adverto.data.model.CarAdvert
import scala.concurrent.ExecutionContext
import scala.language.postfixOps
/**
 * Akka HTTP route exposing CRUD operations for car adverts backed by
 * a persistence actor (`dataActor`), with permissive CORS enabled.
 *
 * Endpoints:
 *  - GET    /advert/{uuid}        fetch one advert (404 if absent)
 *  - DELETE /advert/{uuid}        delete an advert, returns its id
 *  - PUT    /advert/{uuid}        upsert an advert under the given id
 *  - POST   /advert               create a new advert (id assigned by store)
 *  - GET    /advert?sort=field    list adverts sorted by `field` (default "id")
 */
trait AdvertsRoute extends AdvertoJsonProtocol with CorsSupport {
  // Dependencies supplied by the class mixing this trait in.
  implicit val timeout: Timeout
  implicit val dispatcher: ExecutionContext
  implicit val materializer: ActorMaterializer
  val dataActor: ActorRef
  // CORS: allow any origin and the common request headers, with credentials.
  override val corsAllowOrigins: List[String] = List("*")
  override val corsAllowedHeaders: List[String] = List("Origin", "X-Requested-With", "Content-Type", "Accept",
    "Accept-Encoding", "Accept-Language", "Host", "Referer", "User-Agent")
  override val corsAllowCredentials: Boolean = true
  override val optionsCorsHeaders: List[HttpHeader] = List[HttpHeader](
    `Access-Control-Allow-Headers`(corsAllowedHeaders.mkString(", ")),
    // Preflight responses may be cached for 20 days (value is in seconds).
    `Access-Control-Max-Age`(60 * 60 * 24 * 20),
    `Access-Control-Allow-Credentials`(corsAllowCredentials)
  )
  // Maps domain failures to HTTP statuses:
  //  - IllegalArgumentException (advert validation) -> 400 Bad Request
  //  - NoSuchElementException (Option.get on a missing advert below) -> 404
  val advertsExceptionHandler = ExceptionHandler {
    case _: IllegalArgumentException =>
      complete(HttpResponse(BadRequest, entity = "Old cars should have registration and mileage"))
    case _: java.util.NoSuchElementException =>
      complete(HttpResponse(NotFound))
  }
  val advertsRoute = {
    cors {
      handleExceptions(advertsExceptionHandler) {
        pathPrefix("advert" / JavaUUID) { uuid =>
          pathEnd {
            get {
              complete {
                // .get throws NoSuchElementException when the advert is
                // absent; the handler above turns that into a 404.
                (dataActor ? GetAdvert(uuid)).map(ca => ca.asInstanceOf[Option[CarAdvert]].get)
              }
            } ~
              delete {
                complete {
                  // Responds 200 with the deleted advert's id as a string.
                  OK -> (dataActor ? DeleteAdvert(uuid)).map(ca => ca.asInstanceOf[Option[UUID]].get.toString)
                }
              } ~
              put {
                decodeRequest {
                  entity(as[CarAdvert]) { ca =>
                    complete {
                      // Force the path uuid onto the payload so the store
                      // writes under the addressed id.
                      OK -> (dataActor ? ca.copy(id = Some(uuid))).map(ca => ca.asInstanceOf[Option[UUID]].get.toString)
                    }
                  }
                }
              }
          }
        } ~
          path("advert") {
            post {
              decodeRequest {
                entity(as[CarAdvert]) { nca =>
                  complete {
                    // Clear any client-supplied id; the store assigns one
                    // and it is echoed back with 201 Created.
                    Created -> (dataActor ? nca.copy(id = None)).map(ca => ca.asInstanceOf[Option[UUID]].get.toString)
                  }
                }
              }
            } ~
              get {
                // Optional ?sort=field query parameter; defaults to "id".
                parameter('sort.as[String] ?) { sorting =>
                  complete {
                    (dataActor ? GetAdverts(sorting.getOrElse("id"))).mapTo[Array[CarAdvert]]
                  }
                }
              }
          }
      }
    }
  }
} | yoks/adverto | src/main/scala/pw/anisimov/adverto/api/AdvertsRoute.scala | Scala | apache-2.0 | 3,276 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of Scala code snippets matching specific criteria, giving a quick overview of the dataset's contents without deeper analysis.