code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.annotation
/**
 * Creates the ServerUnique annotation.
 *
 * Marks values which need to be unique on the level of the SERVER.
 *
 */
// StaticAnnotation: retained by the compiler for compile-time inspection;
// NOTE(review): not retained for runtime reflection — confirm consumers only
// read it at compile time.
class ServerUnique() extends scala.annotation.StaticAnnotation | musicEnfanthen/Knora | webapi/src/main/scala/org/knora/webapi/annotation/ServerUnique.scala | Scala | agpl-3.0 | 994 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.domain.categories
import com.normation.inventory.domain._
import com.normation.inventory.ldap.core._
import com.normation.rudder.domain.RudderLDAPConstants.A_TECHNIQUE_CATEGORY_UUID
import com.unboundid.ldap.sdk.DN
import com.normation.utils.HashcodeCaching
/**
* A category in the LDAP.
* It's a really simple class, with the same
* role as an ou, but even simpler.
*
*/
// Typed wrapper around a category's UUID string; HashcodeCaching memoises hashCode.
case class CategoryUuid(val value:String) extends Uuid with HashcodeCaching
// LDAP category entity: every attribute may be absent.
// NOTE(review): the class name is misspelled ("Caetgory" for "Category").
// Renaming would break external callers, so the typo is only flagged here.
case class CaetgoryEntity(
  val uuid:Option[CategoryUuid],      // identifier, if one has been assigned
  val description:Option[String],     // optional human-readable description
  val name:Option[String]             // optional display name
) extends HashcodeCaching
/**
* This trait is used for Technique Category, and for Node Group Category
* ID : the class identifying the category
* C The category class
* T The item class
*/
/**
 * Common shape shared by Technique categories and Node Group categories.
 *
 * A category has an identity, display metadata, a list of child category
 * ids and a list of the item ids it directly contains.
 *
 * CAT_ID  - type identifying a category
 * ITEM_ID - type identifying an item stored in a category
 */
trait ItemCategory[CAT_ID,ITEM_ID] {
  def id: CAT_ID
  def name: String
  def description: String
  def children: List[CAT_ID]
  def items: List[ITEM_ID]
  def isSystem: Boolean

  // Render as "name(id)" for logs and debugging.
  override def toString() = s"$name($id)"
}
| armeniaca/rudder | rudder-core/src/main/scala/com/normation/rudder/domain/categories/CategoryEntity.scala | Scala | gpl-3.0 | 2,735 |
package spark
import java.io._
/**
 * SerializationStream backed by plain Java object serialization.
 * Wraps `out` in an ObjectOutputStream and delegates every operation to it.
 */
class JavaSerializationStream(out: OutputStream) extends SerializationStream {
  val objOut = new ObjectOutputStream(out)

  def writeObject[T](t: T): Unit = objOut.writeObject(t)
  def flush(): Unit = objOut.flush()
  def close(): Unit = objOut.close()
}
/**
 * DeserializationStream backed by plain Java object deserialization.
 * Classes are resolved against the current thread's context class loader
 * so that application classes (not just those visible to the framework's
 * loader) can be deserialized.
 */
class JavaDeserializationStream(in: InputStream) extends DeserializationStream {
  val objIn = new ObjectInputStream(in) {
    override def resolveClass(desc: ObjectStreamClass) =
      Class.forName(desc.getName, false, Thread.currentThread.getContextClassLoader)
  }

  def readObject[T](): T = objIn.readObject().asInstanceOf[T]
  def close(): Unit = objIn.close()
}
/**
 * SerializerInstance that round-trips values through Java serialization.
 * `serialize`/`deserialize` work on byte arrays; `outputStream`/`inputStream`
 * expose the streaming variants.
 */
class JavaSerializerInstance extends SerializerInstance {
  def serialize[T](t: T): Array[Byte] = {
    val byteStream = new ByteArrayOutputStream()
    val objectStream = outputStream(byteStream)
    objectStream.writeObject(t)
    objectStream.close()
    byteStream.toByteArray
  }

  def deserialize[T](bytes: Array[Byte]): T =
    inputStream(new ByteArrayInputStream(bytes)).readObject().asInstanceOf[T]

  def outputStream(s: OutputStream): SerializationStream =
    new JavaSerializationStream(s)

  def inputStream(s: InputStream): DeserializationStream =
    new JavaDeserializationStream(s)
}
// Serializer implementation whose instances use plain Java serialization.
class JavaSerializer extends Serializer {
  // Each call hands out a fresh, independent instance.
  def newInstance(): SerializerInstance = new JavaSerializerInstance
}
| jperla/spark-advancers | core/src/main/scala/spark/JavaSerializer.scala | Scala | bsd-3-clause | 1,366 |
package dotty.tools
package dotc
package parsing
import Tokens._, Parsers._
import dotty.tools.io._
import util._
import core._
import ast.Trees._
import ast.desugar
import ast.desugar._
import core.Mode
import Contexts.Context
import scala.collection.mutable.ListBuffer
/**
 * Test harness that runs the parser and then desugars every parsed tree,
 * exercising `desugar` across all tree shapes.
 */
class DeSugarTest extends ParserTest {
  import dotty.tools.dotc.ast.untpd._
  import Mode._

  // Mode with no bits set: treated as "expression position".
  val Expr = Mode(0)

  /**
   * Tree map that desugars each node, tracking whether the current subtree
   * sits in expression, type or pattern position (curMode), because
   * desugaring behaves differently depending on position.
   */
  object DeSugar extends UntypedTreeMap {
    var curMode: Mode = Expr

    // Runs `op` with curMode temporarily set to `mode`, restoring it afterwards.
    def withMode[T](mode: Mode)(op: => T) = {
      val saved = curMode
      curMode = mode
      try op
      finally curMode = saved
    }

    // Convenience overloads: transform a tree (or list of trees) in a given mode.
    def transform(tree: Tree, mode: Mode)(implicit ctx: Context): Tree = withMode(mode) { transform(tree) }
    def transform(trees: List[Tree], mode: Mode)(implicit ctx: Context): List[Tree] = withMode(mode) { transform(trees) }

    override def transform(tree: Tree)(implicit ctx: Context): Tree = {
      // Desugar the node first, then recurse into its children with the
      // mode appropriate to each child's syntactic position.
      val tree1 = desugar(tree)(ctx.withModeBits(curMode))
      tree1 match {
        case TypedSplice(t) =>
          // Already-typed splices are left untouched.
          tree1
        case PostfixOp(od, op) =>
          PostfixOp(transform(od), op)
        case Select(qual, name) =>
          cpy.Select(tree1)(transform(qual, Expr), name)
        case Apply(fn, args) =>
          cpy.Apply(tree1)(transform(fn, Expr), transform(args))
        case TypeApply(fn, args) =>
          // Type arguments are visited in Type mode.
          cpy.TypeApply(tree1)(transform(fn, Expr), transform(args, Type))
        case New(tpt) =>
          cpy.New(tree1)(transform(tpt, Type))
        case Typed(expr, tpt) =>
          cpy.Typed(tree1)(transform(expr), transform(tpt, Type))
        case CaseDef(pat, guard, body) =>
          // Patterns are visited in Pattern mode; guard and body as expressions.
          cpy.CaseDef(tree1)(transform(pat, Pattern), transform(guard), transform(body))
        case SeqLiteral(elems, elemtpt) =>
          cpy.SeqLiteral(tree1)(transform(elems), transform(elemtpt))
        case UnApply(fun, implicits, patterns) =>
          cpy.UnApply(tree1)(transform(fun, Expr), transform(implicits), transform(patterns))
        case tree1 @ ValDef(name, tpt, _) =>
          cpy.ValDef(tree1)(name, transform(tpt, Type), transform(tree1.rhs))
        case tree1 @ DefDef(name, tparams, vparamss, tpt, _) =>
          cpy.DefDef(tree1)(name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt, Type), transform(tree1.rhs))
        case tree1 @ TypeDef(name, rhs) =>
          cpy.TypeDef(tree1)(name, transform(rhs, Type))
        case impl @ Template(constr, parents, self, _) =>
          // Note: any derived clauses are dropped (the Nil argument).
          cpy.Template(tree1)(transformSub(constr), transform(parents), Nil, transformSub(self), transform(impl.body, Expr))
        case Thicket(trees) =>
          Thicket(flatten(trees mapConserve super.transform))
        case tree1 =>
          super.transform(tree1)
      }
    }
  }

  // Returns the name of the first class-like definition found in `stats`,
  // descending into nested package definitions; "<empty>" if none.
  def firstClass(stats: List[Tree]): String = stats match {
    case Nil => "<empty>"
    case TypeDef(name, _) :: _ => name.toString
    case ModuleDef(name, _) :: _ => name.toString
    case (pdef: PackageDef) :: _ => firstClass(pdef)
    case stat :: stats => firstClass(stats)
  }

  // Fully-qualified name of the first class in a tree, for diagnostics.
  def firstClass(tree: Tree): String = tree match {
    case PackageDef(pid, stats) =>
      pid.show + "." + firstClass(stats)
    case _ => "??? " + tree.getClass
  }

  // Desugars one parsed tree (debug print left commented out).
  def desugarTree(tree: Tree): Tree = {
    //println("***** desugaring " + firstClass(tree))
    DeSugar.transform(tree)
  }

  // Desugars every parsed tree and forces `show` to exercise printing.
  def desugarAll() = parsedTrees foreach (desugarTree(_).show)
}
| som-snytt/dotty | compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala | Scala | apache-2.0 | 3,393 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.MenuTrl
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 07/11/17.
*/
/**
* Menu Trl Repository
* @param session
* @param executionContext
*/
/**
 * Menu Trl Repository.
 *
 * Persistence operations for [[MenuTrl]] translation rows, built on the
 * quill query DSL.
 *
 * @param session JDBC session used by the underlying database context
 * @param executionContext context the (blocking) database calls run on
 */
class MenuTrlRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.MenuTrlRepository[MenuTrl , Int]
    with MenuTrlMapping {

  /** Translation for a menu id in the default (en_US) language. */
  def getById(id: Int): Future[MenuTrl] = {
    getByLanguage(id , "en_US")
  }

  /**
   * Translation of a menu for a specific language.
   *
   * NOTE: fails the future with NoSuchElementException when no row matches
   * (`headOption.get`); kept as-is for interface compatibility.
   */
  def getByLanguage(id: Int , lang : String): Future[MenuTrl] = {
    Future(run(queryMenuTrl.filter(menu => menu.menuId == lift(id)
      && menu.language == lift(lang))).headOption.get)
  }

  /** Translation row identified by its UUID; fails when absent (see above). */
  def getByUUID(uuid: UUID): Future[MenuTrl] = {
    Future(run(queryMenuTrl.filter(_.uuid == lift(uuid.toString))).headOption.get)
  }

  /**
   * All translations (one per language) of the given menu.
   *
   * Fixed: the previous implementation ignored `id` and returned every row
   * in the table.
   */
  def getByMenuTrlId(id : Int) : Future[List[MenuTrl]] = {
    Future(run(queryMenuTrl.filter(_.menuId == lift(id))))
  }

  /** Every translation row in the table. */
  def getAll() : Future[List[MenuTrl]] = {
    Future(run(queryMenuTrl))
  }

  /** One page of translations together with paging metadata. */
  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[MenuTrl]] = {
    val offset = page * pageSize
    // NOTE(review): `limit` is computed as an end index but used as a row
    // count below, so a page can contain more than `pageSize` rows; behavior
    // preserved for compatibility — confirm intent with callers.
    val limit = (page + 1) * pageSize
    for {
      count <- countMenuTrl()
      elements <- if (offset > count) Future.successful(Nil)
      else selectMenuTrl(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }

  /** Total number of translation rows. */
  private def countMenuTrl() = {
    Future(run(queryMenuTrl.size).toInt)
  }

  /**
   * One slice of the table.
   *
   * Fixed: OFFSET/LIMIT are now part of the generated query instead of
   * being applied in memory after materialising the entire table.
   */
  private def selectMenuTrl(offset: Int, limit: Int): Future[Seq[MenuTrl]] = {
    Future(run(queryMenuTrl.drop(lift(offset)).take(lift(limit))).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/MenuTrlRepository.scala | Scala | gpl-3.0 | 2,817 |
package com.eevolution.context.dictionary.infrastructure.service.impl
import java.util.UUID
import com.eevolution.context.dictionary.infrastructure.repository.WorkflowNodeParaRepository
import com.eevolution.context.dictionary.infrastructure.service.WorkflowNodeParaService
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 21/11/17.
*/
/**
* Workflow Node Para Service Implementation
* @param registry
* @param workflowNodeParaRepository
*/
/**
 * Workflow Node Para Service Implementation.
 *
 * Thin Lagom service layer: every endpoint is a ServiceCall that simply
 * delegates to the repository.
 *
 * @param registry persistent entity registry (injected for wiring)
 * @param workflowNodeParaRepository repository the queries are forwarded to
 */
class WorkflowNodeParaServiceImpl (registry: PersistentEntityRegistry, workflowNodeParaRepository: WorkflowNodeParaRepository) extends WorkflowNodeParaService {

  // Page size applied when the caller does not provide one.
  private val DefaultPageSize = 10

  override def getAll() = ServiceCall { _ =>
    workflowNodeParaRepository.getAll()
  }

  override def getAllByPage(page : Option[Int], pageSize : Option[Int]) = ServiceCall { _ =>
    val pageNumber = page.getOrElse(0)
    val size = pageSize.getOrElse(DefaultPageSize)
    workflowNodeParaRepository.getAllByPage(pageNumber, size)
  }

  override def getById(id: Int) = ServiceCall { _ =>
    workflowNodeParaRepository.getById(id)
  }

  override def getByUUID(uuid: UUID) = ServiceCall { _ =>
    workflowNodeParaRepository.getByUUID(uuid)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/impl/WorkflowNodeParaServiceImpl.scala | Scala | gpl-3.0 | 2,119 |
package basic.defmacro
import scala.reflect.macros.Context
import scala.collection.mutable.{ListBuffer, Stack}
/**
 * You can run the following content exactly as it is.
 *
 * http://docs.scala-lang.org/ja/overviews/macros/overview.html
 */
object DefMacroSample01 {
  // Type-checked printf: the macro expands the call site into code that
  // evaluates the arguments once and prints each format segment.
  def printf2(format: String, params: Any*): Unit = macro printf2_impl

  // Macro implementation (Scala 2 def-macro with the old `Context` API).
  def printf2_impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = {
    import c.universe._
    // The format string must be a literal so it can be split at compile time.
    val Literal(Constant(s_format: String)) = format.tree
    // ValDefs that pre-evaluate each argument exactly once, in order.
    val evals = ListBuffer[ValDef]()
    // Binds `value` to a fresh local of type `tpe` and returns a reference to it.
    def precompute(value: Tree, tpe: Type): Ident = {
      val freshName = newTermName(c.fresh("eval$"))
      evals += ValDef(Modifiers(), freshName, TypeTree(tpe), value)
      Ident(freshName)
    }
    // Arguments are consumed in the order their specifiers appear.
    val paramsStack = Stack[Tree]((params map (_.tree)): _*)
    // Split the format string around %-specifiers (lookahead/lookbehind keeps
    // the specifiers themselves as separate segments).
    val refs = s_format.split("(?<=%[\\\\w%])|(?=%[\\\\w%])") map {
      case "%d" => precompute(paramsStack.pop, typeOf[Int])
      case "%s" => precompute(paramsStack.pop, typeOf[String])
      case "%%" => Literal(Constant("%"))
      case part => Literal(Constant(part))
    }
    // Emit: all pre-evaluations, then one print per segment, result ().
    val stats = evals ++ refs.map(ref => reify(print(c.Expr[Any](ref).splice)).tree)
    c.Expr[Unit](Block(stats.toList, Literal(Constant(()))))
  }
}
| thachi/scala-macro-sample | macro/src/main/scala/basic/defmacro/DefMacroSample01.scala | Scala | apache-2.0 | 1,264 |
/*
* This file is part of Apparat.
*
* Copyright (C) 2010 Joa Ebert
* http://www.joa-ebert.com/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package apparat.lzma
import apparat.sevenzip.compression.lzma.{Encoder => JEncoder}
import java.io.{InputStream => JInputStream, OutputStream => JOutputStream}
/**
* @author Joa Ebert
*/
/**
 * Facade over the 7-Zip LZMA encoder with fixed, sensible defaults.
 *
 * @author Joa Ebert
 */
object LZMA {
  // Fixed encoder configuration (standard LZMA defaults).
  private val DictionarySize = 1 << 23
  private val Lc = 3
  private val Lp = 0
  private val Pb = 2
  private val Fb = 128
  private val Eos = false
  private val Algorithm = 2
  private val MatchFinder = 1

  /**
   * Compresses `inputLength` bytes from `inputStream` into `outputStream`
   * in the LZMA "alone" format (coder properties, 8-byte little-endian
   * uncompressed size, then the compressed payload).
   *
   * Throws LZMAException when the encoder rejects any configuration value.
   * (Fixed: previously only the first check threw LZMAException while the
   * rest threw a bare Exception.)
   */
  def encode(inputStream: JInputStream, inputLength: Long, outputStream: JOutputStream) = {
    val encoder = new JEncoder()
    if(!(encoder setAlgorithm Algorithm)) {
      throw new LZMAException("Incorrect compression mode.")
    }
    if(!(encoder setDictionarySize DictionarySize)) {
      throw new LZMAException("Incorrect dictionary size.")
    }
    if(!(encoder setNumFastBytes Fb)) {
      throw new LZMAException("Incorrect fast-bytes value.")
    }
    if(!(encoder setMatchFinder MatchFinder)) {
      throw new LZMAException("Incorrect match-finder value.")
    }
    if(!encoder.setLcLpPb(Lc, Lp, Pb)) {
      throw new LZMAException("Incorrect LC or LP or PB value.")
    }
    encoder setEndMarkerMode Eos
    encoder writeCoderProperties outputStream
    // Header size field: -1 signals "use the end-of-stream marker" instead
    // of an explicit length.
    val fileSize = if(Eos) -1 else inputLength
    for(i <- 0 until 8) {
      outputStream.write((fileSize >>> (8 * i)).asInstanceOf[Int] & 0xFF)
    }
    encoder.code(inputStream, outputStream, -1, -1, null)
  }
}
| joa/apparat | apparat-lzma-encoder/src/main/scala/apparat/lzma/LZMA.scala | Scala | lgpl-2.1 | 2,198 |
package es.uvigo.ei.sing.biomsef
package database
import scala.concurrent.Future
import play.api.Play
import play.api.db.slick.{ DatabaseConfigProvider, HasDatabaseConfig }
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import slick.driver.JdbcProfile
import entity._
import util.Page
/**
 * Slick schema component for the "annotations" table.
 * Requires the article/keyword/author components (for foreign keys) to be
 * mixed in alongside it.
 */
trait AnnotationsComponent {
  self: ArticlesComponent with KeywordsComponent with AuthorsComponent with ArticleAuthorsComponent with HasDatabaseConfig[JdbcProfile] =>

  import driver.api._

  /** Row mapping: one annotation linking an article to a keyword over a text span. */
  class Annotations(tag: Tag) extends Table[Annotation](tag, "annotations") {
    def id = column[Annotation.ID]("annotation_id", O.PrimaryKey, O.AutoInc)
    def articleId = column[Article.ID]("article_id")
    def keywordId = column[Keyword.ID]("keyword_id")
    def text = column[String]("annotation_text")
    // Start/end offsets of the annotated span within the article text.
    def start = column[Long]("annotation_start")
    def end = column[Long]("annotation_end")
    // Cascade both updates and deletes from the referenced article/keyword.
    def article = foreignKey("annotation_article_fk", articleId, articles)(_.id, onUpdate = ForeignKeyAction.Cascade, onDelete = ForeignKeyAction.Cascade)
    def keyword = foreignKey("annotation_keyword_fk", keywordId, keywords)(_.id, onUpdate = ForeignKeyAction.Cascade, onDelete = ForeignKeyAction.Cascade)
    def * = (id.?, articleId, keywordId, text, start, end) <> (Annotation.tupled, Annotation.unapply)
  }

  // Entry point for queries against the annotations table.
  lazy val annotations = TableQuery[Annotations]
}
/**
 * Data-access object for annotations: CRUD, paging, filtering, and
 * aggregate counts per article/keyword.
 */
final class AnnotationsDAO extends AnnotationsComponent with ArticlesComponent with KeywordsComponent with AuthorsComponent with ArticleAuthorsComponent with HasDatabaseConfig[JdbcProfile] {

  import driver.api._
  import AnnotationsDAO._

  protected val dbConfig = DatabaseConfigProvider.get[JdbcProfile](Play.current)

  /**
   * Applies the text/article/keyword restrictions of `filter` to the
   * annotations table. Shared by `count(filter)` and `list` so the two
   * can never drift apart (previously the same chain was duplicated).
   */
  private def filteredQuery(filter: Filter) = {
    val byText = annotations.filter(_.text.toLowerCase like filter.text.toLowerCase)
    val byArticle = filter.articleId.fold(byText)(id => byText.filter(_.articleId === id))
    filter.keywordId.fold(byArticle)(id => byArticle.filter(_.keywordId === id))
  }

  /** Total number of annotations. */
  def count: Future[Int] =
    db.run(annotations.length.result)

  /** Number of annotations matching `filter`. */
  def count(filter: Filter): Future[Int] =
    db.run(filteredQuery(filter).length.result)

  /** Annotation by id, if present. */
  def get(id: Annotation.ID): Future[Option[Annotation]] =
    db.run(annotations.filter(_.id === id).result.headOption)

  /**
   * Article `id` together with its ordered authors and the full set of its
   * annotations and the keywords they reference; None when the article has
   * no annotations (the join yields no rows in that case).
   */
  def getAnnotatedArticle(id: Article.ID): Future[Option[AnnotatedArticle]] = {
    val joined = for {
      annotation <- annotations
      keyword <- keywords if annotation.keywordId === keyword.id
      article <- articles if annotation.articleId === article.id
      if article.id === id
    } yield (article, keyword, annotation)

    val articleAuthors = for {
      authoring <- this.articleAuthors
      author <- authors if authoring.authorId === author.id && authoring.articleId === id
    } yield (author, authoring.position)

    val query = joined.sortBy(_._1.id)
    // Authors are returned in their declared position order.
    val authorQuery = articleAuthors.sortBy(_._2).map(_._1)

    val executed = for {
      tuples <- db.run(query.result)
      authors <- db.run(authorQuery.result)
    } yield (tuples, authors)

    executed map { case (tuples, authors) =>
      val keywords = tuples.map(_._2).toSet
      val annotations = tuples.map(_._3).toSet
      val article = tuples.headOption.map(_._1)
      article.map(a => AnnotatedArticle(a, authors.toList, annotations, keywords))
    }
  }

  /** Annotation count per keyword id. */
  def countByKeyword: Future[Map[Keyword.ID, Int]] =
    db.run(annotations.groupBy(_.keywordId) map {
      case (kid, as) => (kid -> as.length)
    } result).map(_.toMap)

  /** Annotation count per article id. */
  def countByArticle: Future[Map[Article.ID, Int]] =
    db.run(annotations.groupBy(_.articleId) map {
      case (aid, as) => (aid -> as.length)
    } result).map(_.toMap)

  /** Annotation count per (article, keyword) pair. */
  def countByArticleAndKeyword: Future[Map[(Article.ID, Keyword.ID), Int]] =
    db.run(annotations.groupBy(a => (a.articleId, a.keywordId)) map {
      case ((aid, kid), as) => ((aid, kid) -> as.length)
    } result).map(_.toMap)

  /** One page of annotations matching `filter`, sorted by `orderBy`. */
  def list(page: Int = 0, pageSize: Int = 10, orderBy: OrderBy = OrderByID, filter: Filter = Filter()): Future[Page[Annotation]] = {
    val offset = pageSize * page
    val query = filteredQuery(filter).sortBy(orderBy.order).drop(offset).take(pageSize)
    for {
      total <- count(filter)
      result <- db.run(query.result)
    } yield Page(result, page, offset, total)
  }

  /** Inserts one annotation and returns it with its generated id. */
  def insert(annotation: Annotation): Future[Annotation] =
    db.run {
      (annotations returning annotations.map(_.id) into ((annotation, id) => annotation.copy(id = Some(id)))) += annotation
    }

  /** Batch insert; returns the annotations with their generated ids. */
  def insert(annotations: Annotation*): Future[Seq[Annotation]] =
    db.run {
      (this.annotations returning this.annotations.map(_.id) into ((annotation, id) => annotation.copy(id = Some(id)))) ++= annotations
    }

  /** Overwrites the row with the given id (the id in `annotation` is ignored). */
  def update(id: Annotation.ID, annotation: Annotation): Future[Unit] = {
    val updated: Annotation = annotation.copy(id = Some(id))
    db.run(annotations.filter(_.id === id).update(updated)).map(_ => ())
  }

  /** Updates by the annotation's own id; fails when the id is empty. */
  def update(annotation: Annotation): Future[Unit] =
    annotation.id.fold(Future.failed[Unit] {
      new IllegalArgumentException("It is impossible to update an annotation with empty ID")
    })(id => update(id, annotation))

  /** Deletes the annotation with the given id (no-op when absent). */
  def delete(id: Annotation.ID): Future[Unit] =
    db.run(annotations.filter(_.id === id).delete).map(_ => ())

  /** Deletes by the annotation's own id; fails when the id is empty. */
  def delete(annotation: Annotation): Future[Unit] =
    annotation.id.fold(Future.failed[Unit] {
      new IllegalArgumentException("It is impossible to delete an annotation with empty ID")
    })(delete)

  /** Deletes every annotation belonging to the given article. */
  def deleteAnnotationsOf(id: Article.ID): Future[Unit] =
    db.run(annotations.filter(_.articleId === id).delete).map(_ => ())
}
/**
 * Companion: filter and ordering descriptors used by [[AnnotationsDAO]].
 */
object AnnotationsDAO {
  import slick.ast.Ordering
  import slick.ast.Ordering.{ Asc, NullsDefault }
  import slick.lifted.ColumnOrdered

  private type Annotations = AnnotationsComponent#Annotations

  /**
   * Optional restrictions applied when listing/counting annotations.
   * `text` is a SQL LIKE pattern; "%" matches everything.
   */
  final case class Filter (
    text: String = "%",
    articleId: Option[Article.ID] = None,
    keywordId: Option[Keyword.ID] = None
  )

  /** Sort criterion: maps an annotations row to an ascending column. */
  sealed trait OrderBy {
    type ColumnType
    val order: Annotations => ColumnOrdered[ColumnType]
  }

  /** Sort by primary key, ascending. */
  case object OrderByID extends OrderBy {
    type ColumnType = Long
    val order: Annotations => ColumnOrdered[ColumnType] =
      row => ColumnOrdered(row.id, Ordering(Asc, NullsDefault))
  }

  /** Sort by the referenced article id, ascending. */
  case object OrderByArticleID extends OrderBy {
    type ColumnType = Long
    val order: Annotations => ColumnOrdered[ColumnType] =
      row => ColumnOrdered(row.articleId, Ordering(Asc, NullsDefault))
  }

  /** Sort by the referenced keyword id, ascending. */
  case object OrderByKeywordID extends OrderBy {
    type ColumnType = Long
    val order: Annotations => ColumnOrdered[ColumnType] =
      row => ColumnOrdered(row.keywordId, Ordering(Asc, NullsDefault))
  }

  /** Sort by the annotated text, ascending. */
  case object OrderByText extends OrderBy {
    type ColumnType = String
    val order: Annotations => ColumnOrdered[ColumnType] =
      row => ColumnOrdered(row.text, Ordering(Asc, NullsDefault))
  }
}
| agjacome/biomsef | src/main/scala/database/AnnotationsDAO.scala | Scala | mit | 7,175 |
package com.twitter.finagle.kestrel.protocol
import org.jboss.netty.handler.codec.oneone.OneToOneEncoder
import org.jboss.netty.buffer.ChannelBuffers
import com.twitter.finagle.memcached.util.ChannelBufferUtils._
import org.jboss.netty.channel._
import com.twitter.finagle.memcached.protocol.text.{Decoding, Tokens, TokensWithData, ValueLines}
import org.jboss.netty.util.CharsetUtil
import com.twitter.util.Duration
/**
 * Encodes kestrel [[Response]] messages into the memcached-style text
 * protocol's intermediate [[Decoding]] representation.
 */
private[kestrel] class ResponseToEncoding extends OneToOneEncoder {
  private[this] val ZERO = "0"
  private[this] val VALUE = "VALUE"
  private[this] val STORED = "STORED"
  private[this] val NOT_FOUND = "NOT_FOUND"
  private[this] val DELETED = "DELETED"
  private[this] val ERROR = "ERROR"

  /**
   * Maps each response variant to its wire tokens. Values responses become
   * one "VALUE <key> 0 <data>" line per value.
   *
   * Fixed: removed an unused ChannelBuffer that was allocated on every
   * Values response and never written to.
   */
  def encode(ctx: ChannelHandlerContext, ch: Channel, message: AnyRef): Decoding = {
    message match {
      case Stored()   => Tokens(Seq(STORED))
      case Deleted()  => Tokens(Seq(DELETED))
      case NotFound() => Tokens(Seq(NOT_FOUND))
      case Error()    => Tokens(Seq(ERROR))
      case Values(values) =>
        val tokensWithData = values map { case Value(key, value) =>
          TokensWithData(Seq(VALUE, key, ZERO), value)
        }
        ValueLines(tokensWithData)
    }
  }
}
/**
 * Encodes kestrel [[Command]] messages into the text protocol's
 * intermediate [[Decoding]] representation. Queue operations are all
 * expressed as GETs with path-style modifiers ("/open", "/close", ...).
 */
private[kestrel] class CommandToEncoding extends OneToOneEncoder {
  // Fixed: removed unused OPEN/CLOSE/ABORT/PEEK constants; the modifiers
  // below are written as part of the key ("/open" etc.), so the bare
  // constants were never referenced.
  private[this] val ZERO = "0"
  private[this] val GET = "get"
  private[this] val DELETE = "delete"
  private[this] val FLUSH = "flush"
  private[this] val SET = "set"

  // kestrel supports only 32-bit timeouts; larger values are clamped.
  private[this] def encodeTimeout(timeout: Option[Duration]) =
    timeout map { value =>
      "/t=" + math.min(value.inMilliseconds, Int.MaxValue).toString
    } getOrElse ""

  def encode(ctx: ChannelHandlerContext, ch: Channel, message: AnyRef): Decoding = {
    message match {
      case Set(key, expiry, value) =>
        TokensWithData(Seq(SET, key, ZERO, expiry.inSeconds.toString), value)
      case Get(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case Open(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + "/open" + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case Close(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + "/close" + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case CloseAndOpen(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + "/close/open" + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case Abort(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + "/abort" + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case Peek(queueName, timeout) =>
        val key = queueName.toString(CharsetUtil.US_ASCII) + "/peek" + encodeTimeout(timeout)
        Tokens(Seq(GET, key))
      case Delete(key) =>
        Tokens(Seq(DELETE, key))
      case Flush(key) =>
        Tokens(Seq(FLUSH, key))
    }
  }
}
| firebase/finagle | finagle-kestrel/src/main/scala/com/twitter/finagle/kestrel/protocol/Show.scala | Scala | apache-2.0 | 3,430 |
package im.actor.server.user
import akka.actor.ActorSystem
import akka.http.scaladsl.util.FastFuture
import akka.pattern.pipe
import im.actor.api.rpc.collections._
import im.actor.api.rpc.users.{ ApiFullUser, ApiUser }
import im.actor.server.ApiConversions._
import im.actor.server.acl.ACLUtils
import im.actor.server.dialog.UserAcl
/**
 * Query handlers mixed into [[UserProcessor]]: each method replies to the
 * current `sender()` with a response built from the user's state, shaped
 * for the requesting client.
 */
private[user] trait UserQueriesHandlers extends UserAcl {
  self: UserProcessor ⇒

  import UserQueries._

  // Replies with every auth id currently attached to this user.
  protected def getAuthIds(state: UserState): Unit =
    sender() ! GetAuthIdsResponse(state.authIds)

  /**
   * Builds the ApiUser struct as seen by the given client. The access hash
   * is client-specific (derived from clientAuthId), and the client's local
   * (renamed) name for this user is resolved unless the client is the user
   * himself or the system (clientUserId == 0).
   */
  protected def getApiStruct(state: UserState, clientUserId: Int, clientAuthId: Long)(implicit system: ActorSystem): Unit = {
    (for {
      localName ← if (clientUserId == state.id || clientUserId == 0)
        FastFuture.successful(None)
      else
        userExt.getLocalName(clientUserId, state.id)
    } yield GetApiStructResponse(ApiUser(
      id = userId,
      accessHash = ACLUtils.userAccessHash(clientAuthId, userId, state.accessSalt),
      name = state.name,
      localName = UserUtils.normalizeLocalName(localName),
      sex = Some(state.sex),
      avatar = state.avatar,
      isBot = Some(state.isBot),
      contactInfo = UserUtils.defaultUserContactRecords(state.phones.toVector, state.emails.toVector, state.socialContacts.toVector),
      nick = state.nickname,
      about = state.about,
      preferredLanguages = state.preferredLanguages.toVector,
      timeZone = state.timeZone,
      botCommands = state.botCommands,
      // ext is only attached when non-empty to avoid sending empty collections.
      ext = if (state.ext.nonEmpty) Some(extToApi(state.ext)) else None
    ))) pipeTo sender()
  }

  /**
   * Builds the ApiFullUser struct: like getApiStruct but including the
   * block state of this user relative to the client. `ext` is always None
   * here (full struct does not carry it).
   */
  protected def getApiFullStruct(state: UserState, clientUserId: Int, clientAuthId: Long)(implicit system: ActorSystem): Unit = {
    (for {
      isBlocked ← checkIsBlocked(state.id, clientUserId)
      localName ← if (clientUserId == state.id || clientUserId == 0)
        FastFuture.successful(None)
      else
        userExt.getLocalName(clientUserId, state.id)
    } yield GetApiFullStructResponse(ApiFullUser(
      id = userId,
      contactInfo = UserUtils.defaultUserContactRecords(state.phones.toVector, state.emails.toVector, state.socialContacts.toVector),
      about = state.about,
      preferredLanguages = state.preferredLanguages.toVector,
      timeZone = state.timeZone,
      botCommands = state.botCommands,
      ext = None,
      isBlocked = Some(isBlocked)
    ))) pipeTo sender()
  }

  // Replies with the user's phone and email contact records.
  protected def getContactRecords(state: UserState): Unit =
    sender() ! GetContactRecordsResponse(state.phones, state.emails)

  // Verifies that the supplied access hash matches the one derived for the caller.
  protected def checkAccessHash(state: UserState, senderAuthId: Long, accessHash: Long): Unit =
    sender() ! CheckAccessHashResponse(isCorrect = accessHash == ACLUtils.userAccessHash(senderAuthId, userId, state.accessSalt))

  // Replies with the caller-specific access hash for this user.
  protected def getAccessHash(state: UserState, clientAuthId: Long): Unit =
    sender() ! GetAccessHashResponse(ACLUtils.userAccessHash(clientAuthId, userId, state.accessSalt))

  // Replies with the raw user state.
  protected def getUser(state: UserState): Unit = sender() ! state

  // Replies whether the user is an admin (absent flag means false).
  protected def isAdmin(state: UserState): Unit = sender() ! IsAdminResponse(state.isAdmin.getOrElse(false))

  // Replies with the user's display name.
  protected def getName(state: UserState): Unit = sender() ! GetNameResponse(state.name)
}
| EaglesoftZJ/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/user/UserQueriesHandlers.scala | Scala | agpl-3.0 | 3,221 |
package core.algorithms.naive
import core.algorithms.NBodyAlgorithm
import core.models.Body
/**
 * Direct-summation N-body step: accumulates pairwise gravitational forces
 * over all O(n^2) body pairs, then integrates each body and clears its
 * accumulated force.
 */
class NaiveQuadraticMethod extends NBodyAlgorithm {

  /** Advances every body one time step; empty input is returned unchanged. */
  def updateBodies(bodies: IndexedSeq[Body]): IndexedSeq[Body] = {
    if (bodies.isEmpty)
      bodies
    else
      moveBodies(calculateForces(bodies))
  }

  // Integrates state from the accumulated force, then resets the force.
  private def moveBodies(bodies: IndexedSeq[Body]) =
    bodies.map(_.updateStateVariables().resetForce)

  /**
   * Accumulates forces for each unordered pair (i, j), applying the force
   * to body i and its negation (Newton's third law) to body j.
   */
  private def calculateForces(bodies: IndexedSeq[Body]) = {
    var bodiesMap: Map[Int, Body] = bodies.zipWithIndex.map { case (body, idx) => idx -> body }.toMap
    val length = bodies.length
    bodies.indices.dropRight(1).foreach { i =>
      (i + 1 until length).foreach { j =>
        val bi = bodiesMap(i)
        val bj = bodiesMap(j)
        val forceOnBiByBj = bi.forceBetween(bj)
        val forceOnBjByBi = forceOnBiByBj * -1
        bodiesMap += i -> bi.applyForce(forceOnBiByBj)
        bodiesMap += j -> bj.applyForce(forceOnBjByBi)
      }
    }
    // Fixed: return bodies in their original input order. `Map#values`
    // iteration order is unspecified for immutable HashMaps, so the
    // previous `bodiesMap.values.toIndexedSeq` could shuffle the bodies
    // between steps.
    bodies.indices.map(bodiesMap)
  }
}
| sumanyu/n-body-akka | src/main/scala/core/algorithms/naive/NaiveQuadraticMethod.scala | Scala | apache-2.0 | 1,011 |
package controllers
import helpers.UnitSpec
import helpers.vrm_assign.CookieFactoryForUnitSpecs.captureCertificateDetailsFormModel
import helpers.vrm_assign.CookieFactoryForUnitSpecs.captureCertificateDetailsModel
import helpers.vrm_assign.CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel
import helpers.vrm_assign.CookieFactoryForUnitSpecs.vehicleAndKeeperLookupFormModel
import helpers.TestWithApplication
import pages.vrm_assign.FulfilPage
import play.api.test.FakeRequest
import play.api.test.Helpers.BAD_REQUEST
import play.api.test.Helpers.LOCATION
import play.api.test.Helpers.OK
import uk.gov.dvla.vehicles.presentation.common.mappings.Email.{EmailId, EmailVerifyId}
import uk.gov.dvla.vehicles.presentation.common.testhelpers.CookieHelper.fetchCookiesFromHeaders
import views.vrm_assign.Confirm.ConfirmCacheKey
import views.vrm_assign.Confirm.GranteeConsentId
import views.vrm_assign.Confirm.KeeperEmailId
import views.vrm_assign.Confirm.SupplyEmail_true
import views.vrm_assign.Confirm.SupplyEmailId
import views.vrm_assign.VehicleLookup.UserType_Keeper
import webserviceclients.fakes.ConfirmFormConstants.KeeperEmailValid
/** Unit tests for the `Confirm` controller: page presentation and form submission,
  * including the required validation failures (missing supply-email choice,
  * missing keeper email address).
  */
class ConfirmUnitSpec extends UnitSpec {
  "present" should {
    "display the page" in new TestWithApplication {
      whenReady(present) { r =>
        r.header.status should equal(OK)
      }
    }
  }
  "submit" should {
    "redirect to next page when the form is completed successfully" in new TestWithApplication {
      whenReady(submit) { r =>
        // A valid submission redirects straight to the fulfil page.
        r.header.headers.get(LOCATION) should equal(Some(FulfilPage.address))
      }
    }
    "write cookies to the cache when a valid form is submitted" in new TestWithApplication {
      whenReady(submit) { r =>
        val cookies = fetchCookiesFromHeaders(r)
        // The confirm model must have been cached on the response.
        cookies.map(_.name) should contain(ConfirmCacheKey)
      }
    }
    "return a bad request when the supply email field has nothing selected" in new TestWithApplication {
      // Empty supply-email radio selection makes the form invalid.
      val request = buildRequest(supplyEmail = supplyEmailEmpty)
        .withCookies(
          vehicleAndKeeperLookupFormModel(keeperConsent = UserType_Keeper),
          vehicleAndKeeperDetailsModel(),
          captureCertificateDetailsFormModel(),
          captureCertificateDetailsModel()
        )
      val result = confirm.submit(request)
      whenReady(result) { r =>
        r.header.status should equal(BAD_REQUEST)
      }
    }
    "return a bad request when the keeper wants to supply an email " +
      "and does not provide an email address" in new TestWithApplication {
      // Supply-email defaults to "true" but the keeper email field is left blank.
      val request = buildRequest(keeperEmail = keeperEmailEmpty)
        .withCookies(
          vehicleAndKeeperLookupFormModel(keeperConsent = UserType_Keeper),
          vehicleAndKeeperDetailsModel(),
          captureCertificateDetailsFormModel(),
          captureCertificateDetailsModel()
        )
      val result = confirm.submit(request)
      whenReady(result) { r =>
        r.header.status should equal(BAD_REQUEST)
      }
    }
  }
  // Fresh controller instance resolved from the test injector.
  private def confirm = testInjector().getInstance(classOf[Confirm])
  // Renders the page with the full set of prerequisite cookies in place.
  private def present = {
    val request = FakeRequest()
      .withCookies(vehicleAndKeeperDetailsModel())
      .withCookies(vehicleAndKeeperLookupFormModel())
      .withCookies(captureCertificateDetailsFormModel())
      .withCookies(captureCertificateDetailsModel())
    confirm.present(request)
  }
  // Submits a fully-populated, valid form with the prerequisite cookies in place.
  private def submit = {
    val request = buildRequest()
      .withCookies(vehicleAndKeeperDetailsModel())
      .withCookies(vehicleAndKeeperLookupFormModel())
      .withCookies(captureCertificateDetailsFormModel())
      .withCookies(captureCertificateDetailsModel())
    confirm.submit(request)
  }
  private val supplyEmailEmpty = ""
  private val keeperEmailEmpty = ""
  // Builds a form submission; the defaults represent a valid keeper email,
  // granted consent, and "supply email" selected as true.
  private def buildRequest(keeperEmail: String = KeeperEmailValid, supplyEmail: String = SupplyEmail_true) = {
    FakeRequest().withFormUrlEncodedBody(
      s"$KeeperEmailId.$EmailId" -> keeperEmail,
      s"$KeeperEmailId.$EmailVerifyId" -> keeperEmail,
      GranteeConsentId -> "true",
      SupplyEmailId -> supplyEmail
    )
  }
}
| dvla/vrm-assign-online | test/controllers/ConfirmUnitSpec.scala | Scala | mit | 4,078 |
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala.examples
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import org.junit.Test
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.junit.JUnitSuite
import rx.lang.scala._
import rx.lang.scala.schedulers.TestScheduler
import rx.observers.TestObserver
/** Demonstrates driving `Observable.interval` with a virtual-clock `TestScheduler`
  * and asserting emissions via Mockito, including silence after unsubscription.
  */
class TestSchedulerExample extends JUnitSuite {
  @Test def testInterval() {
    val scheduler = TestScheduler()
    // Use a Java Observer for Mockito
    val observer = mock(classOf[rx.Observer[Long]])
    val o = Observable.interval(1 second, scheduler)
    // Wrap Java Observer in Scala Observer, then subscribe
    val sub = o.subscribe(Observer(new TestObserver(observer)))
    // No virtual time has elapsed yet, so nothing may have been emitted.
    verify(observer, never).onNext(0L)
    verify(observer, never).onCompleted()
    verify(observer, never).onError(any(classOf[Throwable]))
    scheduler.advanceTimeTo(2 seconds)
    // After 2 virtual seconds exactly two ticks (0 then 1) must have arrived, in order.
    val inOrdr = inOrder(observer)
    inOrdr.verify(observer, times(1)).onNext(0L)
    inOrdr.verify(observer, times(1)).onNext(1L)
    inOrdr.verify(observer, never).onNext(2L)
    verify(observer, never).onCompleted()
    verify(observer, never).onError(any(classOf[Throwable]))
    verify(observer, never).onNext(2L)
    sub.unsubscribe()
    scheduler.advanceTimeTo(4 seconds)
    // after unsubscription we expect no further events
    verifyNoMoreInteractions(observer)
  }
}
| devisnik/RxJava | language-adaptors/rxjava-scala/src/examples/scala/rx/lang/scala/examples/TestSchedulerExample.scala | Scala | apache-2.0 | 1,996 |
package org.scaladebugger.tool.backend
import java.net.URI
import java.nio.file.Path
import ammonite.util.Bind
import org.scaladebugger.api.debuggers.Debugger
import org.scaladebugger.api.profiles.traits.info.{ThreadGroupInfo, ThreadInfo}
import org.scaladebugger.api.virtualmachines.{DummyScalaVirtualMachine, ScalaVirtualMachine}
/**
 * Immutable snapshot of the debugger tool's session state.
 *
 * @param activeDebugger           the currently attached debugger, if any
 * @param scalaVirtualMachines     the JVMs currently under debug
 * @param dummyScalaVirtualMachine fallback VM used when no real JVM is attached
 * @param activeThread             the thread currently in focus, if any
 * @param activeThreadGroup        the thread group currently in focus, if any
 * @param sourcePaths              paths searched for source files
 * @param activeProfileName        the name of the active debugger profile
 */
case class State(
  activeDebugger: Option[Debugger],
  scalaVirtualMachines: Seq[ScalaVirtualMachine],
  dummyScalaVirtualMachine: DummyScalaVirtualMachine,
  activeThread: Option[ThreadInfo],
  activeThreadGroup: Option[ThreadGroupInfo],
  sourcePaths: Seq[Path],
  activeProfileName: String
) {
  /**
   * Converts this object to a collection of bindings that only contains
   * fields that are not empty.
   *
   * @return The state as a collection of bindings
   */
  def toBindings: Seq[Bind[_]] = {
    // Empty optional fields are omitted entirely rather than bound to a null value.
    val optionalBindings: Seq[Bind[_]] =
      activeDebugger.map(d => Bind("debugger", d)).toSeq ++
      activeThread.map(t => Bind("thread", t)).toSeq ++
      activeThreadGroup.map(tg => Bind("threadGroup", tg)).toSeq
    // Fall back to the dummy VM when no real JVM is attached, so "jvms"
    // is always bound to a non-empty collection.
    val jvmBinding: Bind[_] =
      if (scalaVirtualMachines.nonEmpty) Bind("jvms", scalaVirtualMachines)
      else Bind("jvms", Seq(dummyScalaVirtualMachine))
    optionalBindings ++ Seq(Bind("sourcePaths", sourcePaths), jvmBinding)
  }
}
/** Factory for [[State]] instances. */
object State {
  /**
   * Represents the default state where all values are None/Nil except the
   * dummy virtual machine, which is initialized using the default profile.
   */
  lazy val Default = newDefault()
  /**
   * Creates a new state with default values.
   *
   * @return The new state instance
   */
  def newDefault(): State = State(
    activeDebugger = None,
    scalaVirtualMachines = Nil,
    // A dummy VM is always present so commands can run before a real
    // debugger connection is established.
    dummyScalaVirtualMachine = DummyScalaVirtualMachine.newInstance(),
    activeThread = None,
    activeThreadGroup = None,
    sourcePaths = Nil,
    activeProfileName = Debugger.DefaultProfileName
  )
}
| ensime/scala-debugger | scala-debugger-tool/src/main/scala/org/scaladebugger/tool/backend/State.scala | Scala | apache-2.0 | 1,862 |
/*
* Copyright (C) 2012 Pavel Fatin <http://pavelfatin.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.pavelfatin.fs
package internal
package toyfs
import collection.mutable
/** In-memory `ChunkStorage` test double with a fixed capacity, seeded with
  * the given initial chunks.
  */
private class MockChunkStorage(capacity: Int, chunks: MockChunk*) extends ChunkStorage {
  // Backing store for all chunks, both seeded and later allocated.
  private val buffer = mutable.Buffer(chunks: _*)

  def size = buffer.length

  def get(n: Int) = buffer(n)

  /** Allocates a fresh empty chunk, or returns None once capacity is reached. */
  def allocate() =
    if (size >= capacity) None
    else {
      val chunk = new MockChunk(size, "")
      buffer += chunk
      Some(chunk)
    }

  /** Human-readable dump of all chunk presentations, pipe-separated. */
  def presentation: String = buffer.map(_.presentation).mkString(" | ")

  override def toString = s"${getClass.getSimpleName}($presentation)"
}
| pavelfatin/toyfs | src/test/scala/com/pavelfatin/fs/internal/toyfs/MockChunkStorage.scala | Scala | gpl-3.0 | 1,309 |
package com.ezoky.ezmodel.console
import com.ezoky.ezconsole.ScalaConsole
/**
 * Entry point for the EzModel interactive console.
 *
 * @author gweinbach on 27/02/2021
 * @since 0.2.0
 */
object EzModellerConsole
  extends ScalaConsole(EzModeller.defaultConsoleModule)
    with App {
  try {
    start
  }
  catch {
    case e: Throwable =>
      // NOTE(review): deliberately catches every Throwable (including fatal JVM
      // errors) so any startup failure is reported before exiting non-zero --
      // confirm this is intended rather than scala.util.control.NonFatal.
      Console.err.println(s"Failed to start console: ${e}")
      e.printStackTrace(Console.err)
      sys.exit(1)
  }
  // Reaching this point means the console terminated normally.
  sys.exit(0)
}
} | ezoky/ezmodel | ezmodel-console/src/main/scala/com/ezoky/ezmodel/console/EzModellerConsole.scala | Scala | gpl-2.0 | 426 |
/*
Copyright 2013 Ilya Lakhin (Илья Александрович Лахин)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.test.utils
import name.lakhin.eliah.projects.papacarlo.{Lexer, Syntax}
/** A `SyntaxMonitor` that renders the syntax errors currently reported by the
  * parser. Each entry shows the error description followed by either a compact
  * range (short output) or a highlighted source excerpt.
  */
final class ErrorMonitor(lexer: Lexer, syntax: Syntax)
  extends SyntaxMonitor(lexer, syntax) {

  def getResult = {
    val entries =
      for (error <- syntax.getErrors) yield {
        val location =
          if (shortOutput) " " + lexer.rangeToString(error.range)
          else ":\\n" + lexer.highlight(error.range, Some(10))
        " > " + error.description + location
      }
    entries.mkString("\\n\\n")
  }

  def prepare() {}

  def release() {}
}
package redscaler.interpreter
import java.nio.channels.AsynchronousChannelGroup
import fs2.Stream
import fs2.util.syntax._
import fs2.util.{Async, Functor}
import redscaler._
import redscaler.interpreter.ArgConverters._
import redscaler.interpreter.ResponseHandler.handleResponseWithErrorHandling
import redscaler.pubsub.{Message, PubSub, Subscribe, SubscriberResponse}
import redscaler.ByteVector._
/** FS2-based interpreter of the Redis pub/sub algebra over an `Fs2Connection`. */
class Fs2PubSubInterpreter[F[_]: Functor](connection: Fs2Connection[F])(implicit asyncM: Async[F],
                                                                        tcpACG: AsynchronousChannelGroup)
    extends PubSub.Interp[F] {
  import connection._
  /** Publishes `message` on `channelName`; yields the number of subscribers that received it. */
  override def publish(channelName: String, message: Vector[Byte]): F[ErrorOr[Int]] = {
    // Redis replies to PUBLISH with an integer count of receiving clients.
    execute(Command.keyCommand("publish", channelName, Seq(message))).map(handleResponseWithErrorHandling {
      case IntegerResponse(receiverCount) => receiverCount.toInt
    })
  }
  /** Subscribes to `channelName`, streaming each server push as it arrives. */
  def subscribe(channelName: String): Stream[F, ErrorOr[SubscriberResponse]] = {
    subscribeAndPull(toChunk(Command.keyCommand("subscribe", channelName, Seq.empty)))
  }
  // TODO: not implemented; calling this throws scala.NotImplementedError.
  override def unsubscribe(channelName: String): F[Unit] = ???
}
/** Decodes the raw array replies pushed by Redis on a subscribed connection. */
object SubscriptionResponseHandler {
  // Pre-encoded message-kind markers used to recognise the two push formats.
  private val subscribeMsg: Vector[Byte] = stringArgConverter("subscribe")
  private val messageMsg: Vector[Byte]   = stringArgConverter("message")

  // ["subscribe", channel, count] acknowledges a subscription;
  // ["message", channel, payload] carries a published message.
  val handler: ErrorOr[RedisResponse] => ErrorOr[SubscriberResponse] =
    handleResponseWithErrorHandling {
      case ArrayResponse(
          BulkResponse(Some(`subscribeMsg`)) :: BulkResponse(Some(publishingChannelName)) :: IntegerResponse(
            subscribedCount) :: Nil) =>
        Subscribe(publishingChannelName.asString, subscribedCount.toInt)
      case ArrayResponse(
          BulkResponse(Some(`messageMsg`)) :: BulkResponse(Some(publishingChannelName)) :: BulkResponse(
            Some(messageContent)) :: Nil) =>
        Message(publishingChannelName.asString, messageContent)
    }
}
| agustafson/redscaler | fs2/src/main/scala/redscaler/interpreter/Fs2PubSubInterpreter.scala | Scala | apache-2.0 | 1,967 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sun Mar 12 16:49:17 EDT 2017
* @see LICENSE (MIT style license file).
*
* @see www.jstor.org/stable/3695642?seq=1#page_scan_tab_contents
*/
package scalation.analytics.clusterer
import scala.util.control.Breaks.{breakable, break}
import scala.collection.mutable.{ArrayBuffer, Set}
import scala.math.min
import scalation.linalgebra.{MatrixD, SparseMatrixD}
import scalation.random.RandomVecSample
import scalation.util.SortingI
import Algorithm._
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TightClusterer` class uses tight clustering to eliminate points that
 *  do not fit well in any cluster.
 *  @param x the vectors/points to be clustered stored as rows of a matrix
 *  @param k0 the number of clusters to make
 *  @param kmin the minimum number of clusters to make
 *  @param s the random number stream (to vary the clusters made)
 */
class TightClusterer (x: MatrixD, k0: Int, kmin: Int, s: Int = 0)
//    extends Clusterer
{
    private val DEBUG = false                     // debug flag
    private val ratio = 0.7                       // subsampling ratio
    private val alpha = 0.2                       // how far below 1 to set threshold
    private val thres = 1 - alpha                 // membership threshold for high scores
    private val beta = 0.9                        // similarity threshold
    private val b = 10                            // number of times to resample
    private val q = 7                             // number of candidates for each k
    private val n = x.dim1                        // size of whole sample/population
    private val avail = Array.fill(x.dim1)(true)  // the not yet tightly clustered data points
    private val levels = 3                        // number of levels to try
    private val clusters = new ArrayBuffer [Set [Int]] ()                  // tight, stable clusters found so far
    private val topClubs = Array.ofDim [ArrayBuffer [Set [Int]]] (levels)  // top candidate clusters (clubs) per level
    private val mda = Array.ofDim [Double] (n, n)                          // backing array for the mean comembership matrix
    private val da = Array.ofDim [Double] (n, n)                           // backing array for the per-sample comembership matrix
    private val ya = Array.ofDim [Double] (x.dim1, x.dim2)                 // reusable backing array for subsample matrices
    private val md = new MatrixD (n, n, mda)      // mean comembership matrix
    private val d = new MatrixD (n, n, da)        // comembership matrix for current sample
/*
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a new reandom subsample.
*/
def createSubsample (): (MatrixD, Array [Int]) =
{
val nn = avail.count(_ == true) // the number of available rows (i.e. - rows which haven't been tight clustered yet...)
val ns = (nn * ratio).toInt // size of a random subsample
//println(s"From subsamp ns: $ns")
val sr = 0 until ns // sample range
val strm = (System.currentTimeMillis % 1000).toInt
val rsg = RandomVecSample (nn, ns, strm) // random sample generator
val indexMap = rsg.igen ().toArray // select e.g. 5th, 3rd, 7th // FIX - why toArray
//print(s"indexMap: ${indexMap.deep}")
val subsamp = new MatrixD(indexMap.length,x.dim2) // a matrix to hold the specified vectors from the positions specified by indexMap
val arrayMap = avail.zipWithIndex.map{case (e,i) =>
if(e) i else -1}.filterNot(_ == -1) // the indices of the rows specified in indexMap e.g. 5th => index 7, 3rd => 3, 7th => 9
//val subsamp = x.selectRows (arrayMap) // generate random subsample
//println(s"arrayMap: ${arrayMap.deep}")
for( i <- subsamp.range1 ) {
//println(s"i: $i")
//println(s"indexMap(i): ${indexMap(i)}")
//println(s"arrayMap(indexMap(i)): ${arrayMap(indexMap(i))}")
subsamp(i) = x(arrayMap(indexMap(i))) // fill the subsamp with the rows from x specified by arrayMap e.g. x(5), x(7), x(9)
}
//println (s"subsamp = $subsamp")
(subsamp, indexMap)
} // createSubsample
*/
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a new random subsample of the still-available rows of 'x', filling
     *  the caller-supplied 'subsamp' matrix in place.
     *  @param subsamp  the pre-sized matrix to fill with the sampled rows
     *  @return the sampled positions (indices into the available-row list)
     */
    def createSubsample (subsamp: MatrixD): Array [Int] =
    {
        val nn = avail.count(_ == true)              // the number of available rows (i.e. - rows which haven't been tight clustered yet...)
        val ns = (nn * ratio).toInt                  // size of a random subsample
        //println(s"From subsamp ns: $ns")
        val sr = 0 until ns                          // sample range
        // NOTE(review): seeding from wall-clock time makes sampling non-reproducible
        // despite the class's stream parameter 's' -- confirm intended.
        val strm = (System.currentTimeMillis % 1000).toInt
        val rsg = RandomVecSample (nn, ns, strm)     // random sample generator
        val indexMap = rsg.igen ().toArray           // select e.g. 5th, 3rd, 7th   // FIX - why toArray
        //print(s"indexMap: ${indexMap.deep}")
        // val subsamp = new MatrixD(indexMap.length,x.dim2)           // a matrix to hold the specified vectors from the positions specified by indexMap
        val arrayMap = avail.zipWithIndex.map{case (e,i) =>
            if(e) i else -1}.filterNot(_ == -1)      // the indices of the rows specified in indexMap e.g. 5th => index 7, 3rd => 3, 7th => 9
        //val subsamp = x.selectRows (arrayMap)      // generate random subsample
        //println(s"arrayMap: ${arrayMap.deep}")
        for( i <- subsamp.range1 ) {
            //println(s"i: $i")
            //println(s"indexMap(i): ${indexMap(i)}")
            //println(s"arrayMap(indexMap(i)): ${arrayMap(indexMap(i))}")
            subsamp(i) = x(arrayMap(indexMap(i)))    // fill the subsamp with the rows from x specified by arrayMap e.g. x(5), x(7), x(9)
        }
        //println (s"subsamp = $subsamp")
        indexMap
    } // createSubsample
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the mean comembership matrix by averaging results from several
     *  subsamples: for each of 'b' resamples, cluster the subsample with k-means++,
     *  classify every available point against the resulting centroids, and mark
     *  pairs landing in the same cluster.
     *  @param k  the number of clusters
     *  @param y  scratch matrix reused to hold each subsample (cleared per iteration)
     */
    def computeMeanComembership (k: Int, y: MatrixD): MatrixD =
    {
        val unclustered = avail.count(_ == true)
        val nn = (unclustered * ratio).toInt
        //println(s"nn: $nn")
        // val md = new MatrixD (n, n)            // mean comembership matrix
        val clustr2 = Array.ofDim[Int](n)         // to hold the future clustering of our data classified by centroids of some subset sample clustering
        // val d = new MatrixD (n, n)             // comembership matrix for current sample
        md.clear ()
        // val y = new MatrixD(nn,x.dim2)
        for (l <- 0 until b) {
            d.clear ()                            // clear the comembership matrix
            y.clear ()
            //println (s"\\n iteration l = $l")
            val imap = createSubsample (y)        // create a new subsample
            //KMeansPPClusterer.permuteStreams ((s+l)%1000)
            //val (kmc, clustr) = KMeansPPClusterer (y, k)
            val kmc = new KMeansPPClusterer(y,k,s=(s+l)%1000)
            val clustr = kmc.cluster ()           // get the clusters
            val cents = kmc.centroids()
            //println (s"clustr = ${clustr.deep}, cents: $cents")
            // Classify every still-available point against the subsample's centroids;
            // points already tight-clustered get sentinel -1 and never co-occur.
            for (i <- x.range1 ) clustr2(i) = if( avail(i) ) kmc.classify2(x(i), cents) else -1
            if (DEBUG) println(s"clustr2: ${clustr2.deep}")
            for (i <- x.range1; j <- x.range1 if (clustr2(i) == clustr2(j) && clustr2(i) >= 0)) {
                //println(s"i: $i")
                //println(s"j: $j")
                d(i, j) = 1.0
            }
            //println (s"d = $d")
            md += d
        } // for
        md /= b                                   // ratio * b  // compute mean
        md                                        // return result
    } // computeMeanComembership
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Form candidate clusters by collecting points with high average comembership
     *  scores together in clusters (clubs). Only clubs with more than one member
     *  are kept.
     *  @param md the mean comembership matrix
     */
    def formCandidateClusters (md: MatrixD): ArrayBuffer [Set [Int]] =
    {
        //println(s"From formCandidateClusters, n: $n")
        // I don't think we should be using the available list here...
        //val avail = Array.fill (n)(true)             // whether a point is available
        val clubs = new ArrayBuffer [Set [Int]] ()     // list of clubs
        for (i <- 0 until md.dim1 if avail(i)) {
            val club = Set (i)                         // put i in a club
            //avail(i) = false                         // make i unavailable
            // NOTE(review): the inner guard re-checks avail(i) rather than avail(j),
            // so j's availability is never tested here -- confirm this is intended.
            for (j <- i until md.dim1 if ( avail(i) && md(i,j) >= thres )) { club += j}//; avail(j) = false }
            if( club.size > 1 ) clubs += club
        } // for
        clubs
    } // formCandidateClusters
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Order the clubs (candidate clusters) by size, returning the rank order
     *  (largest first).
     *  @param clubs the candidate clusters
     */
    def orderBySize (clubs: ArrayBuffer [Set [Int]]): Array [Int] =
    {
        val sizes = clubs.map (_.size).toArray    // record sizes of clubs
        new SortingI (sizes).iselsort2 ()         // indirectly sort by size (descending)
    } // orderBySize
/*
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select candidates for tight clusters in the K-means algorithm for a given
* number of clusters 'k'. This corresponds to Algorithm A in the paper/URL.
* @param k the number of clusters
*/
def selectCandidateClusters (k: Int): (ArrayBuffer [Set [Int]], Array [Int]) =
{
println ("AAAAAAAAAAAAAA")
val md = computeMeanComembership (k) // mean comembership
println ("BBBBBBBBBBBBBB")
val clubs = formCandidateClusters (md) // form candidate clusters (clubs)
println ("CCCCCCCCCCCCCC")
val order = orderBySize (clubs) // determine rank order by club size
println ("DDDDDDDDDDDDDD")
if (DEBUG) {
println (s"mean = $md")
println (s"clubs = $clubs")
println (s"order = ${order.deep}")
} // if
(clubs, order)
} // selectCandidateClusters
*/
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Select candidates for tight clusters in the K-means algorithm for a given
     *  number of clusters 'k'. This corresponds to Algorithm A in the paper/URL.
     *  @param k  the number of clusters
     *  @param y  scratch matrix passed through to hold each subsample
     *  @return the candidate clubs together with their size-based rank order
     */
    def selectCandidateClusters (k: Int, y: MatrixD): (ArrayBuffer [Set [Int]], Array [Int]) =
    {
        val md = computeMeanComembership (k,y)    // mean comembership
        val clubs = formCandidateClusters (md)    // form candidate clusters (clubs)
        val order = orderBySize (clubs)           // determine rank order by club size
        if (DEBUG) {
            println (s"mean  = $md")
            println (s"clubs = $clubs")
            println (s"order = ${order.deep}")
        } // if
        (clubs, order)
    } // selectCandidateClusters
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Pick the top q clubs based on club size.
* @param clubs all the clubs (candidate clusters)
* @param order the rank order (by club size) of all the clubs
*/
def pickTopQ (clubs: ArrayBuffer [Set [Int]], order: Array [Int]): ArrayBuffer [Set [Int]] =
{
val ml = ArrayBuffer [Set [Int]] ()
for (i <- 0 until min (q, clubs.size)) ml += clubs(order (i))
ml
} // pickTopQ
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the similarity of two clubs as the ratio of the size of their
* intersection to their union.
* @param c1 the first club
* @param c2 the second club
*/
def sim (c1: Set [Int], c2: Set [Int]): Double = (c1 & c1).size / (c1 union c2).size
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the first tight and stable cluster from the top candidate clubs.
     *  To be stable, a club must have a similar club at the next level (next k value).
     *  Returns (-1, null) as a sentinel when no stable cluster exists.
     *  @param topClubs the top clubs for each level to be searched for stable clusters
     */
    def findStable (topClubs: Array [ArrayBuffer [Set [Int]]]): (Int, Set [Int]) =
    {
        for (lev <- 0 until topClubs.length-1) {
            for (c1 <- topClubs (lev); c2 <- topClubs (lev+1)) {
                // 'return' exits both nested loops at the first match.
                if (sim (c1, c2) >= beta) return (lev+1, c2)   // found a stable cluster
            } // for
        } // for
        return (-1, null)                                      // none found
    } // findStable
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Given a set of points/vectors, put them in clusters, returning the cluster
     *  assignment vector. A basic goal is to minimize the sum of the distances
     *  between points within each cluster. Iterates k from 'k0' down to 'kmin',
     *  removing each stable cluster's members from further consideration, and
     *  stops early when fewer than two points remain or all points are clustered.
     */
    def cluster (): ArrayBuffer [Set [Int]] =
    {
//        var done = false
        breakable { for (kc <- k0 to kmin by -1) {     // iteratively decrement kc (k current value)
            //println(s"kc : $kc")
            val nn = (avail.count( _ == true) * ratio).toInt
            // A subsample of one point cannot be clustered; stop.
            if( nn == 1 ) break
//            if( avail.count( _ == true ) == 1 ) break // done = true
//            if( !done ) break
            val y = new MatrixD(nn, x.dim2, ya)
            // Gather top candidate clubs for 'levels' consecutive values of k.
            for (k <- kc until kc + levels) {
                y.clear()
                val (clubs, order) = selectCandidateClusters (k, y)
                topClubs(k-kc) = pickTopQ (clubs, order)
            } // for
            if (DEBUG) println (s"topClubs = ${topClubs.deep}")
            val (lev, stable) = findStable (topClubs)  // next stable cluster
            if (DEBUG) println (s"(lev, stable) = ($lev, $stable)")
            if (lev >= 0) {
                clusters += stable                     // add to stable clusters
                //topClubs(lev) -= stable              // remove from top clubs (WHY?)
                for( i <- topClubs ) i.clear()
                // Members of the stable cluster take no further part in clustering.
                for( i <- stable ) avail(i) = false
                if( avail.count(_ == true ) == 0 ) break // done = true
            } else {
                if (DEBUG) println (s"no stable cluster found for kc = $kc: $stable")
            } // if
        }} // for // breakable
        if (DEBUG) println (s"clusters = $clusters")
        clusters
    } // cluster
} // TightClusterer class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TightClustererTest` is used to test the `TightClusterer` class.
 *  > run-main scalation.analytics.clusterer.TightClustererTest
 */
object TightClustererTest extends App
{
    // Three natural pairs plus one deliberate outlier at row 6: (19, 32).
    val v = new MatrixD ((7, 2), 1.0, 2.0,
                                 2.0, 1.0,
                                 5.0, 4.0,
                                 4.0, 5.0,
                                 9.0, 8.0,
                                 8.0, 9.0,
                                 19.0, 32.0)
    val (k0, kmin) = (5,1)
    for (s <- 0 until 5) {
        println(s"\\n\\n\\n//::::::::::::::::::::::::::\\nTight Cluster test for s = $s\\n//::::::::::::::::::::::::::\\n\\n\\n")
        val tcl = new TightClusterer (v, k0, kmin, s)
        val clust = tcl.cluster ()
        // The outlier (row 6) must never be absorbed into any tight cluster.
        assert(!clust.flatten.contains(6))
    } // for
} // TightClustererTest object
| scalation/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/analytics/clusterer/TightClusterer.scala | Scala | mit | 15,956 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming.ui
import java.{util => ju}
import java.lang.{Long => JLong}
import java.util.{Locale, UUID}
import javax.servlet.http.HttpServletRequest
import scala.collection.JavaConverters._
import scala.xml.{Node, NodeBuffer, Unparsed}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.streaming.state.StateStoreProvider
import org.apache.spark.sql.internal.SQLConf.STATE_STORE_PROVIDER_CLASS
import org.apache.spark.sql.internal.StaticSQLConf.ENABLED_STREAMING_UI_CUSTOM_METRIC_LIST
import org.apache.spark.sql.streaming.ui.UIUtils._
import org.apache.spark.ui.{GraphUIData, JsCollector, UIUtils => SparkUIUtils, WebUIPage}
/** Web UI page rendering per-query statistics (timelines/histograms) for a
  * Structured Streaming query identified by its run id.
  */
private[ui] class StreamingQueryStatisticsPage(parent: StreamingQueryTab)
  extends WebUIPage("statistics") with Logging {
  // State store provider implementation mustn't do any heavyweight initialization in constructor
  // but in its init method.
  private val supportedCustomMetrics = StateStoreProvider.create(
    parent.parent.conf.get(STATE_STORE_PROVIDER_CLASS)).supportedCustomMetrics
  logDebug(s"Supported custom metrics: $supportedCustomMetrics")
  // Metric names the user has opted into displaying, normalized to lower case.
  private val enabledCustomMetrics =
    parent.parent.conf.get(ENABLED_STREAMING_UI_CUSTOM_METRIC_LIST).map(_.toLowerCase(Locale.ROOT))
  logDebug(s"Enabled custom metrics: $enabledCustomMetrics")
  /** Emits the script/stylesheet tags (D3 plus the streaming-page assets) this page depends on. */
  def generateLoadResources(request: HttpServletRequest): Seq[Node] = {
    // scalastyle:off
    <script src={SparkUIUtils.prependBaseUri(request, "/static/d3.min.js")}></script>
      <link rel="stylesheet" href={SparkUIUtils.prependBaseUri(request, "/static/streaming-page.css")} type="text/css"/>
      <script src={SparkUIUtils.prependBaseUri(request, "/static/streaming-page.js")}></script>
      <script src={SparkUIUtils.prependBaseUri(request, "/static/structured-streaming-page.js")}></script>
    // scalastyle:on
  }
  /** Renders the statistics page for the query whose runId matches the `id` request parameter. */
  override def render(request: HttpServletRequest): Seq[Node] = {
    val parameterId = request.getParameter("id")
    require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")
    // The `id` parameter carries the query's runId (a UUID), not its name.
    val query = parent.store.allQueryUIData.find { uiData =>
      uiData.summary.runId.equals(UUID.fromString(parameterId))
    }.getOrElse(throw new IllegalArgumentException(s"Failed to find streaming query $parameterId"))
    val resources = generateLoadResources(request)
    val basicInfo = generateBasicInfo(query)
    val content =
      resources ++
      basicInfo ++
      generateStatTable(query)
    SparkUIUtils.headerSparkPage(request, "Streaming Query Statistics", content, parent)
  }
def generateTimeMap(times: Seq[Long]): Seq[Node] = {
val js = "var timeFormat = {};\\n" + times.map { time =>
val formattedTime = SparkUIUtils.formatBatchTime(time, 1, showYYYYMMSS = false)
s"timeFormat[$time] = '$formattedTime';"
}.mkString("\\n")
<script>{Unparsed(js)}</script>
}
def generateTimeTipStrings(values: Array[(Long, Long)]): Seq[Node] = {
val js = "var timeTipStrings = {};\\n" + values.map { case (batchId, time) =>
val formattedTime = SparkUIUtils.formatBatchTime(time, 1, showYYYYMMSS = false)
s"timeTipStrings[$time] = 'batch $batchId ($formattedTime)';"
}.mkString("\\n")
<script>{Unparsed(js)}</script>
}
def generateFormattedTimeTipStrings(values: Array[(Long, Long)]): Seq[Node] = {
val js = "var formattedTimeTipStrings = {};\\n" + values.map { case (batchId, time) =>
val formattedTime = SparkUIUtils.formatBatchTime(time, 1, showYYYYMMSS = false)
s"""formattedTimeTipStrings["$formattedTime"] = 'batch $batchId ($formattedTime)';"""
}.mkString("\\n")
<script>{Unparsed(js)}</script>
}
  /** Builds a script mapping each formatted batch time to its duration values,
   *  serialized as a JS array of strings in key-sorted order.
   */
  def generateTimeToValues(values: Array[(Long, ju.Map[String, JLong])]): Seq[Node] = {
    // Pad the per-batch duration maps so every batch reports the same set of keys.
    val durationDataPadding = SparkUIUtils.durationDataPadding(values)
    val js = "var formattedTimeToValues = {};\\n" + durationDataPadding.map { case (x, y) =>
      val s = y.toSeq.sortBy(_._1).map(e => s""""${e._2}"""").mkString("[", ",", "]")
      val formattedTime = SparkUIUtils.formatBatchTime(x, 1, showYYYYMMSS = false)
      s"""formattedTimeToValues["$formattedTime"] = $s;"""
    }.mkString("\\n")
    <script>{Unparsed(js)}</script>
  }
  /** Renders the header summary (run duration, start time, completed-batch count,
   *  name, id, runId) for the given query.
   */
  def generateBasicInfo(uiData: StreamingQueryUIData): Seq[Node] = {
    val duration = if (uiData.summary.isActive) {
      // Active query: elapsed time since start.
      val durationMs = System.currentTimeMillis() - uiData.summary.startTimestamp
      SparkUIUtils.formatDurationVerbose(durationMs)
    } else {
      // Finished query: span between first and last recorded progress, "-" if none.
      withNoProgress(uiData, {
        val end = uiData.lastProgress.timestamp
        val start = uiData.recentProgress.head.timestamp
        SparkUIUtils.formatDurationVerbose(
          parseProgressTimestamp(end) - parseProgressTimestamp(start))
      }, "-")
    }
    val name = UIUtils.getQueryName(uiData)
    val numBatches = withNoProgress(uiData, { uiData.lastProgress.batchId + 1L }, 0)
    <div>Running batches for
      <strong>
        {duration}
      </strong>
      since
      <strong>
        {SparkUIUtils.formatDate(uiData.summary.startTimestamp)}
      </strong>
      (<strong>{numBatches}</strong> completed batches)
    </div>
    <br />
    <div><strong>Name: </strong>{name}</div>
    <div><strong>Id: </strong>{uiData.summary.id}</div>
    <div><strong>RunId: </strong>{uiData.summary.runId}</div>
    <br />
  }
  /** Renders the "Global Watermark Gap" timeline/histogram table row, or nothing
   *  when the query has no watermark or no positive watermark samples.
   */
  def generateWatermark(
      query: StreamingQueryUIData,
      minBatchTime: Long,
      maxBatchTime: Long,
      jsCollector: JsCollector): Seq[Node] = {
    // This is made sure on caller side but put it here to be defensive
    require(query.lastProgress != null)
    if (query.lastProgress.eventTime.containsKey("watermark")) {
      // (batch timestamp, gap) pairs; zero/unset watermarks are skipped.
      val watermarkData = query.recentProgress.flatMap { p =>
        val batchTimestamp = parseProgressTimestamp(p.timestamp)
        val watermarkValue = parseProgressTimestamp(p.eventTime.get("watermark"))
        if (watermarkValue > 0L) {
          // seconds
          Some((batchTimestamp, ((batchTimestamp - watermarkValue) / 1000.0)))
        } else {
          None
        }
      }
      if (watermarkData.nonEmpty) {
        val maxWatermark = watermarkData.maxBy(_._2)._2
        val graphUIDataForWatermark =
          new GraphUIData(
            "watermark-gap-timeline",
            "watermark-gap-histogram",
            watermarkData,
            minBatchTime,
            maxBatchTime,
            0,
            maxWatermark,
            "seconds")
        graphUIDataForWatermark.generateDataJs(jsCollector)
        // scalastyle:off
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 160px;">
              <div><strong>Global Watermark Gap {SparkUIUtils.tooltip("The gap between batch timestamp and global watermark for the batch.", "right")}</strong></div>
            </div>
          </td>
          <td class="watermark-gap-timeline">{graphUIDataForWatermark.generateTimelineHtml(jsCollector)}</td>
          <td class="watermark-gap-histogram">{graphUIDataForWatermark.generateHistogramHtml(jsCollector)}</td>
        </tr>
        // scalastyle:on
      } else {
        Seq.empty[Node]
      }
    } else {
      Seq.empty[Node]
    }
  }
/**
 * Renders the aggregated state-operator rows: total state rows, updated state rows,
 * state memory used, and rows dropped by watermark — each summed across all stateful
 * operators per batch. When custom state-store metrics are enabled, their rows are
 * appended as well. Returns an empty buffer for queries without stateful operators.
 *
 * @param query        streaming query UI data; `lastProgress` must be non-null (caller guarantees)
 * @param minBatchTime lower bound (ms) of the x-axis shared by all charts on the page
 * @param maxBatchTime upper bound (ms) of the x-axis shared by all charts on the page
 * @param jsCollector  accumulator for the JavaScript snippets that drive the charts
 */
def generateAggregatedStateOperators(
    query: StreamingQueryUIData,
    minBatchTime: Long,
    maxBatchTime: Long,
    jsCollector: JsCollector): NodeBuffer = {
  // This is made sure on caller side but put it here to be defensive
  require(query.lastProgress != null)
  if (query.lastProgress.stateOperators.nonEmpty) {
    // Each series below is (batch timestamp, metric summed over all state operators);
    // the matching max value sets the chart's y-axis upper bound.
    val numRowsTotalData = query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      p.stateOperators.map(_.numRowsTotal).sum.toDouble))
    val maxNumRowsTotal = numRowsTotalData.maxBy(_._2)._2
    val numRowsUpdatedData = query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      p.stateOperators.map(_.numRowsUpdated).sum.toDouble))
    val maxNumRowsUpdated = numRowsUpdatedData.maxBy(_._2)._2
    val memoryUsedBytesData = query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      p.stateOperators.map(_.memoryUsedBytes).sum.toDouble))
    val maxMemoryUsedBytes = memoryUsedBytesData.maxBy(_._2)._2
    val numRowsDroppedByWatermarkData = query.recentProgress
      .map(p => (parseProgressTimestamp(p.timestamp),
        p.stateOperators.map(_.numRowsDroppedByWatermark).sum.toDouble))
    val maxNumRowsDroppedByWatermark = numRowsDroppedByWatermarkData.maxBy(_._2)._2
    val graphUIDataForNumberTotalRows =
      new GraphUIData(
        "aggregated-num-total-state-rows-timeline",
        "aggregated-num-total-state-rows-histogram",
        numRowsTotalData,
        minBatchTime,
        maxBatchTime,
        0,
        maxNumRowsTotal,
        "records")
    graphUIDataForNumberTotalRows.generateDataJs(jsCollector)
    val graphUIDataForNumberUpdatedRows =
      new GraphUIData(
        "aggregated-num-updated-state-rows-timeline",
        "aggregated-num-updated-state-rows-histogram",
        numRowsUpdatedData,
        minBatchTime,
        maxBatchTime,
        0,
        maxNumRowsUpdated,
        "records")
    graphUIDataForNumberUpdatedRows.generateDataJs(jsCollector)
    val graphUIDataForMemoryUsedBytes =
      new GraphUIData(
        "aggregated-state-memory-used-bytes-timeline",
        "aggregated-state-memory-used-bytes-histogram",
        memoryUsedBytesData,
        minBatchTime,
        maxBatchTime,
        0,
        maxMemoryUsedBytes,
        "bytes")
    graphUIDataForMemoryUsedBytes.generateDataJs(jsCollector)
    val graphUIDataForNumRowsDroppedByWatermark =
      new GraphUIData(
        "aggregated-num-rows-dropped-by-watermark-timeline",
        "aggregated-num-rows-dropped-by-watermark-histogram",
        numRowsDroppedByWatermarkData,
        minBatchTime,
        maxBatchTime,
        0,
        maxNumRowsDroppedByWatermark,
        "records")
    graphUIDataForNumRowsDroppedByWatermark.generateDataJs(jsCollector)
    // Adjacent XML literals collect into a single NodeBuffer.
    val result =
      // scalastyle:off
      <tr>
        <td style="vertical-align: middle;">
          <div style="width: 160px;">
            <div><strong>Aggregated Number Of Total State Rows {SparkUIUtils.tooltip("Aggregated number of total state rows.", "right")}</strong></div>
          </div>
        </td>
        <td class={"aggregated-num-total-state-rows-timeline"}>{graphUIDataForNumberTotalRows.generateTimelineHtml(jsCollector)}</td>
        <td class={"aggregated-num-total-state-rows-histogram"}>{graphUIDataForNumberTotalRows.generateHistogramHtml(jsCollector)}</td>
      </tr>
      <tr>
        <td style="vertical-align: middle;">
          <div style="width: 160px;">
            <div><strong>Aggregated Number Of Updated State Rows {SparkUIUtils.tooltip("Aggregated number of updated state rows.", "right")}</strong></div>
          </div>
        </td>
        <td class={"aggregated-num-updated-state-rows-timeline"}>{graphUIDataForNumberUpdatedRows.generateTimelineHtml(jsCollector)}</td>
        <td class={"aggregated-num-updated-state-rows-histogram"}>{graphUIDataForNumberUpdatedRows.generateHistogramHtml(jsCollector)}</td>
      </tr>
      <tr>
        <td style="vertical-align: middle;">
          <div style="width: 160px;">
            <div><strong>Aggregated State Memory Used In Bytes {SparkUIUtils.tooltip("Aggregated state memory used in bytes.", "right")}</strong></div>
          </div>
        </td>
        <td class={"aggregated-state-memory-used-bytes-timeline"}>{graphUIDataForMemoryUsedBytes.generateTimelineHtml(jsCollector)}</td>
        <td class={"aggregated-state-memory-used-bytes-histogram"}>{graphUIDataForMemoryUsedBytes.generateHistogramHtml(jsCollector)}</td>
      </tr>
      <tr>
        <td style="vertical-align: middle;">
          <div style="width: 160px;">
            <div><strong>Aggregated Number Of Rows Dropped By Watermark {SparkUIUtils.tooltip("Accumulates all input rows being dropped in stateful operators by watermark. 'Inputs' are relative to operators.", "right")}</strong></div>
          </div>
        </td>
        <td class={"aggregated-num-rows-dropped-by-watermark-timeline"}>{graphUIDataForNumRowsDroppedByWatermark.generateTimelineHtml(jsCollector)}</td>
        <td class={"aggregated-num-rows-dropped-by-watermark-histogram"}>{graphUIDataForNumRowsDroppedByWatermark.generateHistogramHtml(jsCollector)}</td>
      </tr>
      // scalastyle:on
    if (enabledCustomMetrics.nonEmpty) {
      result ++= generateAggregatedCustomMetrics(query, minBatchTime, maxBatchTime, jsCollector)
    }
    result
  } else {
    new NodeBuffer()
  }
}
/**
 * Renders one chart row per enabled custom state-store metric (e.g. RocksDB metrics),
 * summing each metric across all state operators per batch. The set of metric names is
 * taken from the first state operator of the latest progress; only names present in
 * `enabledCustomMetrics` (matched case-insensitively) are rendered.
 *
 * @param query        streaming query UI data; must have at least one state operator
 * @param minBatchTime lower bound (ms) of the x-axis shared by all charts on the page
 * @param maxBatchTime upper bound (ms) of the x-axis shared by all charts on the page
 * @param jsCollector  accumulator for the JavaScript snippets that drive the charts
 */
def generateAggregatedCustomMetrics(
    query: StreamingQueryUIData,
    minBatchTime: Long,
    maxBatchTime: Long,
    jsCollector: JsCollector): NodeBuffer = {
  val result: NodeBuffer = new NodeBuffer
  // This is made sure on caller side but put it here to be defensive
  require(query.lastProgress.stateOperators.nonEmpty)
  query.lastProgress.stateOperators.head.customMetrics.keySet().asScala
    .filter(m => enabledCustomMetrics.contains(m.toLowerCase(Locale.ROOT))).map { metricName =>
      // (batch timestamp, metric summed across operators) per progress entry.
      val data = query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
        p.stateOperators.map(_.customMetrics.get(metricName).toDouble).sum))
      val max = data.maxBy(_._2)._2
      // `.get` is safe: enabledCustomMetrics is validated against supportedCustomMetrics.
      val metric = supportedCustomMetrics.find(_.name.equalsIgnoreCase(metricName)).get
      val graphUIData =
        new GraphUIData(
          s"aggregated-$metricName-timeline",
          s"aggregated-$metricName-histogram",
          data,
          minBatchTime,
          maxBatchTime,
          0,
          max,
          "")
      graphUIData.generateDataJs(jsCollector)
      result ++=
        // scalastyle:off
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 240px;">
              <div><strong>Aggregated Custom Metric {s"$metricName"} {SparkUIUtils.tooltip(metric.desc, "right")}</strong></div>
            </div>
          </td>
          <td class={s"aggregated-$metricName-timeline"}>{graphUIData.generateTimelineHtml(jsCollector)}</td>
          <td class={s"aggregated-$metricName-histogram"}>{graphUIData.generateHistogramHtml(jsCollector)}</td>
        </tr>
        // scalastyle:on
    }
  result
}
/**
 * Builds the statistics page body for one streaming query: the main stat table
 * (input rate, process rate, input rows, batch duration, operation duration) plus
 * the watermark and state-operator rows, followed by the JavaScript that drives all
 * the charts.
 *
 * All per-batch series are derived from `query.recentProgress`. The `withNoProgress`
 * wrappers supply a default value when the query has not reported progress yet, and
 * `withNumberInvalid` filters out NaN/infinite samples.
 */
def generateStatTable(query: StreamingQueryUIData): Seq[Node] = {
  val batchToTimestamps = withNoProgress(query,
    query.recentProgress.map(p => (p.batchId, parseProgressTimestamp(p.timestamp))),
    Array.empty[(Long, Long)])
  val batchTimes = batchToTimestamps.map(_._2)
  // Shared x-axis bounds (ms) and per-metric y-axis bounds for every chart.
  val minBatchTime =
    withNoProgress(query, parseProgressTimestamp(query.recentProgress.head.timestamp), 0L)
  val maxBatchTime =
    withNoProgress(query, parseProgressTimestamp(query.lastProgress.timestamp), 0L)
  val maxRecordRate =
    withNoProgress(query, query.recentProgress.map(_.inputRowsPerSecond).max, 0L)
  val minRecordRate = 0L
  val maxProcessRate =
    withNoProgress(query, query.recentProgress.map(_.processedRowsPerSecond).max, 0L)
  val minProcessRate = 0L
  val maxRows = withNoProgress(query, query.recentProgress.map(_.numInputRows).max, 0L)
  val minRows = 0L
  val maxBatchDuration = withNoProgress(query, query.recentProgress.map(_.batchDuration).max, 0L)
  val minBatchDuration = 0L
  val inputRateData = withNoProgress(query,
    query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      withNumberInvalid { p.inputRowsPerSecond })), Array.empty[(Long, Double)])
  val processRateData = withNoProgress(query,
    query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      withNumberInvalid { p.processedRowsPerSecond })), Array.empty[(Long, Double)])
  val inputRowsData = withNoProgress(query,
    query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      withNumberInvalid { p.numInputRows })), Array.empty[(Long, Double)])
  val batchDurations = withNoProgress(query,
    query.recentProgress.map(p => (parseProgressTimestamp(p.timestamp),
      withNumberInvalid { p.batchDuration })), Array.empty[(Long, Double)])
  val operationDurationData = withNoProgress(
    query,
    query.recentProgress.map { p =>
      val durationMs = p.durationMs
      // remove "triggerExecution" as it count the other operation duration.
      durationMs.remove("triggerExecution")
      (parseProgressTimestamp(p.timestamp), durationMs)
    },
    Array.empty[(Long, ju.Map[String, JLong])])
  val jsCollector = new JsCollector
  val graphUIDataForInputRate =
    new GraphUIData(
      "input-rate-timeline",
      "input-rate-histogram",
      inputRateData,
      minBatchTime,
      maxBatchTime,
      minRecordRate,
      maxRecordRate,
      "records/sec")
  graphUIDataForInputRate.generateDataJs(jsCollector)
  val graphUIDataForProcessRate =
    new GraphUIData(
      "process-rate-timeline",
      "process-rate-histogram",
      processRateData,
      minBatchTime,
      maxBatchTime,
      minProcessRate,
      maxProcessRate,
      "records/sec")
  graphUIDataForProcessRate.generateDataJs(jsCollector)
  val graphUIDataForInputRows =
    new GraphUIData(
      "input-rows-timeline",
      "input-rows-histogram",
      inputRowsData,
      minBatchTime,
      maxBatchTime,
      minRows,
      maxRows,
      "records")
  graphUIDataForInputRows.generateDataJs(jsCollector)
  val graphUIDataForBatchDuration =
    new GraphUIData(
      "batch-duration-timeline",
      "batch-duration-histogram",
      batchDurations,
      minBatchTime,
      maxBatchTime,
      minBatchDuration,
      maxBatchDuration,
      "ms")
  graphUIDataForBatchDuration.generateDataJs(jsCollector)
  // Placeholder instance: the area-stack chart gets its real data via
  // generateAreaStackHtmlWithData below, so the series/bounds here are unused.
  val graphUIDataForDuration =
    new GraphUIData(
      "duration-area-stack",
      "",
      Seq.empty[(Long, Double)],
      0L,
      0L,
      0L,
      0L,
      "ms")
  val table = if (query.lastProgress != null) {
    // scalastyle:off
    <table id="stat-table" class="table table-bordered" style="width: auto">
      <thead>
        <tr>
          <th style="width: 160px;"></th>
          <th style="width: 492px;">Timelines</th>
          <th style="width: 350px;">Histograms</th>
        </tr>
      </thead>
      <tbody>
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 160px;">
              <div><strong>Input Rate {SparkUIUtils.tooltip("The aggregate (across all sources) rate of data arriving.", "right")}</strong></div>
            </div>
          </td>
          <td class="timeline">{graphUIDataForInputRate.generateTimelineHtml(jsCollector)}</td>
          <td class="histogram">{graphUIDataForInputRate.generateHistogramHtml(jsCollector)}</td>
        </tr>
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 160px;">
              <div><strong>Process Rate {SparkUIUtils.tooltip("The aggregate (across all sources) rate at which Spark is processing data.", "right")}</strong></div>
            </div>
          </td>
          <td class="timeline">{graphUIDataForProcessRate.generateTimelineHtml(jsCollector)}</td>
          <td class="histogram">{graphUIDataForProcessRate.generateHistogramHtml(jsCollector)}</td>
        </tr>
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 160px;">
              <div><strong>Input Rows {SparkUIUtils.tooltip("The aggregate (across all sources) number of records processed in a trigger.", "right")}</strong></div>
            </div>
          </td>
          <td class="timeline">{graphUIDataForInputRows.generateTimelineHtml(jsCollector)}</td>
          <td class="histogram">{graphUIDataForInputRows.generateHistogramHtml(jsCollector)}</td>
        </tr>
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: 160px;">
              <div><strong>Batch Duration {SparkUIUtils.tooltip("The process duration of each batch.", "right")}</strong></div>
            </div>
          </td>
          <td class="timeline">{graphUIDataForBatchDuration.generateTimelineHtml(jsCollector)}</td>
          <td class="histogram">{graphUIDataForBatchDuration.generateHistogramHtml(jsCollector)}</td>
        </tr>
        <tr>
          <td style="vertical-align: middle;">
            <div style="width: auto;">
              <div><strong>Operation Duration {SparkUIUtils.tooltip("The amount of time taken to perform various operations in milliseconds.", "right")}</strong></div>
            </div>
          </td>
          <td class="duration-area-stack" colspan="2">{graphUIDataForDuration.generateAreaStackHtmlWithData(jsCollector, operationDurationData)}</td>
        </tr>
        {generateWatermark(query, minBatchTime, maxBatchTime, jsCollector)}
        {generateAggregatedStateOperators(query, minBatchTime, maxBatchTime, jsCollector)}
      </tbody>
    </table>
  } else {
    <div id="empty-streaming-query-message">
      <b>No visualization information available.</b>
    </div>
    // scalastyle:on
  }
  generateTimeToValues(operationDurationData) ++
    generateFormattedTimeTipStrings(batchToTimestamps) ++
    generateTimeMap(batchTimes) ++ generateTimeTipStrings(batchToTimestamps) ++
    table ++ jsCollector.toHtml
}
}
| shaneknapp/spark | sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala | Scala | apache-2.0 | 22,589 |
package se.lu.nateko.cp.meta.core.crypto
import java.util.Arrays
import java.util.Base64
import scala.util.{Try, Success, Failure}
import scala.util.control.NoStackTrace
/**
 * An immutable SHA-256 hash sum.
 *
 * A value is either complete (32 bytes) or truncated to the first 18 bytes. A complete
 * hash and its truncation compare `equals`: when truncation status differs, only the
 * first 18 bytes are compared.
 */
class Sha256Sum(private val bytes: Array[Byte]) {
  import Sha256Sum._

  assert(byteLengthCorrect(bytes), byteLengthMessage)

  /** The raw hash bytes: 32 if complete, 18 if truncated. */
  def getBytes: Seq[Byte] = bytes.toSeq

  def isTruncated: Boolean = bytes.length < 32

  /** This hash shortened to its first 18 bytes; returns `this` if already truncated. */
  def truncate: Sha256Sum = if(isTruncated) this else new Sha256Sum(bytes.take(18))

  def base64: String = Base64.getEncoder.withoutPadding.encodeToString(bytes)
  def base64Url: String = Base64.getUrlEncoder.withoutPadding.encodeToString(bytes)
  def hex: String = bytes.iterator.map(Sha256Sum.formatByte).mkString

  /**
   * URL- and filename-friendly id that is sufficiently unique.
   * Contains 18 bytes of binary information, base64Url-encoded in 24 symbols.
   * Even after upper-casing, truncating to 24 symbols encodes 15.74 bytes of information.
   * This is almost as much as UUIDs, which have 16 bytes.
   * The amount of combinations (38^24 = 8.22e37) is only 4.14 times less than for a random UUID.
   * The number of combinations of a single symbol after upper-casing is 38 = 64 - 26 .
   */
  def id: String = base64Url.substring(0, Sha256Sum.IdLength)

  /** Equality ignores truncation: only the shared 18-byte prefix is compared when one side is truncated. */
  override def equals(other: Any): Boolean = other match {
    case hash2: Sha256Sum =>
      if(this.isTruncated == hash2.isTruncated)
        Arrays.equals(this.bytes, hash2.bytes)
      else
        Arrays.equals(this.truncate.bytes, hash2.truncate.bytes)
    case _ => false
  }

  // Hash only the 18-byte prefix: a complete hash and its truncation are `equals`,
  // so they must share a hash code (previously the raw bytes were hashed, which broke
  // the equals/hashCode contract and made such pairs collide incorrectly in hash maps).
  override def hashCode: Int = Arrays.hashCode(truncate.bytes)

  override def toString: String = base64
}

/** Factory and parsing helpers for [[Sha256Sum]]. */
object Sha256Sum {
  val IdLength = 24

  def fromBase64(hash: String): Try[Sha256Sum] = Try(Base64.getDecoder.decode(hash)).flatMap(fromBytes)
  def fromBase64Url(hash: String): Try[Sha256Sum] = Try(Base64.getUrlDecoder.decode(hash)).flatMap(fromBytes)
  def fromHex(hash: String): Try[Sha256Sum] = Try(parseHexArray(hash)).flatMap(fromBytes)

  /** Tries hex first, then base64url, then base64; fails only when all three fail. */
  def fromString(hash: String): Try[Sha256Sum] = fromHex(hash).orElse(
    fromBase64Url(hash).orElse(
      fromBase64(hash).orElse(Failure(new Exception(
        "Could not parse SHA-256 hashsum, expected a 32- or 18-byte array, either hex-, Base64Url-, or Base64-encoded"
      )))
    )
  )

  /** Wraps `bytes` without copying; fails unless the length is 32 (complete) or 18 (truncated). */
  def fromBytes(bytes: Array[Byte]): Try[Sha256Sum] =
    if(byteLengthCorrect(bytes))
      Success(new Sha256Sum(bytes))
    else
      Failure(new IllegalArgumentException(byteLengthMessage) with NoStackTrace)

  def unapply(hash: String): Option[Sha256Sum] = fromString(hash).toOption

  /** Formats one byte as two lowercase hex digits. */
  val formatByte: Byte => String = b => String.format("%02x", Int.box(255 & b))

  /** Parses a lowercase/uppercase hex string (even length required) into raw bytes. */
  def parseHexArray(hex: String): Array[Byte] = {
    val strLen = hex.length
    assert(strLen % 2 == 0, "hex string must have even number of characters")
    Array.tabulate(strLen / 2){i =>
      Integer.parseInt(hex.substring(i * 2, (i + 1) * 2), 16).toByte
    }
  }

  def byteLengthCorrect(bytes: Array[Byte]): Boolean = bytes.length == 32 || bytes.length == 18
  val byteLengthMessage = "SHA-256 hash sum must be 32 (complete) or 18 (truncated) bytes long"
}
| ICOS-Carbon-Portal/meta | core/src/main/scala/se/lu/nateko/cp/meta/core/crypto/Sha256Sum.scala | Scala | gpl-3.0 | 3,072 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.Table
import com.intel.analytics.bigdl.nn.CSubTable
import scala.collection.mutable.HashMap
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class CSubTableSpec extends TorchSpec {
  // NOTE(review): the description says "CDivTable" but the module under test is CSubTable —
  // likely a copy-paste slip from a sibling spec; confirm before renaming the test.
  "A CDivTable Module" should "generate correct output and grad" in {
    // Skips the test unless a Torch installation is available.
    torchCheck()
    val seed = 100
    RNG.setSeed(seed)
    val module = new CSubTable[Double]()
    val input1 = Tensor[Double](5).apply1(e => Random.nextDouble())
    val input2 = Tensor[Double](5).apply1(e => Random.nextDouble())
    val gradOutput = Tensor[Double](5).apply1(e => Random.nextDouble())
    // Table keys are Doubles here so they survive the Scala <-> Lua round trip.
    val input = new Table()
    input(1.toDouble) = input1
    input(2.toDouble) = input2
    val start = System.nanoTime()
    val output = module.forward(input)
    val gradInput = module.backward(input, gradOutput)
    val end = System.nanoTime()
    val scalaTime = end - start
    // Reference implementation: the same forward/backward run in Lua Torch.
    val code = "torch.manualSeed(" + seed + ")\\n" +
      "module = nn.CSubTable()\\n" +
      "output = module:forward(input)\\n" +
      "gradInput = module:backward(input,gradOutput)"
    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("output", "gradInput"))
    val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaOutput2 = torchResult("gradInput").asInstanceOf[Table]
    // BigDL output must match the Torch reference exactly.
    luaOutput1 should be(output)
    luaOutput2 should be (gradInput)
    println("Test case : CSubTable, Torch : " + luaTime +
      " s, Scala : " + scalaTime / 1e9 + " s")
  }
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/CSubTableSpec.scala | Scala | apache-2.0 | 2,301 |
package scala.reflect
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.testkit.RunTesting
object ClassOfTest {
  /** Value class used by the tests to check `classOf` resolution for value-class aliases. */
  class VC(val x: Any) extends AnyVal
}
@RunWith(classOf[JUnit4])
class ClassOfTest extends RunTesting {
  import runner._

  /** `classOf` on a type alias must resolve to the same Class as the aliased type. */
  @Test
  def classOfValueClassAlias(): Unit = {
    val code =
      """import scala.reflect.ClassOfTest.VC
        |type aVC = VC
        |type aInt = Int
        |type aInteger = Integer
        |classOf[VC] == classOf[aVC] &&
        |  classOf[aInt] == classOf[Int] &&
        |  classOf[aInteger] == classOf[Integer] &&
        |  classOf[aInt] != classOf[aInteger]
      """.stripMargin
    assertTrue(run[Boolean](code))
  }

  /** A `final val = classOf[...]` (constant-folded) must equal the non-final version. */
  @Test
  def classOfFinalVal(): Unit = {
    val code =
      """class C {
        |  final val a1 = classOf[Int]
        |  final val b1 = classOf[List[_]]
        |  final val c1 = classOf[List[String]]
        |  final val d1 = classOf[Array[Int]]
        |  final val e1 = classOf[Array[List[_]]]
        |  final val f1 = classOf[Array[_]]
        |
        |  val a2 = classOf[Int]
        |  val b2 = classOf[List[_]]
        |  val c2 = classOf[List[String]]
        |  val d2 = classOf[Array[Int]]
        |  val e2 = classOf[Array[List[_]]]
        |  val f2 = classOf[Array[_]]
        |
        |  val listC = Class.forName("scala.collection.immutable.List")
        |
        |  val compare = List(
        |    (a1, a2, Integer.TYPE),
        |    (b1, b2, listC),
        |    (c1, c2, listC),
        |    (d1, d2, Array(1).getClass),
        |    (e1, e2, Array(List()).getClass),
        |    (f1, f2, new Object().getClass))
        |}
        |(new C).compare
      """.stripMargin
    type K = Class[_]
    val cs = run[List[(K, K, K)]](code)
    // Each triple: (final val, plain val, expected runtime class) — all three must agree.
    for ((x, y, z) <- cs) {
      assertEquals(x, y)
      assertEquals(x, z)
    }
  }

  /** Regression test for scala/bug#9702: `classOf` in annotation arguments with aliases. */
  @Test
  def t9702(): Unit = {
    val code =
      """import scala.tools.testkit.Resource
        |import scala.reflect.ClassOfTest.VC
        |class C {
        |  type aList[K] = List[K]
        |  type aVC = VC
        |  type aInt = Int
        |  type aInteger = Integer
        |  @Resource(`type` = classOf[List[Int]]) def a = 0
        |  @Resource(`type` = classOf[List[_]]) def b = 0
        |  @Resource(`type` = classOf[aList[_]]) def c = 0
        |  @Resource(`type` = classOf[Int]) def d = 0
        |  @Resource(`type` = classOf[aInt]) def e = 0
        |  @Resource(`type` = classOf[Integer]) def f = 0
        |  @Resource(`type` = classOf[aInteger]) def g = 0
        |  @Resource(`type` = classOf[VC]) def h = 0
        |  @Resource(`type` = classOf[aVC]) def i = 0
        |  @Resource(`type` = classOf[Array[Int]]) def j = 0
        |  @Resource(`type` = classOf[Array[List[_]]]) def k = 0
        |}
        |val c = classOf[C]
        |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type`
        |('a' to 'k').toList.map(_.toString).map(typeArg)
      """.stripMargin
    val l = Class.forName("scala.collection.immutable.List")
    val i = Integer.TYPE
    // NOTE(review): `new Integer(1)` is deprecated since Java 9; Integer.valueOf(1) would avoid the warning.
    val ig = new Integer(1).getClass
    val v = new ClassOfTest.VC(1).getClass
    val ai = Array(1).getClass
    val al = Array(List()).getClass
    // sanity checks
    assertEquals(i, classOf[Int])
    assertNotEquals(i, ig)
    assertEquals(run[List[Class[_]]](code),
      List(l, l, l, i, i, ig, ig, v, v, ai, al))
  }

  /** `classOf[Unit]` constant-folded in a final val must still be Void.TYPE at runtime. */
  @Test
  def classOfUnitConstant(): Unit = {
    val code =
      """abstract class A { def f: Class[_] }
        |class C extends A { final val f = classOf[Unit] }
        |val c = new C
        |(c.f, (c: A).f)
      """.stripMargin
    val u = Void.TYPE
    assertEquals(run[(Class[_], Class[_])](code), (u, u))
  }
}
| martijnhoekstra/scala | test/junit/scala/reflect/ClassOfTest.scala | Scala | apache-2.0 | 3,870 |
import com.slick101.test.cases.queries.CourseModel._
import com.slick101.test.{BaseTest, ServerDb}
import slick.jdbc.H2Profile.api._
import slick.lifted
import slick.lifted.Functions._
import com.slick101.test.cases.conversation.TypesafeId._
import scala.concurrent.ExecutionContext.Implicits.global
// show general scheme of executing query (db.run, result)
// show grouping by
/**
 * Demonstrates basic Slick query construction: projections, filtering, sorting,
 * distinct, paging and grouping. Only the first case makes real assertions; the
 * helper methods log the generated SQL/results via `querySync` for inspection.
 */
class QueriesSpec extends BaseTest with ServerDb {

  // tests
  "Students search" must {
    "return at least 5 students" in {
      db.run(StudentTable.result).map { results =>
        results.length should be >= 5
      }.futureValue

      db.run(
        StudentTable.map(student =>
          (student.name, student.surname)
        ).result
      ).map { results =>
        results.length should be >= 5
      }.futureValue
    }

    "general query test" in {
      db.run(
        StudentTable
          .result
      ).map { results =>
        log.info(s"\\n${results.mkString("\\n")}")
        results.length should be > 0
      }.futureValue
    }
  }

  "various tests" must {
    "generate queries" in {
      simpleSelect
      projections
      filtering
      otherQueries
    }
  }

  /** Plain `select *`, written directly and as a for-comprehension. */
  def simpleSelect: Unit = {
    log.info("=== Simple select")
    querySync(
      StudentTable
    )

    log.info("=== Simple select / for-comprehension")
    querySync(
      for {student <- StudentTable }
        yield student
    )
  }

  /** Column projections: single column, chained maps, tuples and SQL functions. */
  def projections: Unit = {
    log.info("=== Simple select with projection")
    querySync(
      StudentTable
        .map(_.name)
    )

    log.info("=== Multiple mapping")
    querySync(
      StudentTable
        .map(nat => nat.nationality ++ " ")
        .map(_.toUpperCase)
        .map(_.trim)
        .map((_, currentTime, pi))
        .map(row => row._1 ++ " " ++ row._2.asColumnOf[String] ++ " " ++ row._3.asColumnOf[String])
    )

    log.info("=== Simple select with more complicated projection")
    querySync(
      StudentTable
        .sortBy(_.name)
        .map(s => (s.name, s.middleName.ifNull("*no-middlename*")))
    )

    log.info("=== Simple select with more complicated projection (reversed order)")
    querySync(
      StudentTable
        .map(s => (s.name, s.middleName.ifNull("*no-middlename*")))
        .sortBy(_._1)
    )
  }

  /** Filtering, negated filters, sorting and distinct examples. */
  // Was `def filtering { ... }`: procedure syntax is deprecated (and removed in
  // Scala 3), so declare the result type explicitly like the sibling methods.
  def filtering: Unit = {
    log.info("=== Select with filter")
    querySync(
      StudentTable.filter(_.name === "Tom")
    )

    log.info("=== Select with filter / for-comprehension")
    querySync(
      (for {
        student <- StudentTable if student.name === "Tom"
      } yield student)
    )

    log.info("=== Select with more filters")
    querySync(
      StudentTable
        .filterNot(student => student.name === "Tom" && student.surname.startsWith("Smi"))
    )

    log.info("=== Select with sorting")
    querySync(
      StudentTable
        .filter(student => student.middleName.nonEmpty)
        .sortBy(_.name)
    )

    log.info("=== Select more complicated sorting")
    querySync(
      StudentTable
        .filter(student => student.middleName.nonEmpty)
        .sortBy(s => (s.name.desc, s.middleName.asc))
        .distinct
    )

    log.info("=== Select more distinct")
    querySync(
      StudentTable
        .map(_.name)
        .distinct
    )
  }

  /** Paging (drop/take in both orders), distinctOn and groupBy with aggregation. */
  def otherQueries: Unit = {
    log.info("=== Select limit / offset")
    querySync(
      StudentTable
        .map(s => (s.name, s.surname))
        .drop(2)
        .take(3)
    )

    // Note: drop/take order matters — Slick composes OFFSET/LIMIT differently here.
    log.info("=== Select limit / offset (reversed order)")
    querySync(
      StudentTable
        .map(s => (s.name, s.surname))
        .take(3)
        .drop(2)
    )

    log.info("=== Interesting distinct")
    querySync(
      StudentTable
        .map(s => (s.name, s.surname))
        .distinctOn(_._1)
    )

    log.info("=== Group by")
    querySync(
      StudentTable
        .filter(_.surname =!= "Test")
        .groupBy(_.surname)
        .map { case (surname, group) =>
          (surname, group.map(_.name).countDistinct)
        }
        .filter(row => row._2 > 5)
    )
  }
}
| nexelem/slick-101 | src/test/scala/com/slick101/test/cases/queries/QueriesSpec.scala | Scala | apache-2.0 | 4,056 |
package chapter.fourteen
import ExerciseOne._
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class ExerciseOneSpec extends FlatSpec with Matchers {
  // Placeholder spec: the behaviour of ExerciseOne has not been specified yet,
  // so this case has an empty description and an empty body.
  "function" should "" in {
  }
}
| deekim/impatient-scala | src/test/scala/chapter/fourteen/ExerciseOneSpec.scala | Scala | apache-2.0 | 264 |
/*
* Copyright (c) 2017-2022 Lymia Alusyia <lymia@lymiahugs.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package moe.lymia.princess.svg
import moe.lymia.princess.core._
import moe.lymia.princess.core.gamedata.GameData
import moe.lymia.princess.util._
import org.eclipse.swt.SWT
import org.eclipse.swt.graphics.ImageLoader
import java.awt.Font
import java.io.ByteArrayOutputStream
import java.nio.file.{Files, Path}
import java.util.Base64
import scala.collection.mutable
import scala.xml.{XML => _, _}
// TODO: Add caching for these resources between renders
/**
 * Strategy for turning card resources on disk into SVG document fragments.
 * Implementations choose between linking resources by URI (fast, for on-screen
 * rasterization) and embedding them as data URLs (self-contained, for export).
 */
trait ResourceLoader {
  /** Loads a raster image as an `<image>` element; `reencode` optionally names a format to convert to. */
  def loadRaster   (cache: SizedCache, reencode: Option[String], expectedMime: String, path: Path): Elem
  /** Loads an SVG vector image for inline inclusion. */
  def loadVector   (cache: SizedCache, path: Path): Elem
  /** Loads an XML definition fragment (placed in `<defs>`). */
  def loadDefinition(cache: SizedCache, path: Path): Elem
}
/** Shared caches and helpers used by the loader trait implementations below. */
private object ResourceLoader {
  // Fallback used when a URI scheme cannot simply be linked from the SVG.
  val dataURLLoader: DataURLRasterLoader = new DataURLRasterLoader {}
  // URI schemes that SVG renderers can dereference directly via xlink:href.
  val normalSchemes = Set("http", "https", "ftp", "file")

  val loadXMLCache = new CacheSection[Path, Elem]
  val loadImageCache = new CacheSection[Path, String]

  /** Parses the XML file at `path`, caching the result weighted by file size. */
  def cachedLoadXML(cache: SizedCache, path: Path): Elem = cache.cached(loadXMLCache)(path, {
    val size = Files.size(path)
    // Second tuple element is the cache weight (bytes on disk).
    (XML.load(Files.newInputStream(path)), size)
  })
}
/** Loads definition fragments by inlining the parsed XML (cached by file size). */
trait IncludeDefinitionLoader {
  def loadDefinition(cache: SizedCache, path: Path): Elem = ResourceLoader.cachedLoadXML(cache, path)
}
/** Loads vector images by inlining the parsed SVG, clipping overflow to its viewport. */
trait IncludeVectorLoader {
  def loadVector(cache: SizedCache, path: Path): Elem =
    ResourceLoader.cachedLoadXML(cache, path) % Attribute(null, "overflow", "hidden", Null)
}
/**
 * Loads raster images by URI reference when the scheme is directly resolvable
 * (http/https/ftp/file); otherwise falls back to embedding a data URL.
 */
trait LinkRasterLoader {
  def loadRaster(cache: SizedCache, reencode: Option[String], expectedMime: String, path: Path): Elem = {
    val uri = path.toUri
    if(ResourceLoader.normalSchemes.contains(uri.getScheme))
      <image xlink:href={path.toUri.toASCIIString}/>
    else ResourceLoader.dataURLLoader.loadRaster(cache, reencode, expectedMime, path)
  }
}
/**
 * Loads raster images by embedding them as base64 data URLs, producing a
 * self-contained SVG (used for export). When `reencode` is given, the image is
 * converted with SWT's ImageLoader before embedding; results are cached weighted
 * by the data-URL length.
 */
trait DataURLRasterLoader {
  def loadRaster(cache: SizedCache, reencode: Option[String], expectedMime: String, path: Path): Elem =
    <image xlink:href={cache.cached(ResourceLoader.loadImageCache)(path, {
      val data = reencode match {
        case None => Files.readAllBytes(path)
        case Some(reencodeTo) =>
          val imageReader = new ImageLoader()
          imageReader.load(Files.newInputStream(path))
          val byteOut = new ByteArrayOutputStream()
          // Accept both bare format names ("png") and MIME types ("image/png"):
          // the BMP entry in ResourceManager passes Some("png"), which previously
          // matched neither case and raised a MatchError at runtime.
          val swtFormat = reencodeTo.toLowerCase() match {
            case "png" | "image/png" => SWT.IMAGE_PNG
            case "jpg" | "jpeg" | "image/jpeg" => SWT.IMAGE_JPEG
            case other => throw EditorException(s"unsupported raster re-encode format '$other'")
          }
          imageReader.save(byteOut, swtFormat)
          byteOut.toByteArray
      }
      val uri = s"data:$expectedMime;base64,${Base64.getEncoder.encodeToString(data)}"
      // Second tuple element is the cache weight (encoded URI length).
      (uri, uri.length)
    })}/>
}
/** Loader for on-screen rasterization: links rasters by URI where possible (fast, not self-contained). */
object RasterizeResourceLoader
  extends ResourceLoader with IncludeDefinitionLoader with IncludeVectorLoader with LinkRasterLoader

/** Loader for SVG export: embeds all rasters as data URLs so the output is self-contained. */
object ExportResourceLoader
  extends ResourceLoader with IncludeDefinitionLoader with IncludeVectorLoader with DataURLRasterLoader
// How a given image file format is brought into the SVG.
private sealed trait ImageFormatType
private object ResourceFormatType {
  /** Raster format embedded/linked with `mime`; `reencode` names a format to convert to first. */
  case class Raster(mime: String, reencode: Option[String] = None) extends ImageFormatType
  /** SVG vector format, inlined directly. */
  case object Vector extends ImageFormatType
}
// A supported image format: the file extensions it is found under and how to load it.
private case class ImageFormat(extensions: Seq[String], formatType: ImageFormatType)
/**
 * Resolves named image and definition resources from the game packages into SVG
 * definitions on `builder`, memoizing lookups per instance (including failed ones).
 *
 * @param builder  target SVG document builder that receives created definitions
 * @param settings render settings for the current output
 * @param cache    shared size-bounded cache for parsed/encoded resource data
 * @param loader   strategy for linking vs. embedding resources
 * @param packages game data used to resolve resource paths
 */
final class ResourceManager(builder: SVGBuilder, settings: RenderSettings, cache: SizedCache,
                            loader: ResourceLoader, packages: GameData) {
  // System font exported by the packages, or the JVM's sans-serif as a fallback.
  lazy val systemFont: Font = {
    val tryResolve = packages.getSystemExports("princess/system_font").headOption.flatMap(x =>
      packages.resolve(x.path).map(path => Font.createFont(Font.TRUETYPE_FONT, Files.newInputStream(path))))
    tryResolve.getOrElse(new Font(Font.SANS_SERIF, Font.PLAIN, 1))
  }

  // Drops the extension from the last path segment only ("a/b.png" -> "a/b"),
  // so lookups are extension-agnostic.
  private def stripExtension(name: String) = {
    val split = name.split("/")
    val components = split.last.split("\\\\.")
    (split.init :+ (if(components.length == 1) components.head else components.init.mkString("."))).mkString("/")
  }

  // Tries each supported extension in priority order (svg, png, bmp, jpg/jpeg);
  // `.view` keeps the search lazy so resolution stops at the first hit.
  private def tryFindImageResource(name: String, bounds: Bounds) =
    ResourceManager.formatSearchList.view.map { case (extension, format) =>
      packages.resolve(s"$name.$extension").map(fullPath =>
        format.formatType match {
          case ResourceFormatType.Raster(mime, reencode) =>
            builder.createDefinitionFromContainer(name, bounds, loader.loadRaster(cache, reencode, mime, fullPath))
          case ResourceFormatType.Vector =>
            builder.createDefinitionFromContainer(name, bounds, loader.loadVector(cache, fullPath))
        }
      )
    }.find(_.isDefined).flatten

  // Memoizes lookups by extension-stripped name; None entries record failures.
  val imageResourceCache = new mutable.HashMap[String, Option[SVGDefinitionReference]]

  /** Resolves `name` (any/no extension) to an SVG definition; throws EditorException if absent. */
  def loadImageResource(name: String, bounds: Bounds): SVGDefinitionReference =
    imageResourceCache.getOrElseUpdate(stripExtension(name), tryFindImageResource(stripExtension(name), bounds))
      .getOrElse(throw EditorException(s"image '$name' not found"))

  private def tryFindDefinition(name: String) =
    packages.resolve(name).map(path =>
      builder.createDefinition(name, loader.loadDefinition(cache, path), isDef = true))

  val definitionCache = new mutable.HashMap[String, Option[String]]

  /** Resolves `name` to a `<defs>` entry id; throws EditorException if absent. */
  def loadDefinition(name: String): String =
    definitionCache.getOrElseUpdate(name, tryFindDefinition(name))
      .getOrElse(throw EditorException(s"definition '$name' not found"))
}
object ResourceManager {
  // Supported formats in lookup priority order: vector first, then lossless
  // rasters, then JPEG.
  private val imageFormats = Seq(
    ImageFormat(Seq("svg"), ResourceFormatType.Vector),
    ImageFormat(Seq("png"), ResourceFormatType.Raster("image/png")),
    ImageFormat(Seq("bmp"), ResourceFormatType.Raster("image/png", Some("png"))), // bmp is lossless but big
    ImageFormat(Seq("jpg", "jpeg"), ResourceFormatType.Raster("image/jpeg"))
  )
  // Flattened (extension -> format) pairs, preserving the priority order above.
  private val formatSearchList = imageFormats.flatMap(x => x.extensions.map(y => (y, x)))
} | Lymia/PrincessEdit | modules/princess-edit/src/main/scala/moe/lymia/princess/svg/resources.scala | Scala | mit | 6,957 |
package com.ru.waka.servlets
import java.io.{File, FileInputStream}
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.services.calendar.CalendarScopes
import com.ru.waka.CalendarClient
import com.ru.waka.repository.EventRepositoryOnMemory
import com.typesafe.scalalogging.LazyLogging
import org.json4s.DefaultFormats
import org.json4s.ext.JodaTimeSerializers
import org.json4s.native.Serialization
import scala.collection.JavaConverters._
/**
 * Servlet serving mokumoku (study-session) events as JSON, read from a Google
 * Calendar through an in-memory repository that is refreshed on each request.
 */
class MokumokuEvents extends HttpServlet with LazyLogging {
  import MokumokuEvents._

  // json4s formats; Joda serializers are needed for event date/time fields.
  implicit val format = DefaultFormats ++ JodaTimeSerializers.all

  // Prefers a local service-account JSON file; falls back to Application Default
  // Credentials (e.g. the GCE/GAE runtime identity) when the file is absent.
  val credential = {
    if (new File(credentialJsonPath).exists()) {
      logger.info(s"$credentialJsonPath File was found")
      GoogleCredential.fromStream(new FileInputStream(credentialJsonPath))
    } else {
      logger.info(s"$credentialJsonPath File was not found. Default Credential will be used")
      GoogleCredential.getApplicationDefault()
    }
  }.createScoped(Seq(CalendarScopes.CALENDAR_READONLY).asJava)

  val calendarClient = new CalendarClient(credential, eventCalendarId)
  val eventRepository = new EventRepositoryOnMemory(calendarClient)

  /** Refreshes the repository (logging failures without aborting) and writes all events as JSON. */
  override def doGet(request: HttpServletRequest, response: HttpServletResponse) {
    // Refresh failure is logged but does not fail the request: stale data is served instead.
    eventRepository.update.leftMap(th => logger.error(th.getMessage, th))
    val body = Serialization.write(eventRepository.getAll.map(_.toMap))
    response.setContentType("application/json;charset=UTF-8")
    response.getWriter.println(body)
  }
}
object MokumokuEvents {
  /** Credential JSON file path, resolved relative to the working directory. */
  val credentialJsonPath = "dark-google-calendar.json"
  /** ID of the Google Calendar that holds the events. */
  val eventCalendarId = "h2j0hj6rh0kadoi561c03amv84@group.calendar.google.com"
}
| ngineerxiv/dark-api | src/main/scala/com/ru/waka/servlets/MokumokuEvents.scala | Scala | mit | 1,782 |
/*
* Copyright 2014 Eric Zoerner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalable.client
import java.net.InetSocketAddress
import javafx.scene.Parent
import javafx.{ scene ⇒ jfxs }
import akka.actor._
import akka.io.Tcp.{ Connect, Connected, ConnectionClosed }
import scala.reflect.runtime.universe.typeOf
import scalable.client.chat.{ ChatController, ChatHandler }
import scalable.client.login.{ LoginHandler, LoginListener }
import scalable.client.tcp.TcpClient
import scalable.infrastructure.api._
import scalafx.Includes._
import scalafx.application.Platform
import scalafx.scene.Scene
import scalafx.stage.Stage
import scalafxml.core.{ DependenciesByType, FXMLLoader }
/**
* Root actor, used for tracking the user's client session information.
*
* @author Eric Zoerner <a href="mailto:eric.zoerner@gmail.com">eric.zoerner@gmail.com</a>
*/
object ClientApp {
  /** Actor name under which the root client actor is created. */
  val path = "root"
  /** Props for the root client actor. */
  def props(loginListener: LoginListener) = Props(new ClientApp(loginListener))
}
class ClientApp(loginListener: LoginListener)
  extends Actor with ActorLogging with ChatHandler with LoginHandler {
  private val tcpClient = context.actorOf(TcpClient.props(self), TcpClient.path)
  // Pending login request, captured by connect() and replayed once the TCP
  // channel reports Connected.
  private var login: Option[AskLogin] = None
  addListener(loginListener)

  /** Record the pending login request and open a TCP connection to the server. */
  def connect(host: String, port: Int, loginMsg: AskLogin) = {
    login = Some(loginMsg)
    tcpClient ! Connect(new InetSocketAddress(host, port))
  }

  /** Build and show the Lobby window; JavaFX requires this on its own thread,
    * hence Platform.runLater. */
  def openLobby(username: String): Unit = Platform.runLater {
    removeListener(loginListener)
    // NOTE(review): typeOf[String] -> username appears twice in this map; the
    // second entry overwrites the first, so the duplicate is harmless.
    val loader: FXMLLoader = new FXMLLoader(getClass.getResource("Lobby.fxml"),
      new DependenciesByType(Map(typeOf[String] → username,
        typeOf[ActorSystem] → context.system,
        typeOf[ChatHandler] → this,
        typeOf[LoginHandler] → this,
        typeOf[String] → username)))
    loader.load()
    val root: Parent = loader.getRoot[jfxs.Parent]
    val controller = loader.getController[ChatController]()
    val stage: Stage = new Stage() {
      title = "Lobby"
      scene = new Scene(root)
    }
    stage.show()
    controller.setStageAndSetupListeners(stage)
  }

  override def receive = {
    case msg: Connected ⇒
      log.info(msg.toString)
      // Was `tcpClient ! login.get`: Option.get throws NoSuchElementException
      // (crashing/restarting this actor) if Connected ever arrives before
      // connect() stored a request. Handle the empty case explicitly instead.
      login match {
        case Some(ask) ⇒ tcpClient ! ask
        case None      ⇒ log.warning("TCP connected, but no login request is pending")
      }
    case (host: String, port: Int, msg: AskLogin) ⇒ connect(host, port, msg)
    case OpenLobby(username) ⇒ openLobby(username)
    case Join(username, roomName) ⇒ handleJoined(username, roomName)
    case LeaveChat(username, roomName) ⇒ handleLeft(username, roomName)
    // NOTE(review): id.get assumes the server always populates the chat id —
    // TODO confirm against the protocol.
    case Chat(id, username, roomName, htmlText) ⇒ handleChat(id.get, username, roomName, htmlText)
    case RoomInfo(roomName, history, participants) ⇒ handleRoomInfo(roomName, history, participants)
    case LoginResult(resultStatus, username) ⇒ handleLoginResult(resultStatus, username)
    case msg: ConnectionClosed ⇒
      log.info(s"Connection closed: $msg")
      handleConnectionClosed()
    case msg ⇒ log.info(s"ClientApp received: $msg")
  }
}
object Terminator {
  /** Props for a Terminator that watches the given actor. */
  def props(actor: ActorRef) = Props(new Terminator(actor))
  /** Actor name for the terminator. */
  def path = "terminator"
}
/** Watches `actor` and shuts the whole actor system down when it terminates. */
class Terminator(actor: ActorRef) extends Actor with ActorLogging {
  context watch actor
  def receive = {
    case Terminated(_) ⇒
      log.info("{} has terminated, shutting down system", actor.path)
      context.system.shutdown()
  }
}
case class OpenLobby(username: String)
| ezoerner/scalable-chat | client/src/main/scala/scalable/client/ClientApp.scala | Scala | apache-2.0 | 3,901 |
package shark.server
import org.apache.hive.service.cli.CLIService
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.shims.ShimLoader
import org.apache.hive.service.auth.HiveAuthFactory
import java.io.IOException
import org.apache.hive.service.ServiceException
import javax.security.auth.login.LoginException
import org.apache.spark.SparkEnv
import shark.{SharkServer, Utils}
class SharkCLIService extends CLIService {
  /**
   * Re-implements CLIService.init so a SharkSessionManager (instead of the
   * stock session manager) is installed, then performs the usual kerberos
   * login followed by Shark-specific initialisation.
   */
  override def init(hiveConf: HiveConf) {
    this.synchronized {
      Utils.setSuperField("hiveConf", hiveConf, this)
      val sessionManager = new SharkSessionManager
      Utils.setSuperField("sessionManager", sessionManager, this)
      addService(sessionManager)
      try {
        HiveAuthFactory.loginFromKeytab(hiveConf)
        val serverUserName = ShimLoader.getHadoopShims
          .getShortUserName(ShimLoader.getHadoopShims.getUGIForConf(hiveConf))
        Utils.setSuperField("serverUserName", serverUserName, this)
      } catch {
        // Both failure modes get identical treatment, so one arm suffices.
        case e @ (_: IOException | _: LoginException) =>
          throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
      }
      // Make sure the ThreadLocal SparkEnv reference is the same for all threads.
      SparkEnv.set(SharkServer.sparkEnv)
      sharkInit(hiveConf)
    }
  }
}
| lzshlzsh/shark | src/main/scala/shark/server/SharkCLIService.scala | Scala | apache-2.0 | 1,433 |
package com.twitter.finagle.httpx
import com.twitter.io.{Buf, Reader => BufReader, Writer => BufWriter}
import com.twitter.finagle.netty3.{ChannelBufferBuf, BufChannelBuffer}
import com.twitter.finagle.httpx.netty.{HttpMessageProxy, Bijections}
import com.twitter.util.{Await, Duration, Closable}
import java.io.{InputStream, InputStreamReader, OutputStream, OutputStreamWriter, Reader, Writer}
import java.util.{Iterator => JIterator}
import java.nio.charset.Charset
import java.util.{Date, TimeZone}
import org.apache.commons.lang.StringUtils
import org.apache.commons.lang.time.FastDateFormat
import org.jboss.netty.buffer.{
ChannelBufferInputStream, DynamicChannelBuffer, ChannelBuffer,
ChannelBufferOutputStream, ChannelBuffers
}
import scala.collection.JavaConverters._
import Bijections._
/**
* Rich Message
*
* Base class for Request and Response. There are both input and output
* methods, though only one set of methods should be used.
*/
abstract class Message extends HttpMessageProxy {
private[this] val readerWriter = BufReader.writable()
/**
* A read-only handle to the internal stream of bytes, representing the
* message body. See [[com.twitter.util.Reader]] for more information.
**/
def reader: BufReader = readerWriter
/**
* A write-only handle to the internal stream of bytes, representing the
* message body. See [[com.twitter.util.Writer]] for more information.
**/
def writer: BufWriter with Closable = readerWriter
def isRequest: Boolean
def isResponse = !isRequest
def content: Buf = ChannelBufferBuf(getContent())
def content_=(content: Buf) { setContent(BufChannelBuffer(content)) }
def version: Version = from(getProtocolVersion())
def version_=(version: Version) { setProtocolVersion(from(version)) }
lazy val headerMap: HeaderMap = new MessageHeaderMap(this)
/**
* Cookies. In a request, this uses the Cookie headers.
* In a response, it uses the Set-Cookie headers.
*/
lazy val cookies = new CookieMap(this)
// Java users: use the interface below for cookies
/** Get iterator over Cookies */
def getCookies(): JIterator[Cookie] = cookies.valuesIterator.asJava
/** Add a cookie */
def addCookie(cookie: Cookie) {
cookies += cookie
}
/** Remove a cookie */
def removeCookie(name: String) {
cookies -= name
}
/** Accept header */
def accept: Seq[String] =
Option(headers.get(Fields.Accept)) match {
case Some(s) => s.split(",").map(_.trim).filter(_.nonEmpty)
case None => Seq()
}
/** Set Accept header */
def accept_=(value: String) { headers.set(Fields.Accept, value) }
/** Set Accept header with list of values */
def accept_=(values: Iterable[String]) { accept = values.mkString(", ") }
/** Accept header media types (normalized, no parameters) */
def acceptMediaTypes: Seq[String] =
accept.map {
_.split(";", 2).headOption
.map(_.trim.toLowerCase) // media types are case-insensitive
.filter(_.nonEmpty) // skip blanks
}.flatten
/** Allow header */
def allow: Option[String] = Option(headers.get(Fields.Allow))
/** Set Authorization header */
def allow_=(value: String) { headers.set(Fields.Allow, value) }
/** Set Authorization header */
def allow_=(values: Iterable[Method]) { allow = values.mkString(",").toUpperCase }
/** Get Authorization header */
def authorization: Option[String] = Option(headers.get(Fields.Authorization))
/** Set Authorization header */
def authorization_=(value: String) { headers.set(Fields.Authorization, value) }
/** Get Cache-Control header */
def cacheControl: Option[String] = Option(headers.get(Fields.CacheControl))
/** Set Cache-Control header */
def cacheControl_=(value: String) { headers.set(Fields.CacheControl, value) }
/** Set Cache-Control header with a max-age (and must-revalidate). */
def cacheControl_=(maxAge: Duration) {
cacheControl = "max-age=" + maxAge.inSeconds.toString + ", must-revalidate"
}
/** Get charset from Content-Type header */
def charset: Option[String] = {
contentType.foreach { contentType =>
val parts = StringUtils.split(contentType, ';')
1.to(parts.length - 1) foreach { i =>
val part = parts(i).trim
if (part.startsWith("charset=")) {
val equalsIndex = part.indexOf('=')
val charset = part.substring(equalsIndex + 1)
return Some(charset)
}
}
}
None
}
/** Set charset in Content-Type header. This does not change the content. */
def charset_=(value: String) {
val contentType = this.contentType.getOrElse("")
val parts = StringUtils.split(contentType, ';')
if (parts.isEmpty) {
this.contentType = ";charset=" + value // malformed
return
}
val builder = new StringBuilder(parts(0))
if (!(parts.exists { _.trim.startsWith("charset=") })) {
// No charset parameter exist, add charset after media type
builder.append(";charset=")
builder.append(value)
// Copy other parameters
1.to(parts.length - 1) foreach { i =>
builder.append(";")
builder.append(parts(i))
}
} else {
// Replace charset= parameter(s)
1.to(parts.length - 1) foreach { i =>
val part = parts(i)
if (part.trim.startsWith("charset=")) {
builder.append(";charset=")
builder.append(value)
} else {
builder.append(";")
builder.append(part)
}
}
}
this.contentType = builder.toString
}
/** Get Content-Length header. Use length to get the length of actual content. */
def contentLength: Option[Long] =
Option(headers.get(Fields.ContentLength)).map { _.toLong }
/** Set Content-Length header. Normally, this is automatically set by the
* Codec, but this method allows you to override that. */
def contentLength_=(value: Long) {
headers.set(Fields.ContentLength, value.toString)
}
/** Get Content-Type header */
def contentType: Option[String] = Option(headers.get(Fields.ContentType))
/** Set Content-Type header */
def contentType_=(value: String) { headers.set(Fields.ContentType, value) }
/** Set Content-Type header by media-type and charset */
def setContentType(mediaType: String, charset: String = "utf-8") {
headers.set(Fields.ContentType, mediaType + ";charset=" + charset)
}
/** Set Content-Type header to application/json;charset=utf-8 */
def setContentTypeJson() { headers.set(Fields.ContentType, Message.ContentTypeJson) }
/** Get Date header */
def date: Option[String] = Option(headers.get(Fields.Date))
/** Set Date header */
def date_=(value: String) { headers.set(Fields.Date, value) }
/** Set Date header by Date */
def date_=(value: Date) { date = Message.httpDateFormat(value) }
/** Get Expires header */
def expires: Option[String] = Option(headers.get(Fields.Expires))
/** Set Expires header */
def expires_=(value: String) { headers.set(Fields.Expires, value) }
/** Set Expires header by Date */
def expires_=(value: Date) { expires = Message.httpDateFormat(value) }
/** Get Host header */
def host: Option[String] = Option(headers.get(Fields.Host))
/** Set Host header */
def host_=(value: String) { headers.set(Fields.Host, value) }
/** Get Last-Modified header */
def lastModified: Option[String] = Option(headers.get(Fields.LastModified))
/** Set Last-Modified header */
def lastModified_=(value: String) { headers.set(Fields.LastModified, value) }
/** Set Last-Modified header by Date */
def lastModified_=(value: Date) { lastModified = Message.httpDateFormat(value) }
/** Get Location header */
def location: Option[String] = Option(headers.get(Fields.Location))
/** Set Location header */
def location_=(value: String) { headers.set(Fields.Location, value) }
/** Get media-type from Content-Type header */
def mediaType: Option[String] =
contentType.flatMap { contentType =>
val beforeSemi =
contentType.indexOf(";") match {
case -1 => contentType
case n => contentType.substring(0, n)
}
val mediaType = beforeSemi.trim
if (mediaType.nonEmpty)
Some(mediaType.toLowerCase)
else
None
}
/**
* Set media-type in Content-Type header. Charset and parameter values are
* preserved, though may not be appropriate for the new media type.
*/
def mediaType_=(value: String) {
contentType match {
case Some(contentType) =>
val parts = StringUtils.split(contentType, ";", 2)
if (parts.length == 2) {
this.contentType = value + ";" + parts(1)
} else {
this.contentType = value
}
case None =>
this.contentType = value
}
}
/** Get Referer [sic] header */
def referer: Option[String] = Option(headers.get(Fields.Referer))
/** Set Referer [sic] header */
def referer_=(value: String) { headers.set(Fields.Referer, value) }
/** Get Retry-After header */
def retryAfter: Option[String] = Option(headers.get(Fields.RetryAfter))
/** Set Retry-After header */
def retryAfter_=(value: String) { headers.set(Fields.RetryAfter, value) }
/** Set Retry-After header by seconds */
def retryAfter_=(value: Long) { retryAfter = value.toString }
/** Get Server header */
def server: Option[String] = Option(headers.get(Fields.Server))
/** Set Server header */
def server_=(value: String) { headers.set(Fields.Server, value) }
/** Get User-Agent header */
def userAgent: Option[String] = Option(headers.get(Fields.UserAgent))
/** Set User-Agent header */
def userAgent_=(value: String) { headers.set(Fields.UserAgent, value) }
/** Get WWW-Authenticate header */
def wwwAuthenticate: Option[String] = Option(headers.get(Fields.WwwAuthenticate))
/** Set WWW-Authenticate header */
def wwwAuthenticate_=(value: String) { headers.set(Fields.WwwAuthenticate, value) }
/** Get X-Forwarded-For header */
def xForwardedFor: Option[String] = Option(headers.get("X-Forwarded-For"))
/** Set X-Forwarded-For header */
def xForwardedFor_=(value: String) { headers.set("X-Forwarded-For", value) }
/**
* Check if X-Requested-With contains XMLHttpRequest, usually signalling a
* request from a JavaScript AJAX libraries. Some servers treat these
* requests specially. For example, an endpoint might render JSON or XML
* instead HTML if it's an XmlHttpRequest. (Tip: don't do this - it's gross.)
*/
def isXmlHttpRequest = {
Option(headers.get("X-Requested-With")) exists { _.toLowerCase.contains("xmlhttprequest") }
}
/** Get length of content. */
def length: Int = getContent.readableBytes
def getLength(): Int = length
/** Get the content as a string. */
def contentString: String = {
val encoding = try {
Charset.forName(charset getOrElse "UTF-8")
} catch {
case _: Throwable => Message.Utf8
}
getContent.toString(encoding)
}
def getContentString(): String = contentString
/** Set the content as a string. */
def contentString_=(value: String) {
if (value != "")
setContent(BufChannelBuffer(Buf.ByteArray(value.getBytes("UTF-8"))))
else
setContent(BufChannelBuffer(Buf.Empty))
}
def setContentString(value: String) { contentString = value }
/**
* Use content as InputStream. The underlying channel buffer's reader
* index is advanced. (Scala interface. Java users can use getInputStream().)
*/
def withInputStream[T](f: InputStream => T): T = {
val inputStream = getInputStream()
val result = f(inputStream) // throws
inputStream.close()
result
}
/**
* Get InputStream for content. Caller must close. (Java interface. Scala
* users should use withInputStream.)
*/
def getInputStream(): InputStream =
new ChannelBufferInputStream(getContent)
/** Use content as Reader. (Scala interface. Java usrs can use getReader().) */
def withReader[T](f: Reader => T): T = {
withInputStream { inputStream =>
val reader = new InputStreamReader(inputStream)
f(reader)
}
}
/** Get Reader for content. (Java interface. Scala users should use withReader.) */
def getReader(): Reader =
new InputStreamReader(getInputStream())
/** Append string to content. */
def write(string: String) {
write(string.getBytes("UTF-8"))
}
/** Append bytes to content. */
def write(bytes: Array[Byte]) {
getContent match {
case buffer: DynamicChannelBuffer =>
buffer.writeBytes(bytes)
case _ =>
val buffer = ChannelBuffers.wrappedBuffer(bytes)
write(buffer)
}
}
/** Append ChannelBuffer to content.
*
* If `isChunked` then multiple writes must be composed using `writer` and
* `flatMap` to have the appropriate backpressure semantics.
*
* Attempting to `write` after calling `close` will result in a thrown
* [[com.twitter.util.Reader.ReaderDiscarded]].
*/
@throws(classOf[BufReader.ReaderDiscarded])
@throws(classOf[IllegalStateException])
def write(buffer: ChannelBuffer) {
if (isChunked) writeChunk(buffer) else {
getContent match {
case ChannelBuffers.EMPTY_BUFFER =>
setContent(buffer)
case content =>
setContent(ChannelBuffers.wrappedBuffer(content, buffer))
}
}
}
/**
* Use content as OutputStream. Content is replaced with stream contents.
* (Java users can use this with a Function, or use Netty's ChannelBufferOutputStream
* and then call setContent() with the underlying buffer.)
*/
def withOutputStream[T](f: OutputStream => T): T = {
// Use buffer size of 1024. Netty default is 256, which seems too small.
// Netty doubles buffers on resize.
val outputStream = new ChannelBufferOutputStream(ChannelBuffers.dynamicBuffer(1024))
val result = f(outputStream) // throws
outputStream.close()
write(outputStream.buffer)
result
}
/** Use as a Writer. Content is replaced with writer contents. */
def withWriter[T](f: Writer => T): T = {
withOutputStream { outputStream =>
val writer = new OutputStreamWriter(outputStream, Message.Utf8)
val result = f(writer)
writer.close()
// withOutputStream will write()
result
}
}
/** Clear content (set to ""). */
def clearContent() {
setContent(ChannelBuffers.EMPTY_BUFFER)
}
/** End the response stream. */
def close() = writer.close()
private[this] def writeChunk(buf: ChannelBuffer) {
if (buf.readable) {
val future = writer.write(new ChannelBufferBuf(buf))
// Unwraps the future in the Return case, or throws exception in the Throw case.
if (future.isDefined) Await.result(future)
}
}
}
object Message {
  private[httpx] val Utf8 = Charset.forName("UTF-8")
  @deprecated("Use MediaType.Json", "6.1.5")
  val MediaTypeJson = "application/json"
  @deprecated("Use MediaType.Javascript", "6.1.5")
  val MediaTypeJavascript = "application/javascript"
  @deprecated("Use MediaType.WwwForm", "6.1.5")
  val MediaTypeWwwForm = "application/x-www-form-urlencoded"
  // Charset parameter appended to the Content-Type constants below.
  val CharsetUtf8 = "charset=utf-8"
  val ContentTypeJson = MediaType.Json + ";" + CharsetUtf8
  val ContentTypeJavascript = MediaType.Javascript + ";" + CharsetUtf8
  // NOTE(review): "WwwFrom" is a typo for "WwwForm", kept because the name is public API.
  val ContentTypeWwwFrom = MediaType.WwwForm + ";" + CharsetUtf8
  // HTTP date formatter (always GMT); commons-lang FastDateFormat is thread-safe.
  private val HttpDateFormat = FastDateFormat.getInstance("EEE, dd MMM yyyy HH:mm:ss",
    TimeZone.getTimeZone("GMT"))
  /** Format a Date as an HTTP date header value, e.g. "Tue, 15 Nov 1994 08:12:31 GMT". */
  def httpDateFormat(date: Date): String =
    HttpDateFormat.format(date) + " GMT"
}
| yancl/finagle-6.22.0 | finagle-httpx/src/main/scala/com/twitter/finagle/httpx/Message.scala | Scala | apache-2.0 | 15,705 |
package myproblem
class Foo
| Tapad/sbt-jarjar | src/sbt-test/sbtjarjar/sample-project/src/main/scala/myproblem/Foo.scala | Scala | bsd-3-clause | 29 |
package lib
import io.apibuilder.spec.v0.models.{Enum, EnumValue, Field, Model, Service, Union, UnionType}
object ServiceBuilder {

  /** Builder-style helpers for constructing [[Service]] fixtures in tests. */
  implicit class ServiceBuilder(val service: Service) extends AnyVal {

    /** Appends a model named `name`, customised by `modelTaylor`. */
    def withModel(name: String,
                  modelTaylor: Model => Model = identity): Service = {
      val blank = Model(name = name, plural = s"${name}s", fields = Nil)
      service.copy(models = service.models :+ modelTaylor(blank))
    }

    /** Appends an enum named `name`, customised by `enumTailor`. */
    def withEnum(name: String,
                 enumTailor: Enum => Enum = identity): Service = {
      val blank = Enum(name = name, plural = s"${name}s", values = Nil)
      service.copy(enums = service.enums :+ enumTailor(blank))
    }

    /** Appends a union named `name`, customised by `unionTailor`. */
    def withUnion(name: String,
                  unionTailor: Union => Union = identity,
                  discriminator: Option[String] = None): Service = {
      val blank = Union(name = name, plural = s"${name}s", discriminator = discriminator, types = Nil)
      service.copy(unions = service.unions :+ unionTailor(blank))
    }
  }

  /** Builder-style helpers for [[Model]]. */
  implicit class ModelBuilder(val model: Model) extends AnyVal {

    /** Appends a field to the model. */
    def withField(name: String,
                  fieldType: String,
                  default: Option[String] = None,
                  required: Boolean = true): Model =
      model.copy(fields = model.fields :+ Field(
        name = name,
        `type` = fieldType,
        default = default,
        required = required))
  }

  /** Builder-style helpers for [[Enum]]. */
  implicit class EnumBuilder(val `enum`: Enum) extends AnyVal {

    /** Appends a value to the enum. */
    def withValue(name: String,
                  value: Option[String] = None): Enum =
      `enum`.copy(values = `enum`.values :+ EnumValue(name = name, value = value))
  }

  /** Builder-style helpers for [[Union]]. */
  implicit class UnionBuilder(val union: Union) extends AnyVal {

    /** Appends a type to the union; `default` is only set when `isDefault` is true. */
    def withType(`type`: String,
                 discriminatorValue: Option[String] = None,
                 isDefault: Boolean = false): Union =
      union.copy(types = union.types :+ UnionType(
        `type` = `type`,
        default = if (isDefault) Some(true) else None,
        discriminatorValue = discriminatorValue))
  }
}
| apicollective/apibuilder | api/app/lib/ServiceBuilder.scala | Scala | mit | 2,374 |
package info.simsimy.MsgPackParser
import info.simsimy.MsgPackParser.NodeTypes._
/**
* Created by sim on 15/12/2016.
*/
object EasyParser {
private val Parser = new ParserTools(getNode)
def Parse(data: Array[Byte]): Node = {
return getNode(new ByteReader(data))
}
private def getNode(data: ByteReader): Node = {
val inp = data.getByte()
val b = (inp & 0xFF)
return b match {
case 0xc0 => NullNode()
case 0xc1 => UnusedNode()
case 0xc2 => FalseNode
case 0xc3 => TrueNode
case 0xc4 => ByteArrayNode(Parser.getBinArray(data, 1))
case 0xc5 => ByteArrayNode(Parser.getBinArray(data, 2))
case 0xc6 => ByteArrayNode(Parser.getBinArray(data, 4))
case 0xc7 => ExtNode(Parser.readExtData(data, 1))
case 0xc8 => ExtNode(Parser.readExtData(data, 2))
case 0xc9 => ExtNode(Parser.readExtData(data, 4))
case 0xca => DoubleNode(data.getFloat())
case 0xcb => DoubleNode(data.getDouble())
case 0xcc => BigIntNode(data.getUShort().asInstanceOf[Int])
case 0xcd => BigIntNode(data.getUInt())
case 0xce => BigIntNode(data.getULong())
case 0xcf => BigIntNode(data.getUBigInt())
case 0xd0 => BigIntNode(data.getShort().asInstanceOf[Int])
case 0xd1 => BigIntNode(data.getInt())
case 0xd2 => BigIntNode(data.getLong())
case 0xd3 => BigIntNode(data.getBigInt())
case 0xd4 => ExtNode(ExtData(data.getShort(), data.getBytes(1)))
case 0xd5 => ExtNode(ExtData(data.getShort(), data.getBytes(2)))
case 0xd6 => ExtNode(ExtData(data.getShort(), data.getBytes(4)))
case 0xd7 => ExtNode(ExtData(data.getShort(), data.getBytes(8)))
case 0xd8 => ExtNode(ExtData(data.getShort(), data.getBytes(16)))
case 0xd9 => StringNode(Parser.getStrng(data, 1))
case 0xda => StringNode(Parser.getStrng(data, 2))
case 0xdb => StringNode(Parser.getStrng(data, 32))
case 0xdc => NodeArrayNode(Parser.getArray(data, 2))
case 0xdd => NodeArrayNode(Parser.getArray(data, 4))
case 0xde => MapNode(Parser.getMap(data, 2))
case 0xdf => MapNode(Parser.getMap(data, 4))
case i if (b <= 0x7f) => ShortIntNode((i & 0xFF).asInstanceOf[Short])
case i if (b >= 0x80 && b <= 0x8f) => MapNode(Parser.getFixMap(data, (i.asInstanceOf[Short] - 0x80.asInstanceOf[Short]).asInstanceOf[Short]))
case i if (b >= 0x90 && b <= 0x9f) => NodeArrayNode(Parser.getFixArray(data, (i.asInstanceOf[Short] - 0x90.asInstanceOf[Short]).asInstanceOf[Short]))
case i if (b >= 0xa0 && b <= 0xbf) => StringNode(Parser.getFixString(data, (i.asInstanceOf[Short] - 0xa0.asInstanceOf[Short]).asInstanceOf[Short]))
case i if (b >= 0xe0 && b <= 0xff) => ShortIntNode((i.asInstanceOf[Short] - (0xFF).asInstanceOf[Short] - 1).asInstanceOf[Short])
}
}
  /**
   * Implicit conversions between plain Scala values and [[Node]] wrappers.
   *
   * NOTE(review): several conversions are asymmetric or surprising — e.g.
   * ShortIntNodeToNode wraps a Short in a BigIntNode, FloatNodeToNode produces
   * a DoubleNode, and NodeToNullNode yields null. Import selectively.
   */
  object Implicits {
    implicit def MapNodeToMap(s: ValueNode[Map[Node, Node]]): Map[Node, Node] = s.value
    implicit def MapToMapNode(s: Map[Node, Node]) = MapNode(s)
    implicit def bArrToByteArrayNode(s: Array[Byte]) = ByteArrayNode(s)
    implicit def ByteArrayNodeToBArr(t: ByteArrayNode) = t.value
    implicit def NodeArrayToNode(s: Array[Node]) = NodeArrayNode(s)
    implicit def NodeToNodeArrayNode(t: NodeArrayNode) = t.value
    implicit def ExtDataToNode(s: ExtData) = ExtNode(s)
    implicit def NodeToExtNode(t: ExtNode) = t.value
    implicit def MapNodeToNode(s: Map[Node, Node]) = MapNode(s)
    implicit def NodeToMapNode(t: MapNode) = t.value
    implicit def ArrayNodeToNode(s: Array[Node]) = ArrayNode(s)
    implicit def NodeToArrayNode(t: ArrayNode) = t.value
    implicit def StringNodeToNode(s: String) = StringNode(s)
    implicit def NodeToStringNode(t: StringNode) = t.value
    implicit def BooleanNodeToNode(s: Boolean) = BooleanNode(s)
    implicit def NodeToBooleanNode(t: BooleanNode) = t.value
    implicit def NodeToNullNode(t: NullNode) = null
    implicit def ShortIntNodeToNode(s: Short) = BigIntNode(s.asInstanceOf[Int])
    implicit def NodeToShortIntNode(t: ShortIntNode) = t.value
    implicit def IntNodeToNode(s: Int) = BigIntNode(s)
    implicit def NodeToIntNode(t: IntNode) = t.value
    implicit def LongIntNodeToNode(s: Long) = BigIntNode(s)
    implicit def NodeToLongIntNode(t: LongIntNode) = t.value
    implicit def BigIntIntNodeToNode(s: BigInt) = BigIntNode(s)
    implicit def NodeToBigIntIntNode(t: BigIntNode) = t.value
    implicit def FloatNodeToNode(s: Float) = DoubleNode(s)
    implicit def NodeToFloatNode(t: FloatNode) = t.value
    implicit def DoubleNodeToNode(s: Double) = DoubleNode(s)
    implicit def NodeToDoubleNode(t: DoubleNode) = t.value
  }
}
| SimSimY/scala-msgpack-rawparser | src/main/scala/info/simsimy/MsgPackParser/EasyParser.scala | Scala | apache-2.0 | 4,680 |
package play.api.test.ops
import akka.stream.Materializer
import org.scalatest.{Args, AsyncFreeSpec, Status => TestStatus}
import play.api.http.{MimeTypes, Status}
import play.api.libs.json.{JsValue, Json}
import play.api.mvc._
import play.api.test._
import play.api.{Application, Play}
import scala.concurrent.ExecutionContext
class AsyncResultExtractorsSpec extends AsyncFreeSpec
with AsyncResultExtractors
with EssentialActionCaller
with Writeables {
implicit private lazy val app: Application = FakeApplication()
implicit private lazy val mat: Materializer = app.materializer
implicit private lazy val ec: ExecutionContext = mat.executionContext
  /**
   * Runs the tests against a single shared Play application: started once
   * before any test runs, and stopped only after the whole run completes.
   */
  protected override def runTests(testName: Option[String], args: Args): TestStatus = {
    // Use a single application for all the suites
    Play.start(app)
    val resultStatus = super.runTests(testName, args)
    // Stop the application asynchronously once every test has finished.
    resultStatus.whenCompleted(_ => Play.stop(app))
    resultStatus
  }
  /** Controller whose actions echo parts of the request back, exercising the extractors. */
  class TestEchoController extends Controller {
    /** Echoes the text body, or "Missing body" when the body is not text. */
    def echoTextBody: EssentialAction = Action { request =>
      Ok(request.body.asText.getOrElse("Missing body"))
    }
    /** Echoes the JSON body verbatim. */
    def echoJsonBody: Action[JsValue] = Action(parse.json) { request =>
      Ok(request.body)
    }
    /** Reads {"name": ..., "value": ...} and echoes the pair as a response header. */
    def echoJsonInHeader: EssentialAction = Action(parse.json) { request =>
      val name = (request.body \ "name").as[String]
      val value = (request.body \ "value").as[String]
      Ok.withHeaders(name -> value)
    }
    /** Reads {"name": ..., "value": ...} and echoes the pair as a cookie. */
    def echoJsonInCookie: EssentialAction = Action(parse.json) { request =>
      val name = (request.body \ "name").as[String]
      val value = (request.body \ "value").as[String]
      Ok.withCookies(Cookie(name, value))
    }
    /** Copies the JSON object (string -> string) into the session. */
    def echoJsonInSession: EssentialAction = Action(parse.json) { request =>
      val sessionData = request.body.as[Map[String, String]]
      Ok.withSession(sessionData.toSeq: _*)
    }
    /** Copies the JSON object (string -> string) into the flash scope. */
    def echoJsonInFlash: EssentialAction = Action(parse.json) { request =>
      val flashData = request.body.as[Map[String, String]]
      Ok.flashing(flashData.toSeq: _*)
    }
    /** Redirects to body.url using body.status as the redirect status code. */
    def redirectToBody: EssentialAction = Action(parse.json) { request =>
      val url = (request.body \ "url").as[String]
      val status = (request.body \ "status").as[Int]
      Redirect(url, status)
    }
  }
protected def method(name: String) = s"play25.AsyncResultExtractors.$name"
behave like parsesContentUsing("ActorMaterializer", app.materializer)
behave like parsesContentUsing("NoMaterializer", NoMaterializer)
protected def parsesContentUsing(materializerName: String, contentMaterializer: Materializer): Unit = {
s"${method("contentAsString")}($materializerName) should extract the expected text" in {
val ctrl = new TestEchoController
val testString = "test"
val request = FakeRequest("POST", s"/test/contentAsString?mat=$materializerName").withTextBody(testString)
for {
result <- call(ctrl.echoTextBody, request)
resultBody <- contentAsString(result)(implicitly, contentMaterializer)
} yield {
assertResult(testString) {
resultBody
}
}
}
s"${method("contentAsJson")}($materializerName) should extract the expected json" in {
val ctrl = new TestEchoController
val testJson = Json.obj("expected" -> "json")
val request = FakeRequest("POST", s"/test/contentAsJson?mat=$materializerName").withJsonBody(testJson)
for {
result <- call(ctrl.echoJsonBody, request)
resultBody <- contentAsJson(result)(implicitly, contentMaterializer)
} yield {
assertResult(testJson) {
resultBody
}
}
}
}
s"${method("status")} should return the status code" in {
val ctrl = new TestEchoController
val request = FakeRequest("POST", "/test/status")
for {
result <- call(ctrl.echoTextBody, request)
} yield {
assertResult(Status.OK) {
status(result)
}
}
}
s"${method("contentType")} should extract the expected content type" in {
val ctrl = new TestEchoController
val testJson = Json.obj()
val request = FakeRequest("POST", "/test/contentType").withJsonBody(testJson)
for {
result <- call(ctrl.echoJsonBody, request)
} yield {
assertResult(Some(MimeTypes.JSON)) {
contentType(result)
}
}
}
s"${method("charset")} should extract the expected charset" in {
val ctrl = new TestEchoController
val testString = "test"
val request = FakeRequest("POST", "/test/charset").withTextBody(testString)
for {
result <- call(ctrl.echoTextBody, request)
} yield {
assertResult(Some(Codec.utf_8.charset)) {
charset(result)
}
}
}
s"${method("header")} should extract the expected header" in {
val ctrl = new TestEchoController
val expectedHeaderName = "expected"
val expectedHeaderValue = "value"
val request = FakeRequest("POST", "/test/header").withJsonBody(Json.obj(
"name" -> expectedHeaderName,
"value" -> expectedHeaderValue
))
for {
result <- call(ctrl.echoJsonInHeader, request)
} yield {
assertResult(Some(expectedHeaderValue)) {
header(expectedHeaderName, result)
}
}
}
s"${method("cookies")} should extract the expected cookie" in {
val ctrl = new TestEchoController
val expectedCookie = Cookie("expected", "cookie")
val request = FakeRequest("POST", "/test/cookie").withJsonBody(Json.obj(
"name" -> expectedCookie.name,
"value" -> expectedCookie.value
))
for {
result <- call(ctrl.echoJsonInCookie, request)
} yield {
assertResult(Some(expectedCookie)) {
cookies(result).get(expectedCookie.name)
}
}
}
s"${method("session")} should extract the expected session data" in {
val ctrl = new TestEchoController
val expectedSession = Session(Map("k1" -> "v1", "k2" -> "v2"))
val request = FakeRequest("POST", "/test/session").withJsonBody(Json.toJson(expectedSession.data))
for {
result <- call(ctrl.echoJsonInSession, request)
} yield {
assertResult(expectedSession) {
session(result)
}
}
}
s"${method("flash")} should extract the expected flash data" in {
val ctrl = new TestEchoController
val expectedFlash = Flash(Map("k1" -> "v1", "k2" -> "v2"))
val request = FakeRequest("POST", "/test/flash").withJsonBody(Json.toJson(expectedFlash.data))
for {
result <- call(ctrl.echoJsonInFlash, request)
} yield {
assertResult(expectedFlash) {
flash(result)
}
}
}
s"${method("redirectLocation")} should extract the expected redirect url from 301" in {
val ctrl = new TestEchoController
val redirectUrl = "test redirect"
val request = FakeRequest("POST", "/test/redirect").withJsonBody(Json.obj(
"url" -> redirectUrl,
"status" -> Status.MOVED_PERMANENTLY
))
for {
result <- call(ctrl.redirectToBody, request)
} yield {
assertResult(Some(redirectUrl)) {
redirectLocation(result)
}
}
}
s"${method("redirectLocation")} should extract the expected redirect url from 302" in {
val ctrl = new TestEchoController
val redirectUrl = "test redirect"
val request = FakeRequest("POST", "/test/redirect").withJsonBody(Json.obj(
"url" -> redirectUrl,
"status" -> Status.FOUND
))
for {
result <- call(ctrl.redirectToBody, request)
} yield {
assertResult(Some(redirectUrl)) {
redirectLocation(result)
}
}
}
s"${method("redirectLocation")} should extract the expected redirect url from 303" in {
val ctrl = new TestEchoController
val redirectUrl = "test redirect"
val request = FakeRequest("POST", "/test/redirect").withJsonBody(Json.obj(
"url" -> redirectUrl,
"status" -> Status.SEE_OTHER
))
for {
result <- call(ctrl.redirectToBody, request)
} yield {
assertResult(Some(redirectUrl)) {
redirectLocation(result)
}
}
}
s"${method("redirectLocation")} should extract the expected redirect url from 307" in {
val ctrl = new TestEchoController
val redirectUrl = "test redirect"
val request = FakeRequest("POST", "/test/redirect").withJsonBody(Json.obj(
"url" -> redirectUrl,
"status" -> Status.TEMPORARY_REDIRECT
))
for {
result <- call(ctrl.redirectToBody, request)
} yield {
assertResult(Some(redirectUrl)) {
redirectLocation(result)
}
}
}
s"${method("redirectLocation")} should NOT extract a redirect url from a 400" in {
val ctrl = new TestEchoController
val redirectUrl = "test redirect"
val request = FakeRequest("POST", "/test/redirect").withJsonBody(Json.obj(
"url" -> redirectUrl,
"status" -> Status.BAD_REQUEST
))
for {
result <- call(ctrl.redirectToBody, request)
} yield {
assertResult(None) {
redirectLocation(result)
}
}
}
}
| jeffmay/play-test-ops | play25-core/src/test/scala/play/api/test/ops/AsyncResultExtractorsSpec.scala | Scala | apache-2.0 | 9,005 |
package eu.pulsation.slickexample
import scala.concurrent._
import scala.language.implicitConversions
import ExecutionContext.Implicits.global
import scala.slick.driver.SQLiteDriver.simple._
import scala.slick.jdbc.meta.MTable
import android.app.Activity
import android.os.Bundle
import android.view.View
import android.widget.{Toast, EditText, TextView}
import android.text.method.ScrollingMovementMethod
/**
 * Minimal Android activity demonstrating Slick with a SQLite database via SQLDroid:
 * a single table of (id, text) rows that can be listed, appended to and cleared from
 * the UI. All database work runs off the UI thread via Futures; results are pushed
 * back to the UI with runOnUiThread.
 */
class SlickAndroidExample extends Activity
{
  // Table name in the SQL database
  final val TableName = "MY_DATA"

  // Table definition
  class MyData(tag: Tag) extends Table[(Int, String)](tag, TableName) {
    def id = column[Int]("ID", O.PrimaryKey, O.AutoInc) // This is the primary key column.
    def name = column[String]("SOME_TEXT")
    // Every table needs a * projection with the same type as the table's type parameter.
    def * = (id, name)
  }

  // Table representation instance
  val myData = TableQuery[MyData]

  // Database connection.
  // NOTE(review): getFilesDir() is concatenated without a path separator, so the file
  // is actually named ".../filesslick-sandbox.txt". It works, but looks unintended;
  // left unchanged so existing installations keep finding their data.
  lazy val db = Database.forURL("jdbc:sqlite:" +
    getApplicationContext().getFilesDir() +
    "slick-sandbox.txt", driver = "org.sqldroid.SQLDroidDriver")

  // Views (lazy: findViewById is only valid after setContentView in onCreate)
  lazy val mText : TextView = findViewById(R.id.text) match { case t : TextView => t }
  lazy val mEdit : EditText = findViewById(R.id.data) match { case e : EditText => e }

  /**
   * Create the table if needed
   */
  def createTable() = {
    db withSession { implicit session =>
      if (MTable.getTables(TableName).list().isEmpty) {
        myData.ddl.create
      }
    }
  }

  /**
   * Fetch all rows
   * @return rows
   */
  def fetchRows() = {
    db withSession {
      implicit session =>
        // Get existing rows
        myData.list
    }
  }

  /**
   * Display data in text view
   * @param rows Data to display
   */
  def displayDataList(rows : List[(Int,String)]) = {
    mText.setText("")
    rows foreach({ case (id : Int, name: String) =>
      mText.append(id +
        ". " + name +
        System.getProperty("line.separator"))
    })
  }

  /**
   * Add one row to table
   */
  def saveData() : Unit = {
    // This is an example usage of an implicit database session.
    db withSession {
      implicit session =>
        // Add a row to the existing data set (id 0 is replaced by auto-increment)
        myData += (0, mEdit.getText().toString)
    }
  }

  /**
   * Remove data from table
   */
  def clearData() : Unit = {
    // In opposition to saveData(), this is an example of using
    // an explicit session. It could have been implicit as well.
    val session = db.createSession()
    try {
      // Delete all rows
      myData.delete(session)
    } finally {
      // Fix: the explicit session was previously never closed, leaking the
      // underlying JDBC connection on every clear.
      session.close()
    }
  }

  // Implicit conversion to Runnable when called by runOnUiThread().
  implicit def toRunnable[F](f: => F): Runnable = new Runnable() { def run() = f }

  /**
   * Process data, then fetch all data to update the UI.
   * @param process database mutation to run asynchronously before refreshing
   */
  def processThenDisplay(process : () => Unit) : Future[List[(Int, String)]] = {
    val fProcessData = Future { process() }
    val fFetchData : Future[List[(Int, String)]] = fProcessData map((nothing) => {
      // This will be executed after data has been processed.
      fetchRows()
    })
    fFetchData onSuccess {
      case rows =>
        // UI widgets may only be touched from the UI thread.
        runOnUiThread({ displayDataList(rows) })
    }
    fFetchData
  }

  // Initialize table (lazy so the future is created exactly once, on first access)
  lazy val initFuture = processThenDisplay(createTable)

  /**
   * Called when the activity is first created.
   */
  override def onCreate(savedInstanceState : Bundle)
  {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.main)
    mText.setMovementMethod(new ScrollingMovementMethod())
    initFuture
  }

  /**
   * Displays an error message
   */
  def notifyError() {
    Toast.makeText(getApplicationContext(), "Database not initialized yet", Toast.LENGTH_SHORT).show()
  }

  /**
   * Clear table data
   */
  def clearText(view : View) {
    if (initFuture.isCompleted) {
      processThenDisplay(clearData)
    } else {
      notifyError()
    }
  }

  /**
   * Insert edit text contents into database
   */
  def addContent(view : View) {
    if (initFuture.isCompleted) {
      processThenDisplay(saveData)
    } else {
      notifyError()
    }
  }
}
| pulsation/slick-android-example | src/eu/pulsation/slickexample/SlickAndroidExample.scala | Scala | bsd-2-clause | 4,143 |
package org.littlewings.infinispan.icklequery
import org.infinispan.Cache
import org.infinispan.manager.DefaultCacheManager
import org.infinispan.objectfilter.ParsingException
import org.infinispan.query.Search
import org.scalatest.{FunSuite, Matchers}
/**
 * Exercises Infinispan's Ickle query language against embedded caches:
 * index-less relational queries (with parameters and aggregation), full-text
 * predicates on indexed/analyzed entities, and the parsing errors raised when
 * the wrong predicate kind is applied to a field.
 */
class IckleQuerySpec extends FunSuite with Matchers {

  // Fixture for the non-indexed cache: (isbn, title, price, category).
  val books: Array[Book] = Array(
    Book("978-4798142470", "Spring徹底入門 Spring FrameworkによるJavaアプリケーション開発", 4320, "Spring"),
    Book("978-4774182179", "[改訂新版]Spring入門 ――Javaフレームワーク・より良い設計とアーキテクチャ", 4104, "Spring"),
    Book("978-4774161631", "[改訂新版] Apache Solr入門 ~オープンソース全文検索エンジン", 3888, "全文検索"),
    Book("978-4048662024", "高速スケーラブル検索エンジン ElasticSearch Server", 6915, "全文検索"),
    Book("978-4774183169", "パーフェクト Java EE", 3456, "Java EE"),
    Book("978-4798140926", "Java EE 7徹底入門 標準Javaフレームワークによる高信頼性Webシステムの構築", 4104, "Java EE")
  )

  test("index-less simple Ickle Query") {
    withCache[String, Book]("bookCache", 3) { cache =>
      books.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      val query =
        queryFactory.create(
          """|from org.littlewings.infinispan.icklequery.Book b
             |where b.price > 5000
             |and b.title = '高速スケーラブル検索エンジン ElasticSearch Server'""".stripMargin)

      val resultBooks = query.list[Book]()

      resultBooks should have size (1)
      resultBooks.get(0).getIsbn should be("978-4048662024")
      resultBooks.get(0).getTitle should be("高速スケーラブル検索エンジン ElasticSearch Server")
      resultBooks.get(0).getPrice should be(6915)
      resultBooks.get(0).getCategory should be("全文検索")
    }
  }

  test("index-less simple Ickle Query, parameterized") {
    withCache[String, Book]("bookCache", 3) { cache =>
      books.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // Named parameters (":price", ":title") bound after creation.
      val query =
        queryFactory.create(
          """|from org.littlewings.infinispan.icklequery.Book b
             |where b.price > :price
             |and b.title = :title""".stripMargin)
      query.setParameter("price", 5000)
      query.setParameter("title", "高速スケーラブル検索エンジン ElasticSearch Server")

      val resultBooks = query.list[Book]()

      resultBooks should have size (1)
      resultBooks.get(0).getIsbn should be("978-4048662024")
      resultBooks.get(0).getTitle should be("高速スケーラブル検索エンジン ElasticSearch Server")
      resultBooks.get(0).getPrice should be(6915)
      resultBooks.get(0).getCategory should be("全文検索")
    }
  }

  test("index-less simple Ickle Query, aggregation") {
    withCache[String, Book]("bookCache", 3) { cache =>
      books.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // group by / having / order by: each result row is Array(category, sum(price)).
      val query =
        queryFactory.create(
          """|select b.category, sum(b.price)
             |from org.littlewings.infinispan.icklequery.Book b
             |where b.price > :price
             |group by b.category
             |having sum(b.price) > :sumPrice
             |order by sum(b.price) desc""".stripMargin)
      query.setParameter("price", 4000)
      query.setParameter("sumPrice", 5000)

      val results = query.list[Array[AnyRef]]()

      results should have size (2)

      results.get(0)(0) should be("Spring")
      results.get(0)(1) should be(8424)

      results.get(1)(0) should be("全文検索")
      results.get(1)(1) should be(6915)
    }
  }

  // Same fixture, but for the indexed cache (full-text queries require indexing).
  val indexedBooks: Array[IndexedBook] = Array(
    IndexedBook("978-4798142470", "Spring徹底入門 Spring FrameworkによるJavaアプリケーション開発", 4320, "Spring"),
    IndexedBook("978-4774182179", "[改訂新版]Spring入門 ――Javaフレームワーク・より良い設計とアーキテクチャ", 4104, "Spring"),
    IndexedBook("978-4774161631", "[改訂新版] Apache Solr入門 ~オープンソース全文検索エンジン", 3888, "全文検索"),
    IndexedBook("978-4048662024", "高速スケーラブル検索エンジン ElasticSearch Server", 6915, "全文検索"),
    IndexedBook("978-4774183169", "パーフェクト Java EE", 3456, "Java EE"),
    IndexedBook("978-4798140926", "Java EE 7徹底入門 標準Javaフレームワークによる高信頼性Webシステムの構築", 4104, "Java EE")
  )

  test("indexed entity Ickle Query, full text query") {
    withCache[String, IndexedBook]("indexedBookCache", 3) { cache =>
      indexedBooks.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // "b.title: '...'" is the full-text predicate, only valid on analyzed fields.
      val query =
        queryFactory.create(
          """|from org.littlewings.infinispan.icklequery.IndexedBook b
             |where b.price < 5000
             |and b.title: '全文検索'""".stripMargin)

      val resultBooks = query.list[IndexedBook]()

      resultBooks should have size (1)
      resultBooks.get(0).getIsbn should be("978-4774161631")
      resultBooks.get(0).getTitle should be("[改訂新版] Apache Solr入門 ~オープンソース全文検索エンジン")
      resultBooks.get(0).getPrice should be(3888)
      resultBooks.get(0).getCategory should be("全文検索")
    }
  }

  test("indexed entity Ickle Query, analyzed field can't applied eq") {
    withCache[String, IndexedBook]("indexedBookCache", 3) { cache =>
      indexedBooks.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // Relational "=" is rejected on analyzed fields at query-creation time.
      val thrown =
        the[ParsingException] thrownBy
          queryFactory.create(
            """|from org.littlewings.infinispan.icklequery.IndexedBook b
               |where b.price > 5000
               |and b.title = '全文検索'""".stripMargin)

      thrown.getMessage should be("ISPN028522: No relational queries can be applied to property 'title' in type org.littlewings.infinispan.icklequery.IndexedBook since the property is analyzed.")
    }
  }

  test("index-less Ickle Query, can't applied full text predicate") {
    withCache[String, Book]("bookCache", 3) { cache =>
      books.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // The full-text operator is rejected on non-indexed entities.
      val thrown =
        the[ParsingException] thrownBy
          queryFactory.create(
            """|from org.littlewings.infinispan.icklequery.Book b
               |where b.title: '高速スケーラブル検索エンジン ElasticSearch Server'""".stripMargin)

      thrown.getMessage should be("ISPN028521: Full-text queries cannot be applied to property 'title' in type org.littlewings.infinispan.icklequery.Book unless the property is indexed and analyzed.")
    }
  }

  test("indexed entity Ickle Query, full text query, aggregation") {
    withCache[String, IndexedBook]("indexedBookCache", 3) { cache =>
      indexedBooks.foreach(b => cache.put(b.isbn, b))

      val queryFactory = Search.getQueryFactory(cache)

      // Boolean full-text expression: must contain '入門' and must not contain '検索'.
      val query =
        queryFactory.create(
          """|select b.category, sum(b.price)
             |from org.littlewings.infinispan.icklequery.IndexedBook b
             |where b.title: (+'入門' and -'検索')
             |group by b.category
             |order by sum(b.price) desc""".stripMargin)

      val results = query.list[Array[AnyRef]]()

      results should have size (2)

      results.get(0)(0) should be("Spring")
      results.get(0)(1) should be(8424)

      results.get(1)(0) should be("Java EE")
      results.get(1)(1) should be(4104)
    }
  }

  /**
   * Runs `fun` against a cache taken from a small embedded cluster of
   * `numInstances` cache managers (configured from infinispan.xml), stopping
   * every manager afterwards.
   */
  protected def withCache[K, V](cacheName: String, numInstances: Int = 1)(fun: Cache[K, V] => Unit): Unit = {
    val managers = (1 to numInstances).map(_ => new DefaultCacheManager("infinispan.xml"))
    managers.foreach(_.getCache(cacheName))

    try {
      val cache = managers(0).getCache[K, V](cacheName)
      fun(cache)
      cache.stop()
    } finally {
      managers.foreach(_.stop())
    }
  }
}
| kazuhira-r/infinispan-getting-started | embedded-ickle-query/src/test/scala/org/littlewings/infinispan/icklequery/IckleQuerySpec.scala | Scala | mit | 8,114 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import java.net._
import java.nio._
import java.nio.channels._
import java.util.concurrent.atomic._
import org.apache.log4j.Logger
import kafka.api._
import kafka.common._
import kafka.message._
import kafka.network._
import kafka.utils._
/**
 * A consumer of kafka messages.
 *
 * Maintains a single blocking SocketChannel to one broker. All public request
 * methods are serialized on an internal lock; each request is retried exactly
 * once after an IOException by reconnecting before the exception is rethrown.
 */
@threadsafe
class SimpleConsumer(val host: String,
                     val port: Int,
                     val soTimeout: Int,
                     val bufferSize: Int) {
  private val logger = Logger.getLogger(getClass())
  // Lazily (re)created connection; null when disconnected.
  private var channel : SocketChannel = null
  // Guards channel and serializes all request/response round-trips.
  private val lock = new Object()

  /** Opens a blocking socket to the broker with the configured buffer size and timeout. */
  private def connect(): SocketChannel = {
    val address = new InetSocketAddress(host, port)

    val channel = SocketChannel.open
    if(logger.isDebugEnabled)
      logger.debug("Connected to " + address + " for fetching.")
    channel.configureBlocking(true)
    channel.socket.setReceiveBufferSize(bufferSize)
    channel.socket.setSoTimeout(soTimeout)
    channel.connect(address)
    if(logger.isTraceEnabled)
      logger.trace("requested receive buffer size=" + bufferSize + " actual receive buffer size= " + channel.socket.getReceiveBufferSize)

    channel
  }

  /** Closes the given channel; close errors are only logged since we are tearing down anyway. */
  private def close(channel: SocketChannel) = {
    if(logger.isDebugEnabled)
      logger.debug("Disconnecting from " + channel.socket.getRemoteSocketAddress())
    Utils.swallow(logger.warn, channel.close())
    Utils.swallow(logger.warn, channel.socket.close())
  }

  /** Closes the current connection, if any. */
  def close() {
    if (channel != null)
      close(channel)
    channel = null
  }

  /**
   * Fetch a set of messages from the given byte offset, no more than maxSize bytes are fetched.
   */
  def fetch(request: FetchRequest): ByteBufferMessageSet = {
    lock synchronized {
      val startTime = SystemTime.nanoseconds
      getOrMakeConnection()
      val response = sendAndReceive("fetch", request)
      val endTime = SystemTime.nanoseconds
      SimpleConsumerStats.recordFetchRequest(endTime - startTime)
      new ByteBufferMessageSet(response._1.buffer, response._2)
    }
  }

  /** Java-friendly overload: converts the list and delegates to the varargs version. */
  def multifetch(fetches: java.util.List[FetchRequest]): MultiFetchResponse = {
    val fetchesArray = fetches.toArray(new Array[FetchRequest](fetches.size))
    multifetch(fetchesArray:_*)
  }

  def multifetch(fetches: FetchRequest*): MultiFetchResponse = {
    lock synchronized {
      val startTime = SystemTime.nanoseconds
      getOrMakeConnection()
      val response = sendAndReceive("multifetch", new MultiFetchRequest(fetches.toArray))
      val endTime = SystemTime.nanoseconds
      SimpleConsumerStats.recordFetchRequest(endTime - startTime)
      // error code will be set on individual messageset inside MultiFetchResponse
      new MultiFetchResponse(response._1.buffer, fetches.length)
    }
  }

  /**
   * Get a list of valid offsets (up to maxSize) before the given time.
   * The result is a list of offsets, in descending order.
   * @param time: time in millisecs (if -1, just get from the latest available)
   */
  def getOffsetsBefore(topic: String, partition: Int, time: Long, maxNumOffsets: Int): Array[Long] = {
    lock synchronized {
      getOrMakeConnection()
      val response = sendAndReceive("getOffsetsBefore",
        new OffsetRequest(topic, partition, time, maxNumOffsets))
      OffsetRequest.deserializeOffsetArray(response._1.buffer)
    }
  }

  /**
   * Sends the request and reads the response, retrying exactly once after an
   * IOException by reconnecting; on a second failure the channel is dropped and
   * the exception rethrown. Must be called with `lock` held and a connection
   * established. `name` identifies the calling operation in log messages.
   *
   * (Previously this retry logic was duplicated verbatim in fetch, multifetch
   * and getOffsetsBefore, each using a null-initialized var for the response.)
   */
  private def sendAndReceive(name: String, request: Request): Tuple2[Receive, Int] = {
    try {
      sendRequest(request)
      getResponse
    } catch {
      case e : java.io.IOException =>
        logger.info(name + " reconnect due to " + e)
        // retry once
        try {
          channel = connect
          sendRequest(request)
          getResponse
        } catch {
          case ioe: java.io.IOException => channel = null; throw ioe
        }
    }
  }

  private def sendRequest(request: Request) = {
    val send = new BoundedByteBufferSend(request)
    send.writeCompletely(channel)
  }

  private def getResponse(): Tuple2[Receive,Int] = {
    val response = new BoundedByteBufferReceive()
    response.readCompletely(channel)

    // this has the side effect of setting the initial position of buffer correctly
    val errorCode: Int = response.buffer.getShort
    (response, errorCode)
  }

  /** Connects lazily: only opens a channel when none exists. */
  private def getOrMakeConnection() {
    if(channel == null) {
      channel = connect()
    }
  }
}
/** JMX MBean interface exposing SimpleConsumer fetch-request statistics. */
trait SimpleConsumerStatsMBean {
  /** Rate of fetch requests per second. */
  def getFetchRequestsPerSecond: Double
  /** Average fetch request duration, in milliseconds. */
  def getAvgFetchRequestMs: Double
  /** Maximum fetch request duration, in milliseconds. */
  def getMaxFetchRequestMs: Double
  /** Total number of fetch requests recorded. */
  def getNumFetchRequests: Long
}
/**
 * MBean implementation backed by SnapshotStats. Durations are recorded in
 * nanoseconds and converted to milliseconds (divide by 1e6) on read.
 */
@threadsafe
class SimpleConsumerStats extends SimpleConsumerStatsMBean {
  private val fetchRequestStats = new SnapshotStats

  /** Records one fetch request duration, in nanoseconds. */
  def recordFetchRequest(requestNs: Long) = fetchRequestStats.recordRequestMetric(requestNs)

  def getFetchRequestsPerSecond: Double = fetchRequestStats.getRequestsPerSecond

  def getAvgFetchRequestMs: Double = fetchRequestStats.getAvgMetric / (1000.0 * 1000.0)

  def getMaxFetchRequestMs: Double = fetchRequestStats.getMaxMetric / (1000.0 * 1000.0)

  def getNumFetchRequests: Long = fetchRequestStats.getNumRequests
}
/** Singleton that registers the stats MBean and forwards fetch-request timings to it. */
object SimpleConsumerStats {
  private val logger = Logger.getLogger(getClass())
  private val simpleConsumerstatsMBeanName = "kafka:type=kafka.SimpleConsumerStats"
  private val stats = new SimpleConsumerStats
  // Registration failure is logged but must not prevent consumer construction.
  Utils.swallow(logger.warn, Utils.registerMBean(stats, simpleConsumerstatsMBeanName))

  // NOTE(review): despite the parameter name, callers pass a duration in
  // NANOseconds (SystemTime.nanoseconds deltas), matching recordFetchRequest(requestNs)
  // on the stats class. Renaming would change the named-argument interface, so it is
  // documented here instead.
  def recordFetchRequest(requestMs: Long) = stats.recordFetchRequest(requestMs)
}
| jinfei21/kafka | src/kafka/consumer/SimpleConsumer.scala | Scala | apache-2.0 | 6,924 |
package org.scalawiki.wlx
import org.scalawiki.wlx.dto.Monument
object RegionFixer {
  /** Runs the region-fixing updater over every list of the given monument database. */
  def fixLists(monumentDb: MonumentDB): Unit = {
    val updater = new RegionFixerUpdater(monumentDb)
    ListUpdater.updateLists(monumentDb, updater)
  }
}
/**
 * MonumentUpdater that repairs the settlement ("city") parameter of monuments whose
 * administrative division is unknown or only resolved to a raion: it scans the first
 * few comma-separated components of the free-text `place` field for a name that
 * uniquely matches an administrative division of the monument's region.
 */
class RegionFixerUpdater(monumentDb: MonumentDB) extends MonumentUpdater {

  val contest = monumentDb.contest
  val country = contest.country
  // Oblast-level divisions; Kyiv and Sevastopol are special-status cities, not oblasts.
  val oblasts = country.regions.filter(adm => !Set("Київ", "Севастополь").contains(adm.name))
  val raions = oblasts.flatMap(_.regions).filter(_.name.endsWith("район"))
  val raionNames = raions.map(_.name).toSet
  // Wiki-list template parameter that holds the settlement name.
  val nameParam = contest.uploadConfigs.head.listConfig.namesMap("city")
  // Only the first three comma-separated components of `place` are considered.
  val maxIndex = 2

  /**
   * Comma-separated candidate names from the monument's free-text place field
   * (empty when the field is absent). Shared by updatedParams and getIndex,
   * which previously duplicated the split logic.
   */
  private def placeCandidates(m: Monument): Seq[String] =
    m.place.toSeq.flatMap(_.split(","))

  /** New template parameters: the unambiguous settlement name found by getIndex, or empty. */
  def updatedParams(m: Monument): Map[String, String] =
    getIndex(m).flatMap { index =>
      placeCandidates(m).lift(index).map(place => Map(nameParam -> place))
    }.getOrElse(Map.empty)

  /** True when the division is unknown (or only a raion name) and a fix candidate exists. */
  def needsUpdate(m: Monument): Boolean = {
    (monumentDb.getAdmDivision(m.id).isEmpty || raionNames.contains(m.cityName)) && getIndex(m).nonEmpty
  }

  /** Index (0..maxIndex) of the first place component that uniquely matches a division. */
  def getIndex(m: Monument): Option[Int] = {
    m.place.flatMap { p =>
      // Immutable sequence: the original built a mutable Buffer but never mutated it.
      val names = p.split(",").toIndexedSeq
      (0 to maxIndex).find { index =>
        names.lift(index).exists { candidate =>
          country.byIdAndName(m.id.take(6), candidate, m.cityType).size == 1
        }
      }
    }
  }
} | intracer/scalawiki | scalawiki-wlx/src/main/scala/org/scalawiki/wlx/RegionFixer.scala | Scala | apache-2.0 | 1,401 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.crdt.pure
import com.rbmhtechnology.eventuate.VectorTime
import com.rbmhtechnology.eventuate.crdt.pure.StabilityProtocol._
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
class StabilityProtocolSpec extends WordSpecLike with Matchers {

  val A = "A"
  val B = "B"
  val C = "C"

  def partitions: Set[String] = Set(A, B, C)

  def initialRTM = RTM(StabilityConf(A, partitions))

  def vt(a: Long, b: Long, c: Long) = VectorTime(A -> a, B -> b, C -> c)

  def tcstable(a: Long, b: Long, c: Long) = Some(TCStable(vt(a, b, c)))

  /** Applies the given partition/timestamp updates to a fresh RTM, in order. */
  private def rtmAfter(updates: (String, VectorTime)*) =
    updates.foldLeft(initialRTM) { case (rtm, (partition, timestamp)) =>
      rtm.update(partition, timestamp)
    }

  "Stability" should {
    "drop updates from local partition" in {
      val rtm = rtmAfter(A -> vt(1, 1, 1), B -> vt(2, 2, 2), C -> vt(2, 2, 2))
      rtm.stable shouldBe tcstable(2, 2, 2)
    }
    "not emit tcstable when B = (1,1,1), C = unknown " in {
      rtmAfter(B -> vt(1, 1, 1)).stable shouldBe None
    }
    "emit TCStable(0,1) when B = (0,1,1), C = (0,0,1) " in {
      val rtm = rtmAfter(B -> vt(0, 1, 1), C -> vt(0, 0, 1))
      rtm.stable shouldBe tcstable(0, 0, 1)
    }
    "emit TCStable(1,1) when A = (1,1,1), B = (1,1,1)" in {
      val rtm = rtmAfter(B -> vt(1, 1, 1), C -> vt(1, 1, 1))
      rtm.stable shouldBe tcstable(1, 1, 1)
    }
    "emit TCStable(1,1) when A = (2,1), B = (1,2)" in {
      val rtm = rtmAfter(B -> vt(2, 2, 1), C -> vt(1, 1, 2))
      rtm.stable shouldBe tcstable(1, 1, 1)
    }
  }
}
| RBMHTechnology/eventuate | eventuate-crdt-pure/src/test/scala/com/rbmhtechnology/eventuate/crdt/StabilityProtocolSpec.scala | Scala | apache-2.0 | 2,206 |
package sexybash
class DefaultAstFunctionBuilder extends AstFunctionBuilder {
  import AstFunctionBuilder._

  // TODO mmm I don't have state here
  override def build = new DefaultAstFunction

  /**
   * Advances the parser state machine by one input character.
   *
   * Bug fix: the specific NOOP transitions ('[' and Comment) now precede the NOOP
   * catch-all. Previously `case (NOOP, c: Char) if c != '['` came first and matched
   * every non-'[' character, which made `case (NOOP, Comment)` unreachable, so the
   * COMMENT state was never entered (assuming the Comment character is not '[').
   *
   * NOTE(review): the match has no case for other states (e.g. STRINGBLOCKSTARTED),
   * so a MatchError escapes there — presumably those characters are routed to the
   * builder returned for '['; confirm against the caller.
   */
  def process(c: Char, state: State): Either[Throwable, (State, AstFunctionBuilder)] = (state, c) match {
    case (NOOP, '[')        ⇒ Right((STRINGBLOCKSTARTED, StringBlockAstFunctionBuilder.apply))
    case (NOOP, Comment)    ⇒ Right((COMMENT, this))
    case (NOOP, _)          ⇒ Right((NOOP, this))
    case (COMMENT, NewLine) ⇒ Right((NOOP, this))
    case (COMMENT, _)       ⇒ Right((COMMENT, this))
  }
}
| MarianoGappa/sexybash | src/main/scala/sexybash/DefaultAstFunctionBuilder.scala | Scala | mit | 663 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.rest.k8s
import java.io.{ByteArrayInputStream, File}
import java.nio.file.Paths
import com.google.common.io.Files
import org.scalatest.BeforeAndAfter
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils
private[spark] class StagedResourcesStoreSuite extends SparkFunSuite with BeforeAndAfter {

  private val resourceBytes = Array[Byte](1, 2, 3, 4)
  private val namespace = "namespace"
  private var dependencyRootDir: File = _
  private var stagedResourcesStore: StagedResourcesStore = _

  before {
    dependencyRootDir = Utils.createTempDir()
    stagedResourcesStore = new StagedResourcesStoreImpl(dependencyRootDir)
  }

  after {
    dependencyRootDir.delete()
  }

  /** Stages the shared test bytes under the test namespace, returning the id/secret pair. */
  private def stageTestResources() =
    Utils.tryWithResource(new ByteArrayInputStream(resourceBytes)) { resourceStream =>
      stagedResourcesStore.addResources(namespace, resourceStream)
    }

  /** Directory under the dependency root that the store uses for the test namespace. */
  private def namespaceDir(): File =
    Paths.get(dependencyRootDir.getAbsolutePath, "namespace").toFile

  test("Uploads should write data to the underlying disk") {
    val resourceIdAndSecret = stageTestResources()
    val resourceNamespaceDir = namespaceDir()
    assert(resourceNamespaceDir.isDirectory, s"Resource namespace dir was not created at" +
      s" ${resourceNamespaceDir.getAbsolutePath} or is not a directory.")
    val resourceDirs = resourceNamespaceDir.listFiles()
    assert(resourceDirs.length === 1, s"Resource root directory did not have exactly one" +
      s" subdirectory. Got: ${resourceDirs.map(_.getAbsolutePath).mkString(",")}")
    assert(resourceDirs(0).getName === resourceIdAndSecret.resourceId)
    val resourceTgz = new File(resourceDirs(0), "resources.data")
    assert(resourceTgz.isFile,
      s"Resources written to ${resourceTgz.getAbsolutePath} does not exist or is not a file.")
    val resourceTgzBytes = Files.toByteArray(resourceTgz)
    assert(resourceTgzBytes.toSeq === resourceBytes.toSeq, "Incorrect resource bytes were written.")
  }

  test("Uploading and then getting should return a stream with the written bytes.") {
    val resourceIdAndSecret = stageTestResources()
    val resources = stagedResourcesStore.getResources(resourceIdAndSecret.resourceId)
    assert(resources.map(_.resourcesFile)
      .map(Files.toByteArray)
      .exists(resourceBytes.sameElements(_)))
    assert(resources.exists(_.resourceId == resourceIdAndSecret.resourceId))
    assert(resources.exists(_.resourceSecret == resourceIdAndSecret.resourceSecret))
  }

  test("Uploading and then deleting should result in the resource directory being deleted.") {
    val resourceIdAndSecret = stageTestResources()
    stagedResourcesStore.removeResources(resourceIdAndSecret.resourceId)
    assert(namespaceDir().listFiles().isEmpty)
    assert(stagedResourcesStore.getResources(resourceIdAndSecret.resourceId).isEmpty)
  }
}
| publicRoman/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/rest/k8s/StagedResourcesStoreSuite.scala | Scala | apache-2.0 | 3,970 |
///*
// * DARWIN Genetic Algorithms Framework Project.
// * Copyright (c) 2003, 2005, 2007, 2009, 2011, 2016, 2017. Phasmid Software
// *
// * Originally, developed in Java by Rubecula Software, LLC and hosted by SourceForge.
// * Converted to Scala by Phasmid Software and hosted by github at https://github.com/rchillyard/Darwin
// *
// * This file is part of Darwin.
// *
// * Darwin is free software: you can redistribute it and/or modify
// * it under the terms of the GNU General Public License as published by
// * the Free Software Foundation, either version 3 of the License, or
// * (at your option) any later version.
// *
// * This program is distributed in the hope that it will be useful,
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// * GNU General Public License for more details.
// *
// * You should have received a copy of the GNU General Public License
// * along with this program. If not, see <http://www.gnu.org/licenses/>.
// */
//
//package com.phasmid.darwin.evolution
//
//import com.phasmid.darwin.eco.Ecology
//import com.phasmid.darwin.run.Species
//import com.phasmid.laScala.Version
//import com.phasmid.laScala.values.Incrementable
//
///**
// * Created by scalaprof on 9/30/17.
// *
// * @param name an identifier for this Population
// * @param colonies the colonies which belong to this Population
// * @param version a version representing this generation
// * @param ecology an Ecology for which the members of this Population are adapted
// * @param species the Species of the organisms represented in this Population
// * @tparam B the Base type
// * @tparam G the Gene type
// * @tparam P the Ploidy type
// * @tparam T the Trait type
// * @tparam V the generation type (defined to be Incrementable)
// * @tparam X the underlying type of the xs
// */
//case class Population[B, G, P, T, V: Incrementable, X, Z <: Organism[R, V], Y <: Colony[T, V, X, Z] : ColonyBuilder](name: String, colonies: Iterable[Y], version: Version[V], ecology: Ecology[T, X], species: Species[B, G, P, T, X]) extends BaseGenerational[V, Population[B, G, P, T, V, X, Z, Y]](version) {
// val cb: ColonyBuilder[Y] = implicitly[ColonyBuilder[Y]]
// val vi: Incrementable[V] = implicitly[Incrementable[V]]
// /**
// * Method to yield the next generation of this Population
// *
// * @param v the Version for the next generation
// * @return the next generation of this Population as a Repr
// */
// def next(v: Version[V]): Population[B, G, P, T, V, X, Z, Y] = {
// // TODO: remove this use of asInstanceOf -- it should not be necessary
// val zs = for (c <- colonies) yield c.next(v).asInstanceOf[Y]
// Population[B, G, P, T, V, X, Z, Y](name, zs, v, ecology, species)(vi, cb)
// }
//
//}
| rchillyard/Darwin | src/main/scala/com/phasmid/darwin/evolution/Population.scala | Scala | gpl-3.0 | 2,900 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, FreeFeat, GrantsToClass}
/**
* [[http://ddowiki.com/page/Wilderness_Lore Wilderness Lore]] This feat grants represents your
* knowledge of the wilderness. Characters with this feat are granted special quest-specific dialog
* options/object interactions that classes without this feat otherwise could not perform. It may
* also allow certain skill checks to learn insight into specific situations. Barbarian, Druid,
* Ranger received this feat once for every level. Bard received this feat at level 1, 3, 5, 7, 9
* ,11, 13 ,15, 17, 19. Verbatim from the release notes: Many classes now gain the "Arcane Lore",
* "Religious Lore", or "Wilderness Lore" feats every level, which may modify certain dialog options
* or come up in other ways during quests.
* @todo
* Add Lore Trait sub for Arcane, Religious Wilderness etc
*/
protected[feats] trait WildernessLore
  extends FeatRequisiteImpl with Passive with StackableFeat with GrantsToClass with FreeFeat {
  self: ClassFeat =>

  /**
   * Classes that automatically receive this feat, paired with the level of
   * each grant: Barbarian, Druid and Ranger at every level 1-20 (ordered by
   * class name), followed by Bard at every odd level.
   */
  override def grantToClass: Seq[(HeroicCharacterClass, Int)] =
    allLevelsClasses.sortBy(_._1.entryName) ++ bardLevels

  /** Bard gains the feat at levels 1, 3, 5, ..., 19. */
  private def bardLevels: List[(HeroicCharacterClass, Int)] =
    (1 to 20 by 2).map(level => (HeroicCharacterClass.Bard, level)).toList

  /** Barbarian, Druid and Ranger gain the feat at every level from 1 to 20. */
  private def allLevelsClasses: List[(HeroicCharacterClass, Int)] =
    List(
      HeroicCharacterClass.Barbarian,
      HeroicCharacterClass.Druid,
      HeroicCharacterClass.Ranger
    ).flatMap(cls => (1 to 20).map(level => (cls, level)))
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/WildernessLore.scala | Scala | apache-2.0 | 2,285 |
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.csv
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, SQLContext}
import com.databricks.spark.csv.util.{ParserLibs, ParseModes, TextFile}
/**
* A collection of static functions for working with CSV files in Spark SQL
*/
/**
 * Fluent builder for loading CSV files into Spark SQL [[DataFrame]]s.
 *
 * Every `withX` setter mutates this instance in place and returns `this`, so
 * calls can be chained and finished with [[csvFile]] to perform the read.
 * NOTE(review): instances are mutable and therefore not thread-safe; do not
 * share a builder between threads.
 */
class CsvParser {
  // Treat the file's first line as a header row when true.
  private var useHeader: Boolean = false
  // Tokenizer-level options: delimiter, quote char, escape char, whitespace.
  private var csvParsingOpts: CSVParsingOpts = CSVParsingOpts()
  // Options controlling how individual lines are handled during parsing.
  private var lineParsingOpts: LineParsingOpts = LineParsingOpts()
  // Options for parsing floating-point columns.
  private var realNumberParsingOpts: RealNumberParsingOpts = RealNumberParsingOpts()
  // Options for parsing integer columns.
  private var intNumberParsingOpts: IntNumberParsingOpts = IntNumberParsingOpts()
  // Options for parsing string columns.
  private var stringParsingOpts: StringParsingOpts = StringParsingOpts()
  // Lines starting with this character are treated as comments.
  private var comment: Character = '#'
  // Explicit schema; null means the schema is derived (header/inference).
  private var schema: StructType = null
  // Malformed-record handling mode (see ParseModes for the accepted values).
  private var parseMode: String = ParseModes.DEFAULT
  // Which underlying parser implementation to use (see ParserLibs).
  private var parserLib: String = ParserLibs.DEFAULT
  // Character encoding of the input files.
  private var charset: String = TextFile.DEFAULT_CHARSET.name()
  // Infer column types by scanning the data when no schema is supplied.
  private var inferSchema: Boolean = false
  /** Sets whether the first line is a header row. Returns `this` for chaining. */
  def withUseHeader(flag: Boolean): CsvParser = {
    this.useHeader = flag
    this
  }
  /** Sets the field delimiter character. */
  def withDelimiter(delimiter: Character): CsvParser = {
    this.csvParsingOpts.delimiter = delimiter
    this
  }
  /** Sets the quote character used to enclose fields. */
  def withQuoteChar(quote: Character): CsvParser = {
    this.csvParsingOpts.quoteChar = quote
    this
  }
  /** Sets an explicit schema, disabling header/inference-based derivation. */
  def withSchema(schema: StructType): CsvParser = {
    this.schema = schema
    this
  }
  /** Sets the malformed-record handling mode (see ParseModes). */
  def withParseMode(mode: String): CsvParser = {
    this.parseMode = mode
    this
  }
  /** Sets the escape character. */
  def withEscape(escapeChar: Character): CsvParser = {
    this.csvParsingOpts.escapeChar = escapeChar
    this
  }
  /** Sets the comment character; lines starting with it are skipped. */
  def withComment(commentChar: Character) : CsvParser = {
    this.comment = commentChar
    this
  }
  /** Sets whether leading whitespace in fields is stripped. */
  def withIgnoreLeadingWhiteSpace(ignore: Boolean): CsvParser = {
    this.csvParsingOpts.ignoreLeadingWhitespace = ignore
    this
  }
  /** Sets whether trailing whitespace in fields is stripped. */
  def withIgnoreTrailingWhiteSpace(ignore: Boolean): CsvParser = {
    this.csvParsingOpts.ignoreTrailingWhitespace = ignore
    this
  }
  /** Selects the underlying parser implementation (see ParserLibs). */
  def withParserLib(parserLib: String): CsvParser = {
    this.parserLib = parserLib
    this
  }
  /** Replaces the whole tokenizer option group at once. */
  def withCsvParsingOpts(csvParsingOpts: CSVParsingOpts) = {
    this.csvParsingOpts = csvParsingOpts
    this
  }
  /** Replaces the whole line-handling option group at once. */
  def withLineParsingOpts(lineParsingOpts: LineParsingOpts) = {
    this.lineParsingOpts = lineParsingOpts
    this
  }
  /** Replaces the floating-point parsing option group at once. */
  def withRealNumberParsingOpts(numberParsingOpts: RealNumberParsingOpts) = {
    this.realNumberParsingOpts = numberParsingOpts
    this
  }
  /** Replaces the integer parsing option group at once. */
  def withIntNumberParsingOpts(numberParsingOpts: IntNumberParsingOpts) = {
    this.intNumberParsingOpts = numberParsingOpts
    this
  }
  /** Replaces the string parsing option group at once. */
  def withStringParsingOpts(stringParsingOpts: StringParsingOpts) = {
    this.stringParsingOpts = stringParsingOpts
    this
  }
  /**
   * Rebuilds ALL option groups from a string-keyed option map.
   * NOTE(review): this overwrites any option group previously configured via
   * the individual `withX` setters.
   */
  def withOpts(optMap: Map[String, String]) = {
    this.stringParsingOpts = StringParsingOpts(optMap)
    this.lineParsingOpts = LineParsingOpts(optMap)
    this.realNumberParsingOpts = RealNumberParsingOpts(optMap)
    this.intNumberParsingOpts = IntNumberParsingOpts(optMap)
    this.csvParsingOpts = CSVParsingOpts(optMap)
    this
  }
  /** Sets the character encoding of the input files. */
  def withCharset(charset: String): CsvParser = {
    this.charset = charset
    this
  }
  /** Enables/disables schema inference (used when no schema is set). */
  def withInferSchema(inferSchema: Boolean) = {
    this.inferSchema = inferSchema
    this
  }
  /**
   * Builds a [[CsvRelation]] from the accumulated settings and returns a
   * [[DataFrame]] over the CSV file(s) at `path`.
   *
   * @param sqlContext context used to create the DataFrame
   * @param path       path of the CSV input
   * @throws RuntimeException propagated from the underlying relation on
   *         parse/validation failure
   */
  @throws[RuntimeException]
  def csvFile(sqlContext: SQLContext, path: String): DataFrame = {
    val relation: CsvRelation = CsvRelation(
      path,
      useHeader,
      csvParsingOpts,
      parseMode,
      parserLib,
      schema,
      comment,
      lineParsingOpts,
      realNumberParsingOpts,
      intNumberParsingOpts,
      stringParsingOpts,
      charset,
      inferSchema)(sqlContext)
    sqlContext.baseRelationToDataFrame(relation)
  }
}
| mohitjaggi/spark-csv | src/main/scala/com/databricks/spark/csv/CsvParser.scala | Scala | apache-2.0 | 4,395 |
package glasskey.util
import java.security.PublicKey
/**
 * A provider of public keys, looked up by key identifier.
 *
 * NOTE(review): `keyId` presumably corresponds to a JWK `kid` value — confirm
 * against the implementations (JWK/PEM-DER key sources).
 */
trait PublicKeySource {
  /** Location of the key material; its interpretation depends on the implementation. */
  def source: String
  /** Returns the public key registered under `keyId`, or `None` when not found. */
  def getPublicKey(keyId: String): Option[PublicKey]
}
/**
 * Factory for [[PublicKeySource]] implementations.
 */
object PublicKeySource {
  /**
   * Builds the concrete key source matching `keySourceType`, wired to read
   * its key material from `src`.
   */
  def apply(src: String, keySourceType: PublicKeySourceType): PublicKeySource =
    keySourceType match {
      case JWK =>
        new JWKKeySource { override def source: String = src }
      case FileJWK =>
        new FileJWKKeySource { override def source: String = src }
      case PEMDER =>
        new PEMDERKeySource { override def source: String = src }
    }
}
/** Enumeration of the supported kinds of key source. */
sealed trait PublicKeySourceType
/** Keys served as a JWK document (see JWKKeySource). */
case object JWK extends PublicKeySourceType
/** JWK document read from a file (see FileJWKKeySource). */
case object FileJWK extends PublicKeySourceType
/** Keys in PEM/DER form (see PEMDERKeySource). */
case object PEMDER extends PublicKeySourceType
package org.ferrit.core.parser
import org.ferrit.core.http.Response
import org.ferrit.core.uri.CrawlUri
import org.ferrit.core.util.{MediaType, TagUtil, Stopwatch}
import org.ferrit.core.util.JsoupSugar.elementsToSeq
import org.ferrit.core.util.TagUtil.{CssTagEquiv, CssImportUrl, HtmlUriAttributes}
import org.jsoup.Jsoup
import org.jsoup.nodes.{Document, Element}
import org.jsoup.select.Elements

import scala.util.control.NonFatal
/**
* A Jsoup backed HtmlParser to extract crawlable links.
* *** Jsoup is totally freakin' cool by the way ***
*/
class HtmlParserJsoup extends ContentParser {

  /** True when the response's media type identifies it as HTML. */
  override def canParse(response: Response): Boolean =
    MediaType.is(response, MediaType.Html)

  /**
   * Extracts crawlable links and robots meta directives from an HTML page.
   *
   * Links are gathered from URI-bearing attributes (href/src/...) and from
   * `@import url(...)` statements inside inline <style> blocks. Relative URIs
   * are resolved against the page's <base> element when present, otherwise
   * against the request URI.
   *
   * @throws ParseException if the response is not HTML (see [[canParse]])
   */
  override def parse(response: Response): ParserResult = {
    if (!canParse(response)) throw new ParseException(
      "Cannot parse response"
    )

    val stopwatch = new Stopwatch
    val reqUri = response.request.crawlUri
    val doc: Document = Jsoup.parse(response.contentString)

    // Base URL used to resolve relative links: a non-empty <base href> if one
    // exists, otherwise the document's own URI. (Jsoup can return a <base>
    // element with an empty href attribute, which is treated as absent.)
    val base: CrawlUri = doc.select("base[href]").headOption match {
      case Some(e) => e.attr("href").trim match {
        case "" => reqUri
        case href => CrawlUri(reqUri, href)
      }
      case None => reqUri
    }

    // Page-level robots directives from <meta name=robots content=...>.
    val metaQuery = """meta[name=robots][content~=(?i)\\b%s\\b]"""
    val head = doc.head
    val noIndex = head.select(metaQuery format "noindex").nonEmpty
    val noFollow = head.select(metaQuery format "nofollow").nonEmpty

    // Links from URI-bearing attributes; <base> is excluded because it was
    // consumed above. (Replaces the previous mutable-var accumulation with an
    // immutable for-comprehension; the result is a Set, so order is
    // irrelevant.)
    val attributeLinks: Seq[Link] = for {
      attr <- HtmlUriAttributes.toSeq
      e <- doc.select(s"[$attr]:not(base)").toSeq
      uriAttr = e.attr(attr).trim
      if uriAttr.nonEmpty
    } yield {
      // A link is nofollow when the page-level directive says so or the
      // element's own rel attribute does.
      val noFollowLink = noFollow || "nofollow" == e.attr("rel").toLowerCase
      val (uri, failMsg) = makeUri(base, uriAttr)
      Link(e.nodeName, uriAttr, e.text, noFollowLink, uri, failMsg)
    }

    // Links from @import url('...') statements in inline <style> blocks.
    val styleLinks: Seq[Link] = for {
      style <- doc.select("style").toSeq
      CssImportUrl(_, uriAttr) <- CssImportUrl findAllMatchIn style.data
      if !uriAttr.trim.isEmpty
    } yield {
      val (absUri, failMsg) = makeUri(base, uriAttr)
      Link(CssTagEquiv, uriAttr, "", false, absUri, failMsg)
    }

    DefaultParserResult((attributeLinks ++ styleLinks).toSet, noIndex, noFollow, stopwatch.duration)
  }

  /**
   * Resolves `uriAttr` against `base`. Exactly one side of the returned pair
   * is defined: the absolute URI on success, or a description of the failing
   * inputs when resolution throws.
   */
  private def makeUri(base: CrawlUri, uriAttr: String): (Option[CrawlUri], Option[String]) =
    try {
      (Some(CrawlUri(base, uriAttr)), None)
    } catch {
      // BUGFIX: only recover from non-fatal errors; previously this caught
      // Throwable, swallowing OutOfMemoryError and friends.
      case NonFatal(_) => (None, Some(s"base[$base] relative[$uriAttr]"))
    }
}
| reggoodwin/ferrit | src/main/scala/org/ferrit/core/parser/HtmlParserJsoup.scala | Scala | mit | 3,319 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.password
import org.mindrot.jbcrypt.BCrypt
import silhouette.password.BCryptPasswordHasher._
import silhouette.util.{ PasswordHasher, PasswordInfo }
/**
* Implementation of the password hasher based on BCrypt.
*
* @param logRounds The log2 of the number of rounds of hashing to apply.
* @see [[http://www.mindrot.org/files/jBCrypt/jBCrypt-0.2-doc/BCrypt.html#gensalt(int) gensalt]]
*/
class BCryptPasswordHasher(logRounds: Int = 10) extends PasswordHasher {

  /** The ID of this hasher implementation. */
  override def id: String = ID

  /**
   * Hashes a password with a freshly generated salt.
   *
   * The salt is embedded in the bcrypt output string, so the returned
   * PasswordInfo carries no separate salt value (unlike other hashers,
   * which may need to store it explicitly).
   *
   * @param plainPassword the password to hash
   * @return a PasswordInfo containing the hashed password
   */
  override def hash(plainPassword: String): PasswordInfo = {
    val hashed = BCrypt.hashpw(plainPassword, BCrypt.gensalt(logRounds))
    PasswordInfo(hasher = id, password = hashed)
  }

  /**
   * Verifies a supplied password against the stored bcrypt hash.
   *
   * @param passwordInfo     the password info from the backing store
   * @param suppliedPassword the password supplied by the user logging in
   * @return true if the passwords match, false otherwise
   */
  override def matches(passwordInfo: PasswordInfo, suppliedPassword: String): Boolean =
    BCrypt.checkpw(suppliedPassword, passwordInfo.password)

  /**
   * Indicates whether a stored hash is deprecated.
   *
   * A bcrypt hash is deprecated when it was created with a different number
   * of log rounds than this hasher is configured with. Returns None when the
   * password info was not produced by this hasher at all.
   *
   * @param passwordInfo the password info to check
   * @return Some(deprecated) for bcrypt hashes, None for unsuitable ones
   */
  override def isDeprecated(passwordInfo: PasswordInfo): Option[Boolean] =
    if (isSuitable(passwordInfo)) {
      val LogRoundsPattern(rounds) = passwordInfo.password
      // Deprecated when the stored cost differs from the configured one.
      Some(rounds != logRounds.toString)
    } else {
      None
    }
}
/**
 * Companion holding the hasher ID and the pattern used to read the
 * log-rounds ("cost") field back out of a stored bcrypt hash.
 */
object BCryptPasswordHasher {
  /**
   * The ID of the hasher, stored alongside each hash so it can be matched
   * back to this implementation.
   */
  val ID = "bcrypt"
  /**
   * Extracts the log-rounds group (capture group 1) from a bcrypt hash
   * string; used by isDeprecated to compare against the configured cost.
   */
  val LogRoundsPattern = """^\\$\\w{2}\\$(\\d{1,2})\\$.+""".r
}
| datalek/silhouette | silhouette-password-bcrypt/src/main/scala/silhouette/password/BCryptPasswordHasher.scala | Scala | apache-2.0 | 3,341 |
package com.realizationtime.btdogg.scraping
import akka.actor.{Actor, ActorLogging, ActorRef}
import com.realizationtime.btdogg.commons.TKey
import com.realizationtime.btdogg.scraping.ScrapersHub.{AddScrapers, Message, ScrapeResult, StatRequest}
import com.realizationtime.btdogg.scraping.TorrentScraper.ScrapeRequest
import scala.util.Random
/**
 * Actor that routes torrent scrape requests to a pool of scraper actors
 * keyed by infohash prefix.
 *
 * Before the first [[ScrapersHub.AddScrapers]] message arrives, incoming keys
 * are buffered and replayed once scrapers become available. The hub also
 * tracks how many copies of each request are in flight and logs aggregate
 * statistics once a minute (self-scheduled StatRequest).
 */
class ScrapersHub extends Actor with ActorLogging {
  import context._
  // Maps an infohash prefix to the scraper actor responsible for it. For a
  // prefix with no dedicated scraper, the default picks a random known
  // scraper. NOTE(review): the default throws if the map is empty, but
  // lookups only happen in the `working` state, which is entered after at
  // least one AddScrapers message has populated the map.
  private var idPrefixesToScrapers: Map[String, ActorRef] = Map[String, ActorRef]()
    .withDefault(keyPrefix => {
      val ret = idPrefixesToScrapers.toStream(Random.nextInt(idPrefixesToScrapers.size))
      log.debug(s"Found random scraper with prefix ${ret._1} for key $keyPrefix")
      ret._2
    })
  // Number of in-flight copies of each request; entries reaching 0 are removed.
  private var requestsCounter = Map[ScrapeRequest, Int]()
    .withDefaultValue(0)
  // Records one more in-flight copy of `request`.
  private def incrementCounter(request: ScrapeRequest): Unit = {
    val incrementedCounter = requestsCounter(request) + 1
    requestsCounter += request -> incrementedCounter
  }
  // Records completion of one copy of `request`, dropping the entry at zero.
  private def decrementCounter(request: ScrapeRequest): Unit = {
    val decrementedValue = requestsCounter(request) - 1
    if (decrementedValue <= 0)
      requestsCounter -= request
    else
      requestsCounter += request -> decrementedValue
  }
  // Schedules the periodic StatRequest self-message that drives stats logging.
  override def preStart(): Unit = {
    import scala.language.postfixOps
    import scala.concurrent.duration._
    context.system.scheduler.schedule(1 minute, 1 minute, self, StatRequest)
  }
  // Starts in the buffering state: no scrapers registered yet.
  override def receive: Receive = noNodesReceivedYet(List())
  /**
   * Buffering state used before any scrapers are registered. Incoming TKeys
   * are wrapped with their sender and accumulated; on AddScrapers the buffer
   * is replayed to self and the actor switches to `working`.
   */
  def noNodesReceivedYet(waitingRequests: List[ScrapeRequest]): Receive = {
    case key: TKey =>
      val request = ScrapeRequest(key, sender())
      become(noNodesReceivedYet(request :: waitingRequests), discardOld = true)
    case m: Message => m match {
      case AddScrapers(newScrapers) =>
        idPrefixesToScrapers ++= newScrapers
        waitingRequests.foreach(self ! _)
        become(working)
      case StatRequest => logRequestsStats()
    }
  }
  /**
   * Normal operating state: requests are dispatched to the scraper owning the
   * key's prefix, and scraper results are forwarded back to the original
   * requester.
   */
  val working: Receive = {
    case request: ScrapeRequest =>
      orderScraping(request)
    case key: TKey =>
      orderScraping(ScrapeRequest(key, sender()))
    case res: TorrentScraper.ScrapeResult =>
      // A scraper finished: update the in-flight count and deliver the result.
      decrementCounter(res.request)
      ScrapeResult.sendToOriginalRecipient(res)
    case m: Message => m match {
      case AddScrapers(newScrapers) => idPrefixesToScrapers ++= newScrapers
      case StatRequest => logRequestsStats()
    }
  }
  // Dispatches a request to the scraper owning the key's prefix (or a random
  // scraper via the map default) and bumps the in-flight counter.
  private def orderScraping(request: ScrapeRequest) = {
    incrementCounter(request)
    idPrefixesToScrapers(request.key.prefix) ! request
  }
  // Logs the total and per-scraper average of in-flight requests.
  private def logRequestsStats() = {
    val requestCount: Int = requestsCounter.values.sum
    val perScraper = if (idPrefixesToScrapers.isEmpty) {
      0
    } else {
      requestCount / idPrefixesToScrapers.size
    }
    log.info(s"Currently processing $requestCount scraping requests, average: $perScraper per scraper")
  }
}
/**
 * Message protocol and result types for [[ScrapersHub]].
 */
object ScrapersHub {
  /** Marker trait for control messages understood by the hub. */
  sealed trait Message
  /** Registers additional scrapers, keyed by the infohash prefix they own. */
  final case class AddScrapers(newScrapers: Map[String, ActorRef]) extends Message
  /** Triggers logging of in-flight request statistics (self-scheduled). */
  case object StatRequest extends Message
  /** Scrape outcome forwarded by the hub to the original requester. */
  final case class ScrapeResult(key: TKey, resultValue: TorrentScraper.ScrapeResult#ResultValue)
  object ScrapeResult {
    /** Converts a scraper-level result into the hub-level result type. */
    def apply(nodeResult: TorrentScraper.ScrapeResult): ScrapeResult = ScrapeResult(nodeResult.request.key, nodeResult.result)
    /** Sends the converted result back to the actor that asked for the scrape. */
    def sendToOriginalRecipient(nodeResult: TorrentScraper.ScrapeResult): Unit = {
      nodeResult.request.originalRecipient ! ScrapeResult(nodeResult)
    }
  }
}
package spire
package math
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import org.scalatest.Matchers
import org.scalatest._
import prop._
/**
 * Property-based checks of [[Rational]]: field/ring laws, rounding, and
 * round-trip conversions to Real, Algebraic and Double.
 */
class RationalCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {

  type Q = Rational

  // Arbitrary rationals from arbitrary BigInt numerators/denominators; a zero
  // denominator is replaced with 1 so construction cannot fail.
  implicit val arbRational: Arbitrary[Rational] = Arbitrary(for {
    n <- arbitrary[BigInt]
    d0 <- arbitrary[BigInt]
  } yield {
    val d = if (d0.signum == 0) BigInt(1) else d0
    Rational(n, d)
  })

  // Registers a property checked over one random rational built from Long
  // parts (denominator coerced away from zero).
  def rat1(name: String)(f: Q => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        f(Rational(nx, dx))
      }
    }

  // Same as rat1, but for two random rationals.
  def rat2(name: String)(f: (Q, Q) => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long, ny: Long, _dy: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        val dy = if (_dy == 0) 1 else _dy
        f(Rational(nx, dx), Rational(ny, dy))
      }
    }

  // Same as rat1, but for three random rationals.
  def rat3(name: String)(f: (Q, Q, Q) => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long, ny: Long, _dy: Long, nz: Long, _dz: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        val dy = if (_dy == 0) 1 else _dy
        val dz = if (_dz == 0) 1 else _dz
        f(Rational(nx, dx), Rational(ny, dy), Rational(nz, dz))
      }
    }

  property("Internal GCD implementation is similar to the field of fractions implementation") {
    forAll { (x: Rational, y: Rational) => x.gcd(y) shouldBe Rational(x.numerator gcd y.numerator, x.denominator lcm y.denominator) }
  }

  rat1("x + 0 == x") { x: Q => x + Rational(0) shouldBe x }
  rat1("x * 1 == x") { x: Q => x * Rational(1) shouldBe x }
  rat1("x * 0 == 0") { x: Q => x * Rational(0) shouldBe Rational(0) }
  rat1("x.floor <= x.round <= x.ceil") { x: Q =>
    x.floor should be <= x.round
    x.round should be <= x.ceil
  }
  rat1("x + x == 2x") { x: Q => (x + x) shouldBe 2 * x }
  rat1("x - x == 0") { x: Q => x - x shouldBe Rational(0) }
  rat1("x * x == x^2") { x: Q => (x * x) shouldBe x.pow(2) }
  rat1("(x^-1)^3 == x^-3") { x: Q => if (x != 0) x.reciprocal.pow(3) shouldBe x.pow(-3) }
  rat1("x / x == 1") { x: Q => if (x != 0) x / x shouldBe Rational(1) }

  rat2("x + y == y + x") { (x: Q, y: Q) => x + y shouldBe y + x }
  rat2("x - y == -y + x") { (x: Q, y: Q) => x - y shouldBe -y + x }
  rat2("x + y - x == y") { (x: Q, y: Q) => (x + y) - x shouldBe y }
  rat2("x / y == x * (y^-1)") { (x: Q, y: Q) => if (y != 0) x / y shouldBe x * y.reciprocal }

  rat3("(x + y) * z == x * z + y * z") { (x: Q, y: Q, z: Q) => (x + y) * z shouldBe x * z + y * z }

  rat1("Round-trip to Real") { (x: Q) =>
    x.toReal.toRational shouldBe x
  }

  rat1("Round-trip to Algebraic") { (x: Q) =>
    x.toAlgebraic.toRational shouldBe Some(x)
  }

  property("Round-trip Double") {
    forAll("x") { (n: Double) =>
      // BUGFIX: the body previously returned a Boolean, which ScalaTest's
      // forAll silently discards; assert explicitly so failures are reported.
      Rational(n).toDouble shouldBe n
    }
  }

  property("limitToInt does not change small Rationals") {
    forAll { (n: Int, d: Int) =>
      val r = Rational(n, if (d < 1) 1 else d)
      r.limitToInt shouldBe r
    }
  }

  property("limitToInt regression") {
    val n = Int.MinValue
    val r = Rational(n, 1)
    r.limitToInt shouldBe r
  }

  property("Rational.numeratorIsValidLong") {
    // BUGFIX: previously the lambda was registered as the property body but
    // never invoked (no forAll wrapper), making the check vacuous.
    forAll { (x: Q) =>
      x.numeratorIsValidLong shouldBe x.numerator.isValidLong
    }
  }

  property("Rational.denominatorIsValidLong") {
    // BUGFIX: same vacuous-lambda issue as above; now driven by forAll.
    forAll { (x: Q) =>
      x.denominatorIsValidLong shouldBe x.denominator.isValidLong
    }
  }

  property("limitTo(n) forces numerator and denominator to be less than n") {
    // Generator for strictly positive SafeLongs used as the limit.
    implicit val arbSafeLong: Arbitrary[SafeLong] =
      Arbitrary(arbitrary[BigInt].map { n => SafeLong(n.abs) }.filter(_.signum != 0))
    forAll { (x: Rational, n: SafeLong) =>
      val y = x.limitTo(n.abs)
      (y.numerator <= n) shouldBe true
      (y.denominator <= n) shouldBe true
    }
  }
}
| adampingel/spire | tests/src/test/scala/spire/math/RationalCheck.scala | Scala | mit | 3,811 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.api.dag.Transformation
import org.apache.flink.streaming.api.transformations.OneInputTransformation
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.data.RowData
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.codegen.WatermarkGeneratorCodeGenerator
import org.apache.flink.table.planner.delegation.StreamPlanner
import org.apache.flink.table.planner.plan.nodes.calcite.WatermarkAssigner
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, StreamExecNode}
import org.apache.flink.table.planner.plan.utils.RelExplainUtil.preferExpressionFormat
import org.apache.flink.table.planner.utils.TableConfigUtils.getMillisecondFromConfigDuration
import org.apache.flink.table.runtime.operators.wmassigners.WatermarkAssignerOperatorFactory
import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.{RelNode, RelWriter}
import org.apache.calcite.rex.RexNode
import java.util
import scala.collection.JavaConversions._
/**
 * Stream physical RelNode for [[WatermarkAssigner]].
 *
 * At translation time it code-generates a watermark generator from the
 * watermark expression and wraps it in a WatermarkAssignerOperator applied to
 * the input transformation.
 */
class StreamExecWatermarkAssigner(
    cluster: RelOptCluster,
    traits: RelTraitSet,
    inputRel: RelNode,
    rowtimeFieldIndex: Int,
    watermarkExpr: RexNode)
  extends WatermarkAssigner(cluster, traits, inputRel, rowtimeFieldIndex, watermarkExpr)
  with StreamPhysicalRel
  with StreamExecNode[RowData] {

  // This node produces watermarks itself; it does not require them upstream.
  override def requireWatermark: Boolean = false

  override def copy(
      traitSet: RelTraitSet,
      input: RelNode,
      rowtime: Int,
      watermark: RexNode): RelNode = {
    new StreamExecWatermarkAssigner(cluster, traitSet, input, rowtime, watermark)
  }

  /**
   * Fully override this method to have a better display name of this RelNode.
   * Shows the rowtime field by name and the watermark expression in the
   * preferred expression format.
   */
  override def explainTerms(pw: RelWriter): RelWriter = {
    val inFieldNames = inputRel.getRowType.getFieldNames.toList
    val rowtimeFieldName = inFieldNames(rowtimeFieldIndex)
    pw.input("input", getInput())
      .item("rowtime", rowtimeFieldName)
      .item("watermark", getExpressionString(
        watermarkExpr,
        inFieldNames,
        None,
        preferExpressionFormat(pw)))
  }

  //~ ExecNode methods -----------------------------------------------------------

  // Exposes the RelNode inputs as ExecNodes for the exec-node translation API.
  override def getInputNodes: util.List[ExecNode[StreamPlanner, _]] = {
    getInputs.map(_.asInstanceOf[ExecNode[StreamPlanner, _]])
  }

  override def replaceInputNode(
      ordinalInParent: Int,
      newInputNode: ExecNode[StreamPlanner, _]): Unit = {
    replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
  }

  // Builds the runtime transformation: input -> watermark assigner operator.
  override protected def translateToPlanInternal(
      planner: StreamPlanner): Transformation[RowData] = {
    val inputTransformation = getInputNodes.get(0).translateToPlan(planner)
      .asInstanceOf[Transformation[RowData]]

    val config = planner.getTableConfig
    // Idle timeout (millis) after which a silent source is marked idle.
    val idleTimeout = getMillisecondFromConfigDuration(config,
      ExecutionConfigOptions.TABLE_EXEC_SOURCE_IDLE_TIMEOUT)

    // Code-generate the watermark expression against the input row type.
    val watermarkGenerator = WatermarkGeneratorCodeGenerator.generateWatermarkGenerator(
      config,
      FlinkTypeFactory.toLogicalRowType(inputRel.getRowType),
      watermarkExpr)

    val operatorFactory = new WatermarkAssignerOperatorFactory(
      rowtimeFieldIndex,
      idleTimeout,
      watermarkGenerator)

    val outputRowTypeInfo = RowDataTypeInfo.of(FlinkTypeFactory.toLogicalRowType(getRowType))

    // Keep the input's parallelism so no extra shuffle is introduced.
    val transformation = new OneInputTransformation[RowData, RowData](
      inputTransformation,
      getRelDetailedDescription,
      operatorFactory,
      outputRowTypeInfo,
      inputTransformation.getParallelism)
    transformation
  }
}
| GJL/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamExecWatermarkAssigner.scala | Scala | apache-2.0 | 4,640 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spark
import java.lang.{Long ⇒ JLong}
import org.apache.ignite.cache.query.SqlFieldsQuery
import org.apache.ignite.internal.IgnitionEx
import org.apache.ignite.internal.util.IgniteUtils.resolveIgnitePath
import org.apache.ignite.spark.AbstractDataFrameSpec.{DEFAULT_CACHE, EMPLOYEE_CACHE_NAME, TEST_CONFIG_FILE, enclose}
import org.apache.spark.sql.ignite.IgniteSparkSession
import org.apache.spark.sql.types.{LongType, StringType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* Tests to check Spark Catalog implementation.
*/
@RunWith(classOf[JUnitRunner])
class IgniteCatalogSpec extends AbstractDataFrameSpec {
    // Spark session backed by the Ignite catalog; created in beforeAll.
    var igniteSession: IgniteSparkSession = _

    describe("Ignite Catalog Implementation") {
        it("Should observe all available SQL tables") {
            // Tables created in beforeAll (person, city) plus the
            // annotation-configured employee cache should all be listed.
            val tables = igniteSession.catalog.listTables.collect()

            tables.length should equal (3)
            tables.map(_.name).sorted should equal (Array("CITY", "EMPLOYEE", "PERSON"))
        }

        it("Should provide correct schema for SQL table") {
            // Column names, types and nullability must match the CREATE TABLE
            // definition of the city table.
            val columns = igniteSession.catalog.listColumns("city").collect()

            columns.length should equal (2)

            columns.map(c ⇒ (c.name, c.dataType, c.nullable)).sorted should equal (
                Array(
                    ("ID", LongType.catalogString, false),
                    ("NAME", StringType.catalogString, true)))
        }

        it("Should provide ability to query SQL table without explicit registration") {
            // The catalog resolves "city" directly — no createOrReplaceTempView.
            val res = igniteSession.sql("SELECT id, name FROM city").rdd

            res.count should equal(4)

            val cities = res.collect.sortBy(_.getAs[JLong]("id"))

            cities.map(c ⇒ (c.getAs[JLong]("id"), c.getAs[String]("name"))) should equal (
                Array(
                    (1, "Forest Hill"),
                    (2, "Denver"),
                    (3, "St. Petersburg"),
                    (4, "St. Petersburg")
                )
            )
        }

        it("Should provide ability to query SQL table configured throw java annotations without explicit registration") {
            // Same as above, but for a cache whose SQL schema comes from Java
            // annotations rather than DDL.
            val res = igniteSession.sql("SELECT id, name, salary FROM employee").rdd

            res.count should equal(3)

            val employees = res.collect.sortBy(_.getAs[JLong]("id"))

            employees.map(c ⇒ (c.getAs[JLong]("id"), c.getAs[String]("name"), c.getAs[Float]("salary"))) should equal (
                Array(
                    (1, "John Connor", 0f),
                    (2, "Sarah Connor", 10000f),
                    (3, "Arnold Schwarzenegger", 1000f)
                )
            )
        }

        it("Should provide newly created tables in tables list") {
            // Tables created at runtime via SQL DDL must become visible in the
            // catalog without restarting the session.
            val cache = client.cache(DEFAULT_CACHE)

            cache.query(new SqlFieldsQuery(
                "CREATE TABLE new_table(id LONG PRIMARY KEY, name VARCHAR) WITH \"template=replicated\"")).getAll

            val tables = igniteSession.catalog.listTables.collect()

            tables.find(_.name == "NEW_TABLE").map(_.name) should equal (Some("NEW_TABLE"))

            val columns = igniteSession.catalog.listColumns("NEW_TABLE").collect()

            columns.map(c ⇒ (c.name, c.dataType, c.nullable)).sorted should equal (
                Array(
                    ("ID", LongType.catalogString, false),
                    ("NAME", StringType.catalogString, true)))
        }

        it("Should allow register tables based on other datasources") {
            // Non-Ignite sources (here: a JSON file) can still be registered as
            // temp views and queried through the same session.
            val citiesDataFrame = igniteSession.read.json(
                resolveIgnitePath("modules/spark/src/test/resources/cities.json").getAbsolutePath)

            citiesDataFrame.createOrReplaceTempView("JSON_CITIES")

            val res = igniteSession.sql("SELECT id, name FROM json_cities").rdd

            res.count should equal(3)

            val cities = res.collect

            cities.map(c ⇒ (c.getAs[JLong]("id"), c.getAs[String]("name"))) should equal (
                Array(
                    (1, "Forest Hill"),
                    (2, "Denver"),
                    (3, "St. Petersburg")
                )
            )
        }
    }

    // Starts a client-mode Ignite node, seeds the test caches/tables, and
    // builds the IgniteSparkSession used by all test cases above.
    override protected def beforeAll(): Unit = {
        super.beforeAll()

        createPersonTable(client, DEFAULT_CACHE)

        createCityTable(client, DEFAULT_CACHE)

        createEmployeeCache(client, EMPLOYEE_CACHE_NAME)

        val configProvider = enclose(null) (_ ⇒ () ⇒ {
            val cfg = IgnitionEx.loadConfiguration(TEST_CONFIG_FILE).get1()

            cfg.setClientMode(true)

            cfg.setIgniteInstanceName("client-2")

            cfg
        })

        igniteSession = IgniteSparkSession.builder()
            .config(spark.sparkContext.getConf)
            .igniteConfigProvider(configProvider)
            .getOrCreate()
    }
}
| irudyak/ignite | modules/spark/src/test/scala/org/apache/ignite/spark/IgniteCatalogSpec.scala | Scala | apache-2.0 | 5,690 |
package shield.swagger
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.typesafe.scalalogging.LazyLogging
import io.swagger.models.apideclaration.{ApiDeclaration, Parameter}
import io.swagger.models.resourcelisting.{ApiListingReference, ResourceListing}
import io.swagger.parser.SwaggerLegacyParser
import io.swagger.reader.{SwaggerReaderConfiguration, SwaggerReaderFactory}
import io.swagger.transform.migrate.{V11ApiDeclarationMigrator, V11ResourceListingMigrator}
import io.swagger.validate.{ApiDeclarationSchemaValidator, ResourceListingSchemaValidator}
import shield.config.ServiceLocation
import shield.routing._
import shield.transports.HttpTransport.SendReceive
import spray.http.parser.HttpParser
import spray.http._
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
/**
 * Helpers for turning raw Swagger 1.x HTTP responses into typed models,
 * migrating legacy (1.1) documents and validating them against the schema
 * before binding. Validation and migration failures surface as exceptions
 * thrown by the underlying Java libraries.
 */
object Swagger1Helper {
  private val mapper = new ObjectMapper().enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING)
  private val reader = new SwaggerReaderFactory(new SwaggerReaderConfiguration()).newReader()
  private val listingMigrator = new V11ResourceListingMigrator()
  private val listingValidator = new ResourceListingSchemaValidator()
  private val apiMigrator = new V11ApiDeclarationMigrator()
  private val apiValidator = new ApiDeclarationSchemaValidator()

  /** Parses a resource-listing response: read JSON, migrate 1.1 -> 1.2, validate, bind. */
  def parseResourceListing(response: HttpResponse): ResourceListing = {
    val rawTree = mapper.readTree(response.entity.data.toByteArray)
    val migrated = listingMigrator.migrate(rawTree)
    listingValidator.validate(migrated) // throws on schema violations
    mapper.readValue(migrated.traverse(), classOf[ResourceListing])
  }

  /** Parses an api-declaration response: read JSON, migrate 1.1 -> 1.2, validate, bind. */
  def parseApiDeclaration(response: HttpResponse): ApiDeclaration = {
    val rawTree = mapper.readTree(response.entity.data.toByteArray)
    val migrated = apiMigrator.migrate(rawTree)
    apiValidator.validate(migrated) // throws on schema violations
    mapper.readValue(migrated.traverse(), classOf[ApiDeclaration])
  }
}
/**
 * Fetches and translates Swagger 1.x documentation into shield routing structures.
 *
 * @param basePath base URL path of the swagger api-docs (the resource listing endpoint)
 * @param pipeline HTTP transport used to fetch the swagger documents
 * @param executor execution context for composing the fetch futures
 */
class Swagger1Fetcher(basePath: String, pipeline: SendReceive)(implicit executor: ExecutionContext) extends SwaggerFetcher with LazyLogging {
  val parser = new SwaggerLegacyParser

  /**
   * Fetches one api declaration referenced by the resource listing and maps each
   * (api, operation) pair to an (EndpointTemplate, EndpointDetails) tuple.
   */
  def translate(apiReference: ApiListingReference) = {
    pipeline(HttpRequest(HttpMethods.GET, basePath + apiReference.getPath))
      .map(Swagger1Helper.parseApiDeclaration)
      .map { declaration =>
        for {
          api <- declaration.getApis.asScala
          op <- api.getOperations.asScala
        } yield {
          (
            EndpointTemplate(
              // NOTE(review): `.get` throws if the declaration uses an HTTP method
              // spray does not know about — confirm all upstream docs use standard verbs
              HttpMethods.getForKey(op.getMethod.toValue.toUpperCase).get,
              Path(api.getPath)
            ),
            EndpointDetails(
              op.getParameters.asScala.map(translateParam).toSet,
              op.getConsumes.asScala.flatMap(translateMediaType).toSet,
              op.getProduces.asScala.flatMap(translateMediaType).toSet,
              Set(),
              Set()
            )
          )
        }
      }
  }

  /**
   * Maps a swagger 1.x parameter to the shield Param type matching its paramType.
   * Header names are lowercased; a paramType outside the five known kinds
   * will throw a MatchError.
   */
  def translateParam(param: Parameter) : Param = {
    param.getParamType.toValue.toLowerCase match {
      case "body" => BodyParam(param.getName)
      case "form" => FormParam(param.getName)
      case "header" => HeaderParam(param.getName.toLowerCase)
      case "path" => PathParam(param.getName)
      case "query" => QueryParam(param.getName)
    }
  }

  /** Parses a content-type string into a spray MediaType; unparseable values become None. */
  def translateMediaType(mt: String) : Option[MediaType] = {
    // todo: handle invalid content types
    HttpParser.parse(HttpParser.ContentTypeHeaderValue, mt).fold(e => None, ct => Some(ct.mediaType))
  }

  /**
   * Fetches the resource listing from `basePath`, then all referenced api
   * declarations in parallel, and folds them into a single SwaggerDetails.
   * Missing title/apiVersion fields are reported as "(not specified)".
   */
  def fetch(host: ServiceLocation) : Future[SwaggerDetails] = {
    logger.info(s"Fetching swagger1 api-docs from $host")
    val rootDocFuture = pipeline(HttpRequest(HttpMethods.GET, Uri(basePath))).map(Swagger1Helper.parseResourceListing)
    val apiDocsFuture = rootDocFuture.flatMap(root => Future.sequence(root.getApis.asScala.map(translate)))
    for {
      rootDoc <- rootDocFuture
      apiDocs <- apiDocsFuture
    } yield SwaggerDetails(
      Option(rootDoc.getInfo).flatMap(i => Option(i.getTitle)).getOrElse("(not specified)"),
      Option(rootDoc.getApiVersion).getOrElse("(not specified)"),
      apiDocs.flatten.toMap
    )
  }
}
| RetailMeNot/shield | src/main/scala/shield/swagger/Swagger1Fetcher.scala | Scala | mit | 4,268 |
package de.htwg.zeta.common.format.project
import scala.collection.immutable.ListMap
import de.htwg.zeta.common.models.project.concept.elements.AttributeType
import de.htwg.zeta.common.models.project.concept.elements.Method
import play.api.libs.json.JsArray
import play.api.libs.json.JsObject
import play.api.libs.json.JsResult
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.OFormat
import play.api.libs.json.Reads
/**
 * Play-JSON OFormat for [[Method]].
 *
 * Parameters are serialized as an ordered JSON array of {name, type} objects
 * (a ListMap preserves declaration order); the field-name strings are
 * constructor parameters so callers can customize the wire format.
 */
@SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
class MethodFormat(
    attributeTypeFormat: AttributeTypeFormat,
    sName: String = "name",
    sParameters: String = "parameters",
    sType: String = "type",
    sDescription: String = "description",
    sReturnType: String = "returnType",
    sCode: String = "code"
) extends OFormat[Method] {

  /** Serializes a Method to a JsObject; parameter order is preserved. */
  override def writes(method: Method): JsObject = Json.obj(
    sName -> method.name,
    sParameters -> writesParameters(method.parameters),
    sDescription -> method.description,
    sReturnType -> attributeTypeFormat.writes(method.returnType),
    sCode -> method.code
  )

  // Each parameter becomes one {sName, sType} object in declaration order.
  private def writesParameters(parameters: ListMap[String, AttributeType]): JsArray = JsArray(
    parameters.map { case (name, typ) => Json.obj(
      sName -> name,
      sType -> attributeTypeFormat.writes(typ)
    )
    }.toList
  )

  /** Deserializes a Method; fails with a JsError if any field is missing or mistyped. */
  override def reads(json: JsValue): JsResult[Method] = for {
    name <- (json \ sName).validate[String]
    parameters <- (json \ sParameters).validate(readsParameters)
    description <- (json \ sDescription).validate[String]
    returnType <- (json \ sReturnType).validate(attributeTypeFormat)
    code <- (json \ sCode).validate[String]
  } yield {
    Method(name, parameters, description, returnType, code)
  }

  // Reads the parameter array back into a ListMap, keeping the array's order.
  private def readsParameters: Reads[ListMap[String, AttributeType]] = Reads { json =>
    json.validate(Reads.list(readsParameter)).map(ListMap(_: _*))
  }

  // Reads a single {sName, sType} object into a (name, type) pair.
  private def readsParameter: Reads[(String, AttributeType)] = Reads { json =>
    for {
      name <- (json \ sName).validate[String]
      typ <- (json \ sType).validate(attributeTypeFormat)
    } yield {
      (name, typ)
    }
  }
}
| Zeta-Project/zeta | api/common/src/main/scala/de/htwg/zeta/common/format/project/MethodFormat.scala | Scala | bsd-2-clause | 2,160 |
package com.github.bluenote
/** Sealed ADT of events describing a change in the game state. */
sealed trait GameStateChange
object GameStateChange {
  /** A new dungeon has been loaded. */
  case class DungeonLoaded(dungeon: Dungeon) extends GameStateChange
  /** The player moved to the given position. */
  case class PlayerPosition(pos: Point) extends GameStateChange
} | bluenote10/DungeonRift | src/main/scala/com/github/bluenote/GameStateChange.scala | Scala | gpl-2.0 | 225 |
import sbt.Keys._
import sbt._
import sbtassembly.Plugin.AssemblyKeys._
import sbtassembly.Plugin._
/**
 * sbt 0.13-style build definition for the "places" project: an aggregate root
 * with a Dropwizard service module and an Ordnance Survey client module.
 */
object ApplicationBuild extends Build {

  /** Runtime dependencies of the Dropwizard-based service module. */
  lazy val serviceDependencies = Seq(
    "com.yammer.dropwizard" % "dropwizard-core" % "0.6.2",
    "uk.gov.defra" % "capd-common" % "1.0.4"
  )

  /** Runtime dependencies of the OS client module. */
  lazy val clientDependencies = Seq (
    "com.sun.jersey" % "jersey-client" % "1.17.1",
    "com.sun.jersey" % "jersey-core" % "1.17.1",
    "com.sun.jersey" % "jersey-json" % "1.17.1",
    "org.modelmapper" % "modelmapper" % "0.6.2",
    "org.apache.commons" % "commons-lang3" % "3.3.2",
    "com.google.guava" % "guava" % "18.0"
  )

  /** Test-scope dependencies shared by both modules. */
  lazy val testDependencies = Seq (
    "com.novocode" % "junit-interface" % "0.11" % "test",
    "org.mockito" % "mockito-core" % "1.9.5" % "test" exclude("org.hamcrest", "hamcrest-core"),
    "org.hamcrest" % "hamcrest-all" % "1.3" % "test"
  )

  /** Publishing configuration: snapshots and releases go to the internal Nexus. */
  val appReleaseSettings = Seq(
    // Publishing options:
    publishMavenStyle := true,
    publishArtifact in Test := false,
    pomIncludeRepository := { x => false },
    publishTo <<= version { (v: String) =>
      val nexus = "https://defranexus.kainos.com/"
      if (v.trim.endsWith("SNAPSHOT"))
        Some("sonatype-snapshots" at nexus + "content/repositories/snapshots")
      else
        Some("sonatype-releases" at nexus + "content/repositories/releases")
    },
    credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
  )

  def defaultResolvers = Seq(
    "DEFRA Nexus Release repo" at "https://defranexus.kainos.com/content/repositories/releases/"
  )

  /** Settings common to every module (pure-Java modules, so no Scala cross-building). */
  def commonSettings = Seq(
    organization := "uk.gov.defra",
    autoScalaLibrary := false,
    scalaVersion := "2.10.2",
    crossPaths := false,
    resolvers ++= defaultResolvers
  )

  /** Common settings plus sbt-assembly fat-jar configuration. */
  def standardSettingsWithAssembly = commonSettings ++ assemblySettings ++ appReleaseSettings ++ Seq(
    mergeStrategy in assembly <<= (mergeStrategy in assembly) { (old) =>
      {
        case "about.html" => MergeStrategy.rename
        case "META-INF/spring.tooling" => MergeStrategy.discard
        case x => old(x)
      }
    },
    test in assembly := {}
  )

  lazy val root = Project("places", file("."), settings = appReleaseSettings ++ Seq(
    name := "places",
    resolvers ++= defaultResolvers
  )) aggregate(PlacesService, PlacesOsClient)

  lazy val PlacesService: Project = Project("places-service", file("places-service"),
    settings = standardSettingsWithAssembly ++ Seq(
      jarName in assembly := "places-service.jar",
      // Fixed typo: was "places-serivce", which published the module under a
      // misspelled artifact name (project id and jar name both say "places-service").
      name := "places-service",
      libraryDependencies ++= serviceDependencies ++ testDependencies
    )) dependsOn(PlacesOsClient % "compile")

  lazy val PlacesOsClient = Project("places-os-client", file("places-os-client"),
    settings = standardSettingsWithAssembly ++ Seq(
      name := "places-os-client",
      libraryDependencies ++= clientDependencies ++ testDependencies
    ))
}
| Defra/places-service | project/Build.scala | Scala | mit | 2,901 |
package com.alvrod.cryptopals.test.set1
import com.alvrod.cryptopals.Convert
import com.alvrod.cryptopals.breakers.AesMode
import com.alvrod.cryptopals.ciphers.{AES, RepeatingByteXor, SingleByteXor}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import scala.io.Source
@RunWith(classOf[JUnitRunner])
class ECB extends ScalaTestSupport {
  /** Challenge 7: decrypt AES-128-ECB ciphertext with the known key "YELLOW SUBMARINE". */
  test("Yellow Submarine") {
    val source = Source.fromURL("http://cryptopals.com/static/challenge-data/7.txt")
    // mkString avoids the quadratic cost of repeated String concatenation in foldLeft
    val ciphertextBase64 = source.getLines().mkString
    source.close()
    // java.util.Base64 replaces the unsupported internal sun.misc.BASE64Decoder API
    val ciphertext = java.util.Base64.getDecoder.decode(ciphertextBase64)
    val plaintextBytes = AES.decryptECB(ciphertext, "YELLOW SUBMARINE".getBytes)
    println(new String(plaintextBytes))
  }

  /** Challenge 8: find the hex line that was encrypted with ECB (repeated 16-byte blocks). */
  test("Detect ECB") {
    val source = Source.fromURL("http://cryptopals.com/static/challenge-data/8.txt")
    // Keep the iterator lazy; detectECB consumes it before the source goes away.
    val hexLines = source.getLines()
    val ecb = AesMode.detectECB(hexLines)
    println(ecb)
  }
}
| alvrod/cryptopals | test/src/com/alvrod/cryptopals/test/set1/ECB.scala | Scala | gpl-2.0 | 1,028 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.IOException
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.internal.io.FileCommitProtocol
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTablePartition}
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.util.SchemaUtils
/**
* A command for writing data to a [[HadoopFsRelation]]. Supports both overwriting and appending.
* Writing to dynamic partitions is also supported.
*
* @param staticPartitions partial partitioning spec for write. This defines the scope of partition
* overwrites: when the spec is empty, all partitions are overwritten.
* When it covers a prefix of the partition keys, only partitions matching
* the prefix are overwritten.
* @param ifPartitionNotExists If true, only write if the partition does not exist.
* Only valid for static partitions.
*/
case class InsertIntoHadoopFsRelationCommand(
    outputPath: Path,
    staticPartitions: TablePartitionSpec,
    ifPartitionNotExists: Boolean,
    partitionColumns: Seq[Attribute],
    bucketSpec: Option[BucketSpec],
    fileFormat: FileFormat,
    options: Map[String, String],
    query: LogicalPlan,
    mode: SaveMode,
    catalogTable: Option[CatalogTable],
    fileIndex: Option[FileIndex])
  extends DataWritingCommand {
  import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.escapePathName
  override def children: Seq[LogicalPlan] = query :: Nil
  /**
   * Executes the write: validates the output schema, resolves any catalog-tracked
   * partitions (including custom locations), decides whether to insert based on
   * `mode` and whether the path exists, performs the write via [[FileFormatWriter]],
   * and finally refreshes file/data caches and table statistics.
   */
  override def run(sparkSession: SparkSession, children: Seq[SparkPlan]): Seq[Row] = {
    assert(children.length == 1)
    // Most formats don't do well with duplicate columns, so lets not allow that
    SchemaUtils.checkSchemaColumnNameDuplication(
      query.schema,
      s"when inserting into $outputPath",
      sparkSession.sessionState.conf.caseSensitiveAnalysis)
    val hadoopConf = sparkSession.sessionState.newHadoopConfWithOptions(options)
    val fs = outputPath.getFileSystem(hadoopConf)
    val qualifiedOutputPath = outputPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
    // True only for partitioned catalog tables whose partition metadata is managed
    // in the metastore (filesource partition management enabled).
    val partitionsTrackedByCatalog = sparkSession.sessionState.conf.manageFilesourcePartitions &&
      catalogTable.isDefined &&
      catalogTable.get.partitionColumnNames.nonEmpty &&
      catalogTable.get.tracksPartitionsInCatalog
    var initialMatchingPartitions: Seq[TablePartitionSpec] = Nil
    var customPartitionLocations: Map[TablePartitionSpec, String] = Map.empty
    var matchingPartitions: Seq[CatalogTablePartition] = Seq.empty
    // When partitions are tracked by the catalog, compute all custom partition locations that
    // may be relevant to the insertion job.
    if (partitionsTrackedByCatalog) {
      matchingPartitions = sparkSession.sessionState.catalog.listPartitions(
        catalogTable.get.identifier, Some(staticPartitions))
      initialMatchingPartitions = matchingPartitions.map(_.spec)
      customPartitionLocations = getCustomPartitionLocations(
        fs, catalogTable.get, qualifiedOutputPath, matchingPartitions)
    }
    val pathExists = fs.exists(qualifiedOutputPath)
    // If we are appending data to an existing dir.
    val isAppend = pathExists && (mode == SaveMode.Append)
    // Each job run gets a fresh UUID job id for the commit protocol.
    val committer = FileCommitProtocol.instantiate(
      sparkSession.sessionState.conf.fileCommitProtocolClass,
      jobId = java.util.UUID.randomUUID().toString,
      outputPath = outputPath.toString,
      isAppend = isAppend)
    // Decide whether to write at all; Overwrite may first delete matching partitions,
    // unless ifPartitionNotExists is set and the target partitions already exist.
    val doInsertion = (mode, pathExists) match {
      case (SaveMode.ErrorIfExists, true) =>
        throw new AnalysisException(s"path $qualifiedOutputPath already exists.")
      case (SaveMode.Overwrite, true) =>
        if (ifPartitionNotExists && matchingPartitions.nonEmpty) {
          false
        } else {
          deleteMatchingPartitions(fs, qualifiedOutputPath, customPartitionLocations, committer)
          true
        }
      case (SaveMode.Append, _) | (SaveMode.Overwrite, _) | (SaveMode.ErrorIfExists, false) =>
        true
      case (SaveMode.Ignore, exists) =>
        !exists
      case (s, exists) =>
        throw new IllegalStateException(s"unsupported save mode $s ($exists)")
    }
    if (doInsertion) {
      // Callback for updating metric and metastore partition metadata
      // after the insertion job completes.
      def refreshCallback(summary: Seq[ExecutedWriteSummary]): Unit = {
        val updatedPartitions = summary.flatMap(_.updatedPartitions)
          .distinct.map(PartitioningUtils.parsePathFragment)
        // Updating metrics.
        updateWritingMetrics(summary)
        // Updating metastore partition metadata.
        if (partitionsTrackedByCatalog) {
          val newPartitions = updatedPartitions.toSet -- initialMatchingPartitions
          if (newPartitions.nonEmpty) {
            AlterTableAddPartitionCommand(
              catalogTable.get.identifier, newPartitions.toSeq.map(p => (p, None)),
              ifNotExists = true).run(sparkSession)
          }
          if (mode == SaveMode.Overwrite) {
            val deletedPartitions = initialMatchingPartitions.toSet -- updatedPartitions
            if (deletedPartitions.nonEmpty) {
              AlterTableDropPartitionCommand(
                catalogTable.get.identifier, deletedPartitions.toSeq,
                ifExists = true, purge = false,
                retainData = true /* already deleted */).run(sparkSession)
            }
          }
        }
      }
      FileFormatWriter.write(
        sparkSession = sparkSession,
        plan = children.head,
        fileFormat = fileFormat,
        committer = committer,
        outputSpec = FileFormatWriter.OutputSpec(
          qualifiedOutputPath.toString, customPartitionLocations),
        hadoopConf = hadoopConf,
        partitionColumns = partitionColumns,
        bucketSpec = bucketSpec,
        refreshFunction = refreshCallback,
        options = options)
      // refresh cached files in FileIndex
      fileIndex.foreach(_.refresh())
      // refresh data cache if table is cached
      sparkSession.catalog.refreshByPath(outputPath.toString)
      if (catalogTable.nonEmpty) {
        CommandUtils.updateTableStats(sparkSession, catalogTable.get)
      }
    } else {
      logInfo("Skipping insertion into a relation that already exists.")
    }
    Seq.empty[Row]
  }
  /**
   * Deletes all partition files that match the specified static prefix. Partitions with custom
   * locations are also cleared based on the custom locations map given to this class.
   */
  private def deleteMatchingPartitions(
      fs: FileSystem,
      qualifiedOutputPath: Path,
      customPartitionLocations: Map[TablePartitionSpec, String],
      committer: FileCommitProtocol): Unit = {
    val staticPartitionPrefix = if (staticPartitions.nonEmpty) {
      "/" + partitionColumns.flatMap { p =>
        staticPartitions.get(p.name) match {
          case Some(value) =>
            Some(escapePathName(p.name) + "=" + escapePathName(value))
          case None =>
            None
        }
      }.mkString("/")
    } else {
      ""
    }
    // first clear the path determined by the static partition keys (e.g. /table/foo=1)
    val staticPrefixPath = qualifiedOutputPath.suffix(staticPartitionPrefix)
    if (fs.exists(staticPrefixPath) && !committer.deleteWithJob(fs, staticPrefixPath, true)) {
      throw new IOException(s"Unable to clear output " +
        s"directory $staticPrefixPath prior to writing to it")
    }
    // now clear all custom partition locations (e.g. /custom/dir/where/foo=2/bar=4)
    for ((spec, customLoc) <- customPartitionLocations) {
      assert(
        (staticPartitions.toSet -- spec).isEmpty,
        "Custom partition location did not match static partitioning keys")
      val path = new Path(customLoc)
      if (fs.exists(path) && !committer.deleteWithJob(fs, path, true)) {
        throw new IOException(s"Unable to clear partition " +
          s"directory $path prior to writing to it")
      }
    }
  }
  /**
   * Given a set of input partitions, returns those that have locations that differ from the
   * Hive default (e.g. /k1=v1/k2=v2). These partitions were manually assigned locations by
   * the user.
   *
   * @return a mapping from partition specs to their custom locations
   */
  private def getCustomPartitionLocations(
      fs: FileSystem,
      table: CatalogTable,
      qualifiedOutputPath: Path,
      partitions: Seq[CatalogTablePartition]): Map[TablePartitionSpec, String] = {
    partitions.flatMap { p =>
      val defaultLocation = qualifiedOutputPath.suffix(
        "/" + PartitioningUtils.getPathFragment(p.spec, table.partitionSchema)).toString
      val catalogLocation = new Path(p.location).makeQualified(
        fs.getUri, fs.getWorkingDirectory).toString
      if (catalogLocation != defaultLocation) {
        Some(p.spec -> catalogLocation)
      } else {
        None
      }
    }.toMap
  }
}
| VigneshMohan1/spark-branch-2.3 | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala | Scala | apache-2.0 | 10,119 |
package im.actor.server.api.rpc.service.auth
import java.util.regex.Pattern
import scalaz._
import scalaz.syntax.all._
import slick.dbio._
import im.actor.api.rpc._
import im.actor.util.misc.StringUtils
/** Validation helpers for the auth RPC service; results use scalaz disjunctions. */
private[auth] trait Helpers extends PublicKeyHelpers {
  // Permissive email check: unicode word chars/dots/hyphens before '@', at least one
  // 2+ char domain label, and a 2-6 letter TLD.
  // NOTE(review): modern TLDs can exceed 6 letters — confirm this limit is intended.
  val emailPattern = Pattern.compile("""^[-.\\w]+@(?:[a-z\\d]{2,}\\.)+[a-z]{2,6}$""", Pattern.UNICODE_CHARACTER_CLASS)
  /** Returns the string on the right if it matches the email pattern, otherwise an error. */
  def matchesEmail(s: String): \\/[NonEmptyList[String], String] =
    if (emailPattern.matcher(s).matches) s.right else "Should be valid email address".wrapNel.left
  /** Non-empty check combined with the email pattern check. */
  def validEmail(email: String): \\/[NonEmptyList[String], String] =
    StringUtils.nonEmptyString(email).flatMap(matchesEmail)
  /** A public key is valid iff it is non-empty. */
  def validPublicKey(k: Array[Byte]): \\/[NonEmptyList[String], Array[Byte]] =
    if (k.isEmpty) "Should be nonempty".wrapNel.left else k.right
  /** Builds a non-retryable 400 RpcError joining all validation messages. */
  def validationFailed(errorName: String, errors: NonEmptyList[String]): RpcError =
    RpcError(400, errorName, errors.toList.mkString(", "), false, None)
  /** Runs `f` with the name if it is valid, otherwise yields a NAME_INVALID error action. */
  def withValidName[A, E <: Effect](n: String)(f: String ⇒ DBIOAction[RpcError \\/ A, NoStream, E]): DBIOAction[RpcError \\/ A, NoStream, E] =
    StringUtils.validName(n).fold(
      x ⇒
        DBIO.successful(Error(validationFailed("NAME_INVALID", x))),
      f
    )
  /** Runs `f` with the key if it is valid, otherwise yields a PUBLIC_KEY_INVALID error action. */
  def withValidPublicKey[A, E <: Effect](k: Array[Byte])(f: Array[Byte] ⇒ DBIOAction[RpcError \\/ A, NoStream, E]): DBIOAction[RpcError \\/ A, NoStream, E] =
    validPublicKey(k).fold(
      x ⇒
        DBIO.successful(Error(validationFailed("PUBLIC_KEY_INVALID", x))),
      f
    )
}
| WangCrystal/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/auth/Helpers.scala | Scala | mit | 1,570 |
/*
Spot is a bot, implementing a subset of AIML, and some extensions.
Copyright (C) 2016 Marius Feteanu
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.spotai
package template
import org.scalatest._
/** Verifies value equality and content of TemplateGetName instances. */
class TemplateGetNameSpec extends FlatSpec with Matchers {
  behavior of "A TemplateGetName."
  // Case-class-style structural equality: same source string implies equal instances.
  it must "be equal to another if created from the same String" in {
    TemplateGetName("XYZ") shouldBe TemplateGetName("XYZ")
  }
  it must "be different from another if created from different String" in {
    TemplateGetName("XYZ") should not be TemplateGetName("ABC")
  }
  // The wrapped name is exposed unchanged via the `name` member.
  it must "have a content equal to the string it was created from" in {
    TemplateGetName("XYZ").name shouldBe "XYZ"
  }
}
| mariusfeteanu/spot | src/test/scala/com/spotai/template/TemplateGetNameSpec.scala | Scala | gpl-3.0 | 1,277 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel
package scala.dsl;
import builder.RouteBuilder
import junit.framework.Assert._
/**
* Test case for try (attempt) - catch (handle) - finally (ensure)
*/
class TryCatchFinallyTest extends ScalaTestSupport {
  var handled = false;
  /**
   * Sends two messages (a String and the Int 256) to direct:a.
   * The String passes through to mock:a; the Int triggers the failing processor,
   * is handled and routed to mock:b; both always reach mock:c via `ensure`.
   */
  def testTryCatchFinally = {
    "mock:a" expect { _.count = 1 }
    "mock:b" expect { _.count = 1 }
    "mock:c" expect { _.count = 2 }
    test {
      "direct:a" ! ("any given message", 256)
    }
  }
  val builder =
    new RouteBuilder {
      // Throws for any non-String body, exercising the handle (catch) branch.
      val failingProcessor = (exchange: Exchange) => {
        exchange.in match {
          case text: String => //graciously do nothing
          case _ => throw new RuntimeException("Strings are good, the rest is bad")
        }
      }
      val catchProcessor = (exchange: Exchange) => {
        // we shouldn't get any Strings here
        assertFalse(exchange.getIn().getBody().getClass().equals(classOf[String]))
        // the exchange shouldn't have been marked failed
        assertFalse(exchange.isFailed)
      }
      //START SNIPPET: block
      "direct:a" ==> {
        attempt {
          process(failingProcessor)
          to ("mock:a")
        } handle(classOf[Exception]) apply {
          process(catchProcessor)
          to ("mock:b")
        } ensure {
          to ("mock:c");
        }
      }
      //END SNIPPET: block
    }
}
| cexbrayat/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/TryCatchFinallyTest.scala | Scala | apache-2.0 | 2,229 |
package skuber
/**
* @author David O'Riordan
* Maintain API-relevant details for a resource type in Kubernetes
* Each resource type O (e.g. Pod) implemented in skuber defines an implicit value of type
* ResourceDefinition[O] in its companion object which has a method to return an ResourceSpecification.
* When a client invokes a skuber API method on a resource of type O then that value gets implicitly
* passed to the method, which provides skuber with the details required to set the URL for the remote call.
* ResourceSpecification mirrors the specification of the CustomResourceDefinition type
* introduced in Kubernetes V1.7, and the CRD case class utilises it for that. It is an abstract
* base class with two concrete case subclasses, for core and non-core API group resource types respectively.
*/
abstract class ResourceSpecification {
  // URL path prefix under which the resource type is served ("api" or "apis")
  def apiPathPrefix: String
  def group: Option[String] // set to None if defined on core API group, otherwise Some(groupName)
  // API version string, e.g. "v1"
  def version: String
  // whether the resource is namespaced or cluster-scoped
  def scope: ResourceSpecification.Scope.Value
  // naming details (plural/singular/kind/short names) used in API paths and manifests
  def names: ResourceSpecification.Names
}
object ResourceSpecification {
  /** Whether resources of a type live inside a namespace or at cluster scope. */
  object Scope extends Enumeration {
    type ResourceScope = Value
    val Namespaced, Cluster = Value
  }
  /** Naming metadata for a resource type, mirroring CRD `names`. */
  case class Names(
    plural: String,
    singular: String,
    kind: String,
    shortNames: List[String] // these abbreviations are only really useful at the moment with CRDs
  )
}
/**
 * Specification for a resource type defined on the core API group.
 * Core-group resources are served under the "api" URL path prefix and
 * have no group name (group is None by default).
 */
case class CoreResourceSpecification(
    override val group: Option[String] = None,
    override val version: String = "v1",
    override val scope: ResourceSpecification.Scope.Value,
    override val names: ResourceSpecification.Names) extends ResourceSpecification {
  override def apiPathPrefix: String = "api"
}
/**
 * Specification for a resource type defined on a non-core API group.
 * Non-core resources are served under the "apis" URL path prefix; the
 * group name must be supplied explicitly.
 */
case class NonCoreResourceSpecification(
    override val group: Option[String],
    override val version: String,
    override val scope: ResourceSpecification.Scope.Value,
    override val names: ResourceSpecification.Names) extends ResourceSpecification {
  override def apiPathPrefix: String = "apis"
}
| minatjanster/skuber | client/src/main/scala/skuber/ResourceSpecification.scala | Scala | apache-2.0 | 2,077 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO2
package com.google.protobuf.compiler.plugin
// Generated by ScalaPB from google/protobuf/compiler/plugin.proto — do not hand-edit;
// regenerate instead. The base64 descriptor bytes below must stay byte-identical.
object PluginProto extends _root_.scalapb.GeneratedFileObject {
  // Files this proto file imports (here: descriptor.proto).
  lazy val dependencies: Seq[_root_.scalapb.GeneratedFileObject] = Seq(
    com.google.protobuf.descriptor.DescriptorProtoCompanion
  )
  // Companions for every top-level message declared in plugin.proto.
  lazy val messagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      com.google.protobuf.compiler.plugin.Version,
      com.google.protobuf.compiler.plugin.CodeGeneratorRequest,
      com.google.protobuf.compiler.plugin.CodeGeneratorResponse
    )
  // Serialized FileDescriptorProto, base64-encoded; decoded lazily below.
  private lazy val ProtoBytes: _root_.scala.Array[Byte] =
      scalapb.Encoding.fromBase64(scala.collection.immutable.Seq(
  """CiVnb29nbGUvcHJvdG9idWYvY29tcGlsZXIvcGx1Z2luLnByb3RvEhhnb29nbGUucHJvdG9idWYuY29tcGlsZXIaIGdvb2dsZ
  S9wcm90b2J1Zi9kZXNjcmlwdG9yLnByb3RvIpQBCgdWZXJzaW9uEiAKBW1ham9yGAEgASgFQgriPwcSBW1ham9yUgVtYWpvchIgC
  gVtaW5vchgCIAEoBUIK4j8HEgVtaW5vclIFbWlub3ISIAoFcGF0Y2gYAyABKAVCCuI/BxIFcGF0Y2hSBXBhdGNoEiMKBnN1ZmZpe
  BgEIAEoCUIL4j8IEgZzdWZmaXhSBnN1ZmZpeCK8AgoUQ29kZUdlbmVyYXRvclJlcXVlc3QSPQoQZmlsZV90b19nZW5lcmF0ZRgBI
  AMoCUIT4j8QEg5maWxlVG9HZW5lcmF0ZVIOZmlsZVRvR2VuZXJhdGUSLAoJcGFyYW1ldGVyGAIgASgJQg7iPwsSCXBhcmFtZXRlc
  lIJcGFyYW1ldGVyElMKCnByb3RvX2ZpbGUYDyADKAsyJC5nb29nbGUucHJvdG9idWYuRmlsZURlc2NyaXB0b3JQcm90b0IO4j8LE
  glwcm90b0ZpbGVSCXByb3RvRmlsZRJiChBjb21waWxlcl92ZXJzaW9uGAMgASgLMiEuZ29vZ2xlLnByb3RvYnVmLmNvbXBpbGVyL
  lZlcnNpb25CFOI/ERIPY29tcGlsZXJWZXJzaW9uUg9jb21waWxlclZlcnNpb24izgMKFUNvZGVHZW5lcmF0b3JSZXNwb25zZRIgC
  gVlcnJvchgBIAEoCUIK4j8HEgVlcnJvclIFZXJyb3ISRQoSc3VwcG9ydGVkX2ZlYXR1cmVzGAIgASgEQhbiPxMSEXN1cHBvcnRlZ
  EZlYXR1cmVzUhFzdXBwb3J0ZWRGZWF0dXJlcxJTCgRmaWxlGA8gAygLMjQuZ29vZ2xlLnByb3RvYnVmLmNvbXBpbGVyLkNvZGVHZ
  W5lcmF0b3JSZXNwb25zZS5GaWxlQgniPwYSBGZpbGVSBGZpbGUaiwEKBEZpbGUSHQoEbmFtZRgBIAEoCUIJ4j8GEgRuYW1lUgRuY
  W1lEjwKD2luc2VydGlvbl9wb2ludBgCIAEoCUIT4j8QEg5pbnNlcnRpb25Qb2ludFIOaW5zZXJ0aW9uUG9pbnQSJgoHY29udGVud
  BgPIAEoCUIM4j8JEgdjb250ZW50Ugdjb250ZW50ImkKB0ZlYXR1cmUSIwoMRkVBVFVSRV9OT05FEAAaEeI/DhIMRkVBVFVSRV9OT
  05FEjkKF0ZFQVRVUkVfUFJPVE8zX09QVElPTkFMEAEaHOI/GRIXRkVBVFVSRV9QUk9UTzNfT1BUSU9OQUxCZwocY29tLmdvb2dsZ
  S5wcm90b2J1Zi5jb21waWxlckIMUGx1Z2luUHJvdG9zWjlnaXRodWIuY29tL2dvbGFuZy9wcm90b2J1Zi9wcm90b2MtZ2VuLWdvL
  3BsdWdpbjtwbHVnaW5fZ28="""
  ).mkString)
  lazy val scalaDescriptor: _root_.scalapb.descriptors.FileDescriptor = {
    val scalaProto = com.google.protobuf.descriptor.FileDescriptorProto.parseFrom(ProtoBytes)
    _root_.scalapb.descriptors.FileDescriptor.buildFrom(scalaProto, dependencies.map(_.scalaDescriptor))
  }
  lazy val javaDescriptor: com.google.protobuf.Descriptors.FileDescriptor = {
    val javaProto = com.google.protobuf.DescriptorProtos.FileDescriptorProto.parseFrom(ProtoBytes)
    com.google.protobuf.Descriptors.FileDescriptor.buildFrom(javaProto, _root_.scala.Array(
      com.google.protobuf.descriptor.DescriptorProtoCompanion.javaDescriptor
    ))
  }
  @deprecated("Use javaDescriptor instead. In a future version this will refer to scalaDescriptor.", "ScalaPB 0.5.47")
  def descriptor: com.google.protobuf.Descriptors.FileDescriptor = javaDescriptor
} | scalapb/ScalaPB | scalapb-runtime/src/main/js-native/com/google/protobuf/compiler/plugin/PluginProto.scala | Scala | apache-2.0 | 3,303 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.stubs.StubElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
/**
* User: Alexander Podkhalyuzin
* Date: 18.06.2009
*/
trait ScImportStmtStub extends StubElement[ScImportStmt] {
  // The text of the import statement as stored in the stub.
  // NOTE(review): presumably includes the `import` keyword — verify against the implementation.
  def importText: String
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/ScImportStmtStub.scala | Scala | apache-2.0 | 345 |
/*
* Copyright (C) 2011 Mathias Doenitz
* Adapted and extended in 2016 by Eugene Yokota
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sjsonnew
package support.spray
import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import scala.Right
/** Tests for the standard Option/Either formats via the spray-json backend. */
object StandardFormatsSpec extends verify.BasicTestSuite with BasicJsonProtocol {
  // Both fields optional so None-field omission can be asserted.
  case class Person(name: Option[String], value: Option[Int])
  /** Hand-written format: writes name/value fields, reads them back as Options. */
  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Option[String]]("name")
          val value = unbuilder.readField[Option[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }
  test("The optionFormat") {
    // "convert None to JsNull"
    Predef.assert(Converter.toJsonUnsafe(None.asInstanceOf[Option[Int]]) == JsNull)
    // "convert JsNull to None"
    Predef.assert(Converter.fromJsonUnsafe[Option[Int]](JsNull) == None)
    // "convert Some(Hello) to JsString(Hello)"
    Predef.assert(Converter.toJsonUnsafe(Some("Hello").asInstanceOf[Option[String]]) == JsString("Hello"))
    // "convert JsString(Hello) to Some(Hello)"
    Predef.assert(Converter.fromJsonUnsafe[Option[String]](JsString("Hello")) == Some("Hello"))
    // "omit None fields"
    Predef.assert(Converter.toJsonUnsafe(Person(None, None)) == JsObject())
  }
  test("The eitherFormat") {
    val a: Either[Int, String] = Left(42)
    val b: Either[Int, String] = Right("Hello")
    // "convert the left side of an Either value to Json"
    Predef.assert(Converter.toJsonUnsafe(a) == JsNumber(42))
    // "convert the right side of an Either value to Json"
    Predef.assert(Converter.toJsonUnsafe(b) == JsString("Hello"))
    // "convert the left side of an Either value from Json"
    Predef.assert(Converter.fromJsonUnsafe[Either[Int, String]](JsNumber(42)) == Left(42))
    // "convert the right side of an Either value from Json"
    Predef.assert(Converter.fromJsonUnsafe[Either[Int, String]](JsString("Hello")) == Right("Hello"))
  }
}
| eed3si9n/sjson-new | support/spray/src/test/scala/sjsonnew/support/spray/StandardFormatsSpec.scala | Scala | apache-2.0 | 2,986 |
package controllers
import java.net.URLDecoder
import java.util.UUID
import javax.inject.Inject
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import models.services.{ AuthTokenService, UserService }
import play.api.i18n.{ I18nSupport, Messages, MessagesApi }
import play.api.libs.concurrent.Execution.Implicits._
import play.api.libs.mailer.{ Email, MailerClient }
import play.api.mvc.Controller
import utils.auth.DefaultEnv
import scala.concurrent.Future
import scala.language.postfixOps
/**
* The `Activate Account` controller.
*
* @param messagesApi The Play messages API.
* @param silhouette The Silhouette stack.
* @param userService The user service implementation.
* @param authTokenService The auth token service implementation.
* @param mailerClient The mailer client.
* @param webJarAssets The WebJar assets locator.
*/
class ActivateAccountController @Inject() (
  val messagesApi: MessagesApi,
  silhouette: Silhouette[DefaultEnv],
  userService: UserService,
  authTokenService: AuthTokenService,
  mailerClient: MailerClient
)
  extends Controller with I18nSupport {

  /**
   * Sends an account activation email to the user with the given email.
   *
   * The redirect and flash message are identical whether the account exists,
   * does not exist, or is already activated, so this endpoint does not leak
   * which email addresses are registered.
   *
   * @param email The email address of the user to send the activation mail to.
   * @return The result to display.
   */
  def send(email: String) = silhouette.UnsecuredAction.async { implicit request =>
    val decodedEmail = URLDecoder.decode(email, "UTF-8")
    val loginInfo = LoginInfo(CredentialsProvider.ID, decodedEmail)
    // Shared success result so every outcome looks the same to the caller.
    val result = Redirect(routes.SignInController.view()).flashing("info" -> Messages("activation.email.sent", decodedEmail))
    userService.retrieve(loginInfo).flatMap {
      case Some(user) if !user.activated =>
        authTokenService.create(user.userID).map { authToken =>
          val url = routes.ActivateAccountController.activate(authToken.id).absoluteURL()

          mailerClient.send(Email(
            subject = Messages("email.activate.account.subject"),
            from = Messages("email.from"),
            to = Seq(decodedEmail),
            bodyText = Some(views.txt.emails.activateAccount(user, url).body),
            bodyHtml = Some(views.html.emails.activateAccount(user, url).body)
          ))
          result
        }
      // FIX: previously only `case None` was handled, so requesting activation
      // for an already-activated account (Some(user) with user.activated) fell
      // through the match and raised a MatchError (HTTP 500). A wildcard gives
      // unknown and already-activated accounts the same quiet redirect.
      case _ => Future.successful(result)
    }
  }

  /**
   * Activates an account.
   *
   * Validates the token, then marks the matching credentials-based user as
   * activated. Invalid/expired tokens and non-credentials users all yield the
   * same "invalid activation link" error.
   *
   * @param token The token to identify a user.
   * @return The result to display.
   */
  def activate(token: UUID) = silhouette.UnsecuredAction.async { implicit request =>
    authTokenService.validate(token).flatMap {
      case Some(authToken) => userService.retrieve(authToken.userID).flatMap {
        // Only accounts registered via the credentials provider can be activated.
        case Some(user) if user.loginInfo.providerID == CredentialsProvider.ID =>
          userService.save(user.copy(activated = true)).map { _ =>
            Redirect(routes.SignInController.view()).flashing("success" -> Messages("account.activated"))
          }
        case _ => Future.successful(Redirect(routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link")))
      }
      case None => Future.successful(Redirect(routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link")))
    }
  }
}
| serversideapps/silhmojs | server/app/controllers/ActivateAccountController.scala | Scala | apache-2.0 | 3,299 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.interpreter.sparkr
import java.net.URL
import org.apache.toree.interpreter.Results.Result
import org.apache.toree.interpreter._
import org.apache.toree.kernel.api.KernelLike
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.slf4j.LoggerFactory
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.tools.nsc.interpreter.{InputStream, OutputStream}
/**
* Represents an interpreter interface to SparkR. Requires a properly-set
* SPARK_HOME pointing to a binary distribution (needs packaged SparkR library)
* and an implementation of R on the path.
*
*/
class SparkRInterpreter(
) extends Interpreter {
  private val logger = LoggerFactory.getLogger(this.getClass)

  // Set via init(); the lazy vals below capture it, so init() must run first.
  private var _kernel: KernelLike = _

  // TODO: Replace hard-coded maximum queue count
  /** Represents the state used by this interpreter's R instance. */
  private lazy val sparkRState = new SparkRState(500)

  /** Represents the bridge used by this interpreter's R instance. */
  private lazy val sparkRBridge = SparkRBridge(
    sparkRState,
    _kernel
  )

  /** Represents the interface for R to talk to JVM Spark components. */
  private lazy val rBackend = new ReflectiveRBackend

  /** Represents the process handler used for the SparkR process. */
  private lazy val sparkRProcessHandler: SparkRProcessHandler =
    new SparkRProcessHandler(
      sparkRBridge,
      restartOnFailure = true,
      restartOnCompletion = true
    )

  // Service wiring the backend, bridge, and process handler together.
  private lazy val sparkRService = new SparkRService(
    rBackend,
    sparkRBridge,
    sparkRProcessHandler
  )
  // Converts raw SparkR results into interpreter results.
  private lazy val sparkRTransformer = new SparkRTransformer

  /** Stores the kernel reference; must be invoked before interpretation. */
  override def init(kernel: KernelLike): Interpreter = {
    _kernel = kernel
    this
  }

  /**
   * Executes the provided code with the option to silence output.
   * @param code The code to execute
   * @param silent Whether or not to execute the code silently (no output)
   * @return The success/failure of the interpretation and the output from the
   *         execution or the failure
   */
  override def interpret(code: String, silent: Boolean):
    (Result, Either[ExecuteOutput, ExecuteFailure]) =
  {
    // Lazily (re)start the backing R process on first use.
    if (!sparkRService.isRunning) sparkRService.start()

    // NOTE(review): `silent` is currently ignored here — confirm whether
    // silent execution is expected to suppress output for SparkR.
    val futureResult = sparkRTransformer.transformToInterpreterResult(
      sparkRService.submitCode(code)
    )

    // Blocks the calling thread indefinitely until R finishes the submission.
    Await.result(futureResult, Duration.Inf)
  }

  /**
   * Starts the interpreter, initializing any internal state.
   * @return A reference to the interpreter
   */
  override def start(): Interpreter = {
    sparkRService.start()

    this
  }

  /**
   * Stops the interpreter, removing any previous internal state.
   * @return A reference to the interpreter
   */
  override def stop(): Interpreter = {
    sparkRService.stop()

    this
  }

  /**
   * Returns the class loader used by this interpreter.
   *
   * @return The runtime class loader used by this interpreter
   */
  override def classLoader: ClassLoader = this.getClass.getClassLoader

  // Unsupported (but can be invoked)
  override def lastExecutionVariableName: Option[String] = None

  // Unsupported (but can be invoked)
  override def read(variableName: String): Option[AnyRef] = None

  // Unsupported (but can be invoked)
  override def completion(code: String, pos: Int): (Int, List[String]) =
    (pos, Nil)

  // Unsupported — throws NotImplementedError (???) if called
  override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???

  // Unsupported — returns an empty URI rather than failing
  override def classServerURI: String = ""

  // Unsupported (but can be invoked)
  override def bindSparkContext(sparkContext: SparkContext): Unit = {}

  // Unsupported (but can be invoked)
  override def bindSqlContext(sqlContext: SQLContext): Unit = {}

  // Unsupported — throws NotImplementedError (???) if called
  override def interrupt(): Interpreter = ???

  // Unsupported — throws NotImplementedError (???) if called
  override def bind(variableName: String, typeName: String, value: Any, modifiers: List[String]): Unit = ???

  // Unsupported — throws NotImplementedError (???) if called
  override def addJars(jars: URL*): Unit = ???

  // Unsupported — throws NotImplementedError (???) if called
  override def doQuietly[T](body: => T): T = ???
}
| asorianostratio/incubator-toree | sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala | Scala | apache-2.0 | 4,897 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mapper
import org.specs2.mutable.Specification
import common._
import util._
import http.{S, LiftSession}
/**
* Systems under specification for DB.
*/
class DbSpec extends Specification {
  "DB Specification".title

  // H2 in-memory database keeps the spec fully self-contained.
  val provider = DbProviders.H2MemoryProvider
  val logF = Schemifier.infoF _

  // Drop and recreate the User table so the example starts from a clean slate.
  def cleanup(): Unit = {
    provider.setupDB
    Schemifier.destroyTables_!!(DefaultConnectionIdentifier, logF, User)
    Schemifier.schemify(true, logF, DefaultConnectionIdentifier, User)
  }

  "DB" should {
    "collect queries when queryCollector is added as logFunc" in {
      cleanup()
      DB.addLogFunc(DB.queryCollector)

      // Capture the statements handed to the analyzer after the request runs.
      var recordedStatements: List[(String, Long)] = Nil
      S.addAnalyzer((_, _, stmts) => recordedStatements = stmts)

      val session = new LiftSession("hello", "", Empty)
      val foundUser = S.initIfUninitted(session) {
        val lookup = User.find(By(User.firstName, "Elwood"))
        // Exactly one SQL query should have been logged inside the session.
        S.queryLog.size must_== 1
        lookup
      }

      recordedStatements.size must_== 1
      foundUser.map(_.firstName.get) must_== Full("Elwood")
    }
  }
}
| lift/framework | persistence/mapper/src/test/scala/net/liftweb/mapper/DbSpec.scala | Scala | apache-2.0 | 1,739 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.common.AppInfo
import kafka.utils.Logging
class KafkaServerStartable(val serverConfig: KafkaConfig) extends Logging {
  private val server = new KafkaServer(serverConfig)

  /**
   * Starts the wrapped Kafka server and registers application info.
   * On any fatal startup error the JVM exits with code 1.
   */
  def startup() {
    try {
      server.startup()
      AppInfo.registerInfo()
    }
    catch {
      case e: Throwable =>
        fatal("Fatal error during KafkaServerStartable startup. Prepare to shutdown", e)
        // KafkaServer already calls shutdown() internally, so this is purely for logging & the exit code
        System.exit(1)
    }
  }

  /**
   * Shuts the wrapped Kafka server down.
   * On any fatal shutdown error the JVM exits with code 1.
   */
  def shutdown() {
    try {
      server.shutdown()
    }
    catch {
      case e: Throwable =>
        // FIX: message previously read "KafkaServerStable" (typo in class name).
        fatal("Fatal error during KafkaServerStartable shutdown. Prepare to halt", e)
        System.exit(1)
    }
  }

  /**
   * Allow setting broker state from the startable.
   * This is needed when a custom kafka server startable want to emit new states that it introduces.
   */
  def setServerState(newState: Byte) {
    server.brokerState.newState(newState)
  }

  /** Blocks until the wrapped server has completed shutdown. */
  def awaitShutdown() =
    server.awaitShutdown
}
| DavidAlphaFox/kafka | core/src/main/scala/kafka/server/KafkaServerStartable.scala | Scala | apache-2.0 | 1,910 |
package com.holdenkarau.spark.testing
class MultipleDataFrameSuites extends DataFrameSuiteBase {
  // Trivial assertion only; presumably this suite exists to verify that more
  // than one suite can extend DataFrameSuiteBase — TODO confirm intent.
  test("test nothing") {
    assert(1 === 1)
  }
}
| mahmoudhanafy/spark-testing-base | src/test/1.3/scala/com/holdenkarau/spark/testing/MultipleDataFrameSuites.scala | Scala | apache-2.0 | 149 |
package im.actor.server.model.social
/** Status of a relation between two peers, carried as an integer code. */
final case class RelationStatus(intValue: Int)

object RelationStatus {
  /** The relation is approved (code 0). */
  val Approved: RelationStatus = RelationStatus(0)

  /** The relation is blocked (code 1). */
  val Blocked: RelationStatus = RelationStatus(1)
}
package blended.security.ssl.internal
import java.io.File
import blended.security.ssl.{CertificateChange, CertificateRequestBuilder, CertificateSigner, MemoryKeystore, SecurityTestSupport}
import blended.testsupport.BlendedTestSupport
import blended.testsupport.scalatest.LoggingFreeSpec
import org.scalatest.matchers.should.Matchers
class TrustStoreRefresherSpec extends LoggingFreeSpec
with Matchers
with SecurityTestSupport
with CertificateRequestBuilder
with CertificateSigner {
"The truststore refresher" - {
val pwd : String = "trust"
val ms : MemoryKeystore = MemoryKeystore(Map("root" -> createRootCertificate(cn = "root").get.copy(change = CertificateChange.Added)))
"not update anything if the truststore properties are not set" in {
System.clearProperty(SslContextProvider.propTrustStorePwd)
System.clearProperty(SslContextProvider.propTrustStore)
val trustStore : Option[MemoryKeystore] = new TrustStoreRefresher(ms).refreshTruststore().get
trustStore should be(empty)
}
"update the truststore with all missing root certificates from a given key store" in {
val f : File = new File(BlendedTestSupport.projectTestOutput, "trust.jks")
f.delete()
System.setProperty(SslContextProvider.propTrustStorePwd, pwd)
System.setProperty(SslContextProvider.propTrustStore, f.getAbsolutePath())
val trustStore : Option[MemoryKeystore] = new TrustStoreRefresher(ms).refreshTruststore().get
trustStore should be(defined)
trustStore.get.certificates should have size 1
val updated : MemoryKeystore = new JavaKeystore(f, pwd.toCharArray(), None).loadKeyStore().get
updated.certificates should have size 1
}
"do not update the truststore if all root certificates from a given keystore already exist" in {
val f : File = new File(BlendedTestSupport.projectTestOutput, "trust.jks")
f.delete()
val jks : JavaKeystore = new JavaKeystore(f, pwd.toCharArray(), None)
jks.saveKeyStore(ms)
System.setProperty(SslContextProvider.propTrustStorePwd, pwd)
System.setProperty(SslContextProvider.propTrustStore, f.getAbsolutePath())
val trustStore : Option[MemoryKeystore] = new TrustStoreRefresher(ms).refreshTruststore().get
trustStore should be(defined)
trustStore.get.certificates should have size 1
val updated : MemoryKeystore = new JavaKeystore(f, pwd.toCharArray(), None).loadKeyStore().get
updated.certificates should have size 1
}
}
}
| woq-blended/blended | blended.security.ssl/src/test/scala/blended/security/ssl/internal/TrustStoreRefresherSpec.scala | Scala | apache-2.0 | 2,531 |
// 1EC Graph Parser
// Copyright (c) University of California
// Copyright (c) Jonathan Kummerfeld
//
// This software is covered by a license. See the LICENSE.txt file in the
// top-level directory of this distribution or at
// https://github.com/jkkummerfeld/1ec-graph-parser for the full text of the
// license.
package edu.berkeley.nlp.graphparser.psg
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Queue}
import edu.berkeley.nlp.graphparser.Config
import edu.berkeley.nlp.graphparser.Parse
/** A single node in a parse. The instance is defined relative to a spine,
* since it coontains the count and nullDepth (which depend on the surrounding
* structure).
*
* @param symbol the core non-terminal string (e.g. NP)
* @param functions any function tags (e.g. -LOC-CLR)
* @param count to distinguish repetition of the same node in a spine
* @param nullDepth the depth within null spans (e.g. 0 for non-null)
*/
@SerialVersionUID(1L)
class Node(
  val symbol: String,
  val functions: Vector[String],
  val count: Int,
  val nullDepth: Int
) extends Serializable {
  // A node is a null element iff it sits inside at least one null span.
  @transient lazy val isNull = nullDepth > 0
  // e.g. symbol "NP" with functions ["SBJ"] renders as "NP-SBJ".
  @transient lazy val symbolToString = symbol + functions.map("-"+ _).mkString("")
  override def toString() = s"$symbolToString,$nullDepth.$count"

  def canEqual(other: Any): Boolean =
    other.isInstanceOf[Node]

  /** Value equality; `ignoreNullDepth` relaxes the check to ignore depth. */
  def equals(other: Any, ignoreNullDepth: Boolean): Boolean =
    other match {
      case that: Node =>
        (that canEqual this) &&
        that.symbol == symbol &&
        that.functions == functions &&
        that.count == count &&
        (ignoreNullDepth || that.nullDepth == nullDepth)
      case _ => false
    }
  override def equals(other: Any): Boolean = equals(other, false)

  // FIX: equals is value-based but hashCode was inherited (identity-based),
  // violating the Object contract — equal Nodes could land in different
  // buckets of hashed collections (e.g. the (Int, Node)-keyed HashMap in
  // Graph.toSpans). Hash exactly the fields strict equals compares.
  override def hashCode(): Int = (symbol, functions, count, nullDepth).##
}
object Node {
  /** Sentinel node used where no real non-terminal applies. */
  val None = new Node("NO_TAG", Vector.empty[String], 0, 0)

  /** Artificial node representing the root of a parse. */
  val Root = new Node("ROOT", Vector.empty[String], 0, 0)

  /** Factory mirroring the primary constructor of [[Node]]. */
  def apply(
    symbol: String, functions: Vector[String], count: Int, nullDepth: Int
  ): Node = new Node(symbol, functions, count, nullDepth)
}
/** A single edge in a parse.
*
* @param src index of the child word/spine
* @param srcNode symbol of the child
* @param target index of the parent word/spine
* @param targetNode symbol of the parent
* @param trace the trace type on this edge
*/
@SerialVersionUID(1L)
class Edge(
  val src: Int,
  val srcNode: Node,
  val target: Int,
  val targetNode: Node,
  val trace: String
) extends Serializable {
  // Non-traces may not link to/from null elements
  require(trace != "_" || (! srcNode.isNull && ! targetNode.isNull))

  // "_" is the placeholder label: anything else marks a trace edge.
  @transient lazy val isTrace = trace != "_"
  @transient lazy val isStructural = ! isTrace

  /** Renders the edge in the column format used by Graph.genStringLines. */
  override def toString() = {
    val targetString = targetNode.symbolToString
    if (isStructural) s"$target ${targetString}_${targetNode.count}"
    else {
      val targetNull = targetNode.isNull.toString()(0).toUpper
      val srcNull = srcNode.isNull.toString()(0).toUpper
      val srcString = srcNode.symbolToString
      target.toString +
      s" ${targetString}_${targetNode.count} $targetNull" +
      s" ${srcString}_${srcNode.count} $srcNull" +
      s" $trace"
    }
  }

  def canEqual(other: Any): Boolean =
    other.isInstanceOf[Edge]
  override def equals(other: Any): Boolean = equals(other, false)

  /** Value equality; `ignoreLabel` compares endpoints only. */
  def equals(other: Any, ignoreLabel: Boolean): Boolean =
    other match {
      case that: Edge =>
        (that canEqual this) &&
        that.src == src &&
        that.target == target &&
        (ignoreLabel || that.srcNode == srcNode) &&
        (ignoreLabel || that.targetNode == targetNode) &&
        (ignoreLabel || that.trace == trace)
      case _ => false
    }

  // FIX: equals is value-based but hashCode was inherited (identity-based),
  // violating the Object contract and making Edge unreliable as a key in
  // hashed collections (e.g. the HashSet[Edge] used by Graph.dag1ecEdges).
  // Equal edges always agree on src/target/trace, so hashing just those is
  // consistent with equals without depending on Node.hashCode.
  override def hashCode(): Int = (src, target, trace).##

  /** True when this edge and `other` cross (their spans strictly interleave). */
  def crosses(other: Edge) = {
    val thisLeft = src.min(target)
    val thisRight = src.max(target)
    val thatLeft = other.src.min(other.target)
    val thatRight = other.src.max(other.target)
    (thisLeft < thatLeft &&
      thatLeft < thisRight &&
      thisRight < thatRight) ||
    (thatLeft < thisLeft &&
      thisLeft < thatRight &&
      thatRight < thisRight)
  }
}
object Edge {
  /** Factory mirroring the primary constructor of [[Edge]]. */
  def apply(
    src: Int, srcNode: Node, target: Int, targetNode: Node, trace: String
  ): Edge = new Edge(src, srcNode, target, targetNode, trace)
}
/*
* The traces vector covers self-edges: edges that go from one point in the
* spine to another.
*/
@SerialVersionUID(1L)
class Spine(
  val nodes: Vector[Node],
  val traces: Vector[Edge]
) extends Serializable {
  /** Returns a copy of this spine with `trace` appended as a self-edge. */
  def withTrace(trace: Edge) = new Spine(nodes, traces :+ trace)

  /** Index within `nodes` of the first node matching all four attributes,
    * or -1 when absent. Note nullDepth itself is reduced to an isNull flag. */
  def posInNodes(
    symbol: String, functions: Vector[String], count: Int, isNull: Boolean
  ) : Int = {
    if (nodes.length == 0) -1
    else nodes.indexWhere{ s =>
      s.symbol == symbol &&
      s.functions == functions &&
      s.count == count &&
      s.isNull == isNull
    }
  }
  def posInNodes(node: Node) : Int =
    posInNodes(node.symbol, node.functions, node.count, node.isNull)

  /** The matching node itself, or null when absent (callers must check). */
  def getMatchingNode(
    symbol: String, functions: Vector[String], count: Int, isNull: Boolean
  ) = {
    val pos = posInNodes(symbol, functions, count, isNull)
    if (pos == -1) null
    else nodes(pos)
  }
  def getMatchingNode(node: Node) : Node =
    getMatchingNode(node.symbol, node.functions, node.count, node.isNull)

  // Renders nodes bottom-up, joined by "_", wrapping null spans in
  // parentheses according to each node's nullDepth (e.g. "VP_(NP_(*))_S").
  @transient lazy val nodesToString = {
    if (nodes.length == 0) "_"
    else {
      var depth = 0
      var ans = ""
      for (node <- nodes) {
        // Add the closing brackets for the previous node
        if (depth > 0) {
          for (i <- node.nullDepth until depth) ans += ")"
          if (node.nullDepth > 0 && node.nullDepth <= depth) ans += ")"
        }
        if (ans.length > 0) ans += "_"
        if (node.nullDepth > 0) ans += "("
        ans += node.symbolToString
        depth = node.nullDepth
      }
      ans
    }
  }
  @transient lazy val tracesToString : String =
    traces.map{ t =>
      s"${t.srcNode} ${t.targetNode} ${t.trace}"
    }.mkString(" ")
  override def toString() : String =
    nodesToString +" "+ tracesToString

  def canEqual(other: Any): Boolean =
    other.isInstanceOf[Spine]
  override def equals(other: Any): Boolean =
    equals(other, false)
  // NOTE(review): value-based equals without a matching hashCode override —
  // avoid using Spine as a key in hashed collections until that is fixed.
  /** Value equality; `ignoreTraceEdges` compares only the node sequence. */
  def equals(other: Any, ignoreTraceEdges: Boolean): Boolean =
    other match {
      case that: Spine =>
        (that canEqual this) &&
        that.nodes == nodes &&
        (ignoreTraceEdges || that.traces == traces)
      case _ => false
    }
}
object Spine {
  /** Parses a [[Spine]] from its underscore-delimited text form.
    *
    * Null elements are wrapped in parentheses; the nesting depth of the
    * parentheses becomes each node's nullDepth. Repeated (symbol, functions,
    * isNull) combinations are numbered via `count` so duplicates stay
    * distinct. The resulting spine has no trace edges; those are attached
    * separately. When Config.keepTraces is false, nodes inside null spans
    * are dropped entirely.
    */
  def fromText(text: String) = {
    // Example:
    // VP_VP_(NP-SBJ_(*))_S_VP_VP_S
    val counts = HashMap[(String, Vector[String], Boolean), Int]()
    val nodes = ArrayBuffer[Node]()
    if (text != "_") {
      var depth = 0
      for (part <- text.split("_")) {
        // Track depth of traces
        if (part.contains("(")) depth += 1
        if (Config.keepTraces || depth == 0) {
          // Strip brackets, then split the symbol from function tags
          // ("NP-SBJ" -> symbol "NP", functions ["SBJ"]).
          val symbolParts = part.filter(c => c != '(' && c != ')').split("-")
          val symbol = symbolParts.head
          val functions = symbolParts.tail.toVector
          val key = (symbol, functions, depth > 0)
          val count = counts.getOrElse(key, -1) + 1
          counts(key) = count
          nodes.append(new Node(symbol, functions, count, depth))
        }
        // There can only be one open bracket, but there could be multiple
        // close brackets. For example:
        //   (NP-SBJ_(*))
        for (char <- part) if (char == ')') depth -= 1
      }
    }
    new Spine(nodes.toVector, Vector[Edge]())
  }
}
/** A parse represented as one spine per token plus edges between spine nodes.
  * Supports graph-structured (non-tree) analyses: traces, cycles, crossing
  * edges. Extends the Parse base class (defined elsewhere in this package).
  */
class Graph(
  sentence: String,
  tokens: Vector[String],
  tags: Vector[String],
  val spines: Vector[Spine],
  val edges: Vector[Edge]
) extends Parse(sentence, tokens, tags) {
  /** Renders one aligned text column row per token: number, token, tag,
    * spine, structural parent, parent symbol, and trace edges. The three
    * flags blank out ("___") the parts not requested. */
  def genStringLines(
    spinesOnly: Boolean, arcsOnly: Boolean, tracesOnly: Boolean
  ) = {
    // Running maximum width of each of the six leading columns.
    val maxes = Array(0, 0, 0, 0, 0, 0)
    val lines = for (position <- 0 until tags.length) yield {
      val number = (position+1).toString
      maxes(0) = maxes(0).max(number.length)
      val token = tokens(position)
      maxes(1) = maxes(1).max(token.length)
      val tag = tags(position)
      maxes(2) = maxes(2).max(tag.length)
      val spine =
        if (arcsOnly || tracesOnly) "___"
        else spines(position).nodesToString
      maxes(3) = maxes(3).max(spine.length)
      // The structural (non-trace) edge leaving this token; if none exists a
      // placeholder edge with trace "?" pointing at the previous token (or
      // the root for token 0) is fabricated for display.
      val structuralEdge = {
        val parent =
          if (position == 0) spines.length
          else position - 1
        val options = edges.filter{ edge =>
          edge.isStructural && edge.src == position
        }
        if (options.length > 0) options.head
        else new Edge(position, Node.None, parent, Node.None, "?")
      }
      // Parents are printed 1-based; the root prints as 0.
      val parent =
        if (structuralEdge.target == spines.length) "0"
        else (structuralEdge.target.toInt + 1).toString
      maxes(4) = maxes(4).max(parent.length)
      val structuralSymbol =
        if (structuralEdge.target == spines.length) "ROOT_0"
        else structuralEdge.toString.split(" ")(1)
      maxes(5) = maxes(5).max(structuralSymbol.length)
      // Trace edges combine within-spine self-edges and cross-token traces,
      // shifted to 1-based positions for display.
      val traceEdges =
        if (spinesOnly) "___"
        else {
          val combinedTraces =
            spines(position).traces ++
            edges.filter{ edge =>
              edge.isTrace && edge.src == position
            }
          combinedTraces.map{ edge =>
            new Edge(edge.src + 1, edge.srcNode, edge.target + 1,
              edge.targetNode, edge.trace)
          }.map(" " + _.toString).mkString("").trim
        }
      (number, token, tag, spine, parent, structuralSymbol, traceEdges)
    }
    val fieldFormat = s"%${maxes(0)}s %-${maxes(1)}s %-${maxes(2)}s %-${maxes(3)}s %${maxes(4)}s %-${maxes(5)}s %s"
    lines.map{ parts =>
      String.format(fieldFormat, parts._1, parts._2, parts._3, parts._4,
        parts._5, parts._6, parts._7)
    }
  }
  // Cached renderings of the four display variants.
  @transient lazy val stringLines = genStringLines(false, false, false)
  @transient lazy val stringLinesArcs = genStringLines(false, true, false)
  @transient lazy val stringLinesTraces = genStringLines(false, false, true)
  @transient lazy val stringLinesSpines = genStringLines(true, false, false)
  override def toString() = {
    val base = super.toString()
    val graphType = s"# crossing: $crossingType selfEdge: $hasSelfEdge doubleEdge: $hasDoubleEdge cycle: $cycleType\\n"
    base + graphType + stringLines.mkString("\\n") + "\\n"
  }
  /** Side-by-side comparison against another parse; lines that differ (after
    * whitespace normalisation) show both versions in two columns. */
  override def compareString(
    other: Parse, spinesOnly: Boolean = false, arcsOnly: Boolean = false,
    tracesOnly: Boolean = false
  ) = {
    other match {
      case psg: Graph =>
        val thisLines =
          if (spinesOnly) stringLinesSpines
          else if (tracesOnly) stringLinesTraces
          else if (arcsOnly) stringLinesArcs
          else stringLines
        val maxLength = thisLines.map(_.length).max + 2
        val otherLines =
          if (spinesOnly) psg.stringLinesSpines
          else if (tracesOnly) psg.stringLinesTraces
          else if (arcsOnly) psg.stringLinesArcs
          else psg.stringLines
        ( for ((thisLine, otherLine) <- thisLines.zip(otherLines))
          yield {
            val thisLineSimple = thisLine.split(" *").mkString(" ")
            val otherLineSimple = otherLine.split(" *").mkString(" ")
            if (thisLineSimple == otherLineSimple) thisLine
            else {
              // TODO: Break them up into tokens and colour the tokens that
              // differ
              val fmt = "%-"+ maxLength.toString +"s %s"
              String.format(fmt, thisLine, otherLine)
            }
          }
        ).mkString("\\n") + "\\n"
      case _ =>
        super.compareString(other)
    }
  }
  /* Projective - check that no pair of dependencies cross.
   * 1 Endpoint Crossing - check that for each edge that is crossed, all
   * the edges crossing it share an endpoint.
   * Other - All remaining structures
   */
  // Returns (isProjective, isOneEndpointCrossing). For every edge, collects
  // the endpoints of crossing edges that fall inside vs outside its span;
  // any crossing breaks projectivity, and >1 distinct endpoint on both sides
  // breaks the 1EC property.
  @transient lazy val crossingType = {
    var projective = true
    var oneEC = true
    for (edge0 <- edges) {
      val left0 = edge0.src.min(edge0.target)
      val right0 = edge0.src.max(edge0.target)
      val inside = HashSet[Int]()
      val outside = HashSet[Int]()
      for (edge1 <- edges) {
        val left1 = edge1.src.min(edge1.target)
        val right1 = edge1.src.max(edge1.target)
        if (left0 < left1 && left1 < right0 && right0 < right1) {
          inside.add(left1)
          outside.add(right1)
        } else if (left1 < left0 && left0 < right1 && right1 < right0) {
          inside.add(right1)
          outside.add(left1)
        }
      }
      if (inside.size != 0 || outside.size != 0) projective = false
      if (inside.size > 1 && outside.size > 1) oneEC = false
    }
    (projective, oneEC)
  }
  /* Is there an edge between a point and itself, such as:
  # Parse              (ROOT
  # Parse                (FRAG
  # Parse                  (NP
  # Parse                    (NP (NNS Slides) )
  # Parse                    (SBAR
  # Parse                      (WHNP-1 (-NONE- 0) )
  # Parse                      (S
  # Parse                        (NP (-NONE- *T*-1) )
  # Parse                        (VP (TO to)
  # Parse                          (VP (VB illustrate)
  # Parse                            (NP (NNP Shostakovich) (NNS quartets) ))))))
  # Parse                  (. ?) ))
  # Sentence  Slides to illustrate Shostakovich quartets ?
  # Tokens    1 Slides 2 to 3 illustrate 4 Shostakovich 5 quartets 6 ?
  # Identity 1 (1, 1) WHNP-1 True
  # Reference 1 (1, 1) *T*-1
  # Empty (1, 1) 0
  # Graph type  proj  graph  no-cycle  no-cycle-alt  has-cycle1  no-double
  1 Slides       NNS NP_NP_FRAG                     0 ROOT_0
  2 to           TO  _                              3 VP_1
  3 illustrate   VB  VP_VP_(NP_(*T*))_S_(WHNP_(0))_SBAR 1 NP_1 3 WHNP_0 T NP_0 T *T*
  4 Shostakovich NNP _                              5 NP_0
  5 quartets     NNS NP                             3 VP_0
  6 ?            .   _                              1 FRAG_0
  */
  // True when any edge starts and ends at the same token, or any spine
  // carries a within-spine trace (self-edge).
  @transient lazy val hasSelfEdge =
    edges.exists{ e => e.src == e.target } ||
    spines.exists{ _.traces.length > 0 }
  /* Same edge multiple times:
  # Parse              (ROOT
  # Parse                (S
  # Parse                  (S-1
  # Parse                    (NP (DT The) (NNP SEC) )
  # Parse                    (VP (MD will)
  # Parse                      (ADVP (RB probably) )
  # Parse                      (VP (VB vote)
  # Parse                        (PP (IN on)
  # Parse                          (NP (DT the) (NN proposal) ))
  # Parse                        (NP (RB early) (JJ next) (NN year) ))))
  # Parse                  (, ,)
  # Parse                  (NP (PRP he) )
  # Parse                  (VP (VBD said)
  # Parse                    (SBAR (-NONE- 0)
  # Parse                      (S (-NONE- *T*-1) )))
  # Parse                  (. .) ))
  # Sentence  The SEC will probably vote on the proposal early next year , he said .
  # Tokens    1 The 2 SEC 3 will 4 probably 5 vote 6 on 7 the 8 proposal 9 early 10 next 11 year 12 , 13 he 14 said 15 .
  # Identity 1 (0, 11) S-1 False
  # Reference 1 (14, 14) *T*-1
  # Empty (14, 14) 0
  # Graph type  proj  graph  no-cycle  no-cycle-alt  no-cycle1  has-double
  1  The          DT  _        2 NP_0
  2  SEC          NNP NP       5 S_0
  3  will         MD  _        5 VP_1
  4  probably     RB  ADVP     5 VP_1
  5  vote         VB  VP_VP_S 14 S_0    14 S_0 T S_0 F *T*
  6  on           IN  PP       5 VP_0
  7  the          DT  _        8 NP_0
  8  proposal     NN  NP       6 PP_0
  9  early        RB  _       11 NP_0
  10 next         JJ  _       11 NP_0
  11 year         NN  NP       5 VP_0
  12 ,            ,   _       14 S_0
  13 he           PRP NP      14 S_0
  14 said         VBD (SBAR_(0)_(S_(*T*)))_VP_S 0 ROOT_0
  15 .            .   _       14 S_0
  */
  // True when two distinct edges share both endpoints (parallel edges).
  @transient lazy val hasDoubleEdge =
    edges.exists{ e =>
      edges.exists{ f =>
        e.src == f.src && e.target == f.target && e != f
      }
    }
  /* Tree / DAG / Other
   * Return true/false for Tree, and max(shortest cycle for each word)
  # Parse              (ROOT
  # Parse                (S-1 (CC But)
  # Parse                  (ADVP (RB lately) )
  # Parse                  (PRN (, ,)
  # Parse                    (S
  # Parse                      (NP (NNS retailers) )
  # Parse                      (VP (VBP say)
  # Parse                        (SBAR (-NONE- 0)
  # Parse                          (S (-NONE- *T*-1) ))))
  # Parse                    (, ,) )
  # Parse                  (NP (JJ fake) )
  # Parse                  (VP (VBZ has)
  # Parse                    (VP (VBN become)
  # Parse                      (ADJP (RBR more) (JJ fashionable) )))
  # Parse                  (. .) ))
  # Sentence  But lately , retailers say , fake has become more fashionable .
  # Tokens    1 But 2 lately 3 , 4 retailers 5 say 6 , 7 fake 8 has 9 become 10 more 11 fashionable 12 .
  # Identity 1 (0, 12) S-1 False
  # Reference 1 (5, 5) *T*-1
  # Empty (5, 5) 0
  # Graph type  proj  graph  has-cycle  has-cycle-alt  no-cycle1  no-double
  1  But         CC  S    0 ROOT_0   5 S_0 T S_0 F *T*
  2  lately      RB  ADVP 1 S_0
  3  ,           ,   _    5 PRN_0
  4  retailers   NNS NP   5 S_0
  5  say         VBP (SBAR_(0)_(S_(*T*)))_VP_S_PRN 1 S_0
  6  ,           ,   _    5 PRN_0
  7  fake        JJ  NP   1 S_0
  8  has         VBZ _    9 VP_1
  9  become      VBN VP_VP 1 S_0
  10 more        RBR _    11 ADJP_0
  11 fashionable JJ  ADJP 9 VP_0
  12 .           .   _    1 S_0
  */
  // Returns (isTree, maxShortestCycle). isTree is an edge-count check; the
  // second value is -1 when no extra edges exist, otherwise a BFS from each
  // token finds the shortest cycle back to it, and the max is reported.
  @transient lazy val cycleType = {
    val tree = edges.length == tokens.length
    val edgeMap = HashMap[Int, HashSet[Int]]()
    for (edge <- edges) {
      val map = edgeMap.getOrElseUpdate(edge.src, new HashSet[Int])
      map.add(edge.target)
    }
    val dag =
      if (edges.length <= tokens.length) -1
      else (0 until tokens.length).map{ start =>
        val queue = Queue((start, 0))
        val seen = HashSet(start)
        var cycleLength = -1
        while (queue.length > 0) {
          val (cur, dist) = queue.dequeue
          if (cur < tokens.length && edgeMap.contains(cur)) {
            for (parent <- edgeMap(cur)) {
              if (parent == start && cycleLength < 0) cycleLength = dist + 1
              if (! seen.contains(parent)) {
                queue.enqueue((parent, dist+1))
                seen.add(parent)
              }
            }
          }
        }
        cycleLength
      }.max
    (tree, dag)
  }
  // The idea here is to provide a set of edges that do not contain a loop or
  // 1ec violation, cutting a small number of edges out if necessary.
  // Greedy: repeatedly score trace edges by how often they participate in
  // 1EC violations and cycles, exclude the worst, and stop when clean.
  @transient lazy val dag1ecEdges = {
    val excluded = new HashSet[Edge]
    val edgeMap = HashMap[Int, HashSet[Edge]]()
    val traceEdges = new HashSet[Edge]
    for (edge <- edges) {
      edgeMap.getOrElseUpdate(edge.src, new HashSet[Edge]).add(edge)
      if (edge.isTrace) traceEdges.add(edge)
    }
    // Remove edges one at a time until we are acyclic
    var done = false
    while (! done) {
      // Identify all trace edges that are part of a 1ec violation (note those
      // that are doing the crossing, and those that are multi-crossed).
      val problemEdges = new HashMap[Edge, (Int, Int)]
      for (edge <- edges
           if ! excluded.contains(edge)) {
        val left = edge.src.min(edge.target)
        val right = edge.src.max(edge.target)
        var inside = new HashSet[Int]
        var outside = new HashSet[Int]
        edges.filter{ e => e.crosses(edge) && ! excluded.contains(e) }.foreach{ e =>
          val (cin, cout) =
            if (left < e.src && e.src < right) (e.src, e.target)
            else (e.target, e.src)
          inside.add(cin)
          outside.add(cout)
        }
        // More than one distinct endpoint on each side => 1EC violation.
        if (inside.size > 1 && outside.size > 1) {
          if (edge.isTrace) {
            val count = problemEdges.getOrElse(edge, (0, 0))
            problemEdges(edge) = (count._1 + 1, 0)
          } else {
            // Structural edges are never removed; blame the traces crossing it.
            edges.filter{ e =>
              e.crosses(edge) &&
              e.isTrace &&
              ! excluded.contains(e)
            }.foreach{ e =>
              val count = problemEdges.getOrElse(e, (0, 0))
              problemEdges(e) = (count._1 + 1, 0)
            }
          }
        }
      }
      // Start from those edges, and see if they are part of cycles.
      for (edge <- traceEdges
           if ! excluded.contains(edge)) {
        // Do a BFS
        val queue = Queue(edge.target)
        val seen = HashSet(edge.src)
        var found = 0 // approximately tracks number of cycles (not the actual amount, which is non-trivial to calculate)
        while (queue.length > 0) {
          val cur = queue.dequeue
          // Don't go to the root
          if (cur < tokens.length) {
            // Consider each edge leaving here
            for (edge2 <- edgeMap(cur)) {
              if (! excluded.contains(edge2)) {
                val parent = edge2.target
                if (parent == edge.src) found += 1
                if (! seen.contains(parent)) {
                  queue.enqueue(parent)
                  seen.add(parent)
                }
              }
            }
          }
        }
        if (found > 0) {
          val count = problemEdges.getOrElse(edge, (0, 0))
          problemEdges(edge) = (count._1, found)
        }
      }
      // Remove the worst edge (part of a cycle and/or still 1ec involved)
      if (problemEdges.size == 0) done = true
      else {
        // Normalise both scores to [0, 1] and pick the edge with the highest
        // combined score to exclude this round.
        val maxCount = problemEdges.map( _._2._1 ).max
        val maxFound = problemEdges.map( _._2._2 ).max
        var worst : (Edge, Double) = (null, 0.0)
        for ((edge, (count, found)) <- problemEdges) {
          val countScore =
            if (maxCount == 0) 1.0
            else count.toDouble / maxCount
          val foundScore =
            if (maxFound == 0) 1.0
            else found.toDouble / maxFound
          val overall = countScore + foundScore
          if (overall > worst._2) worst = (edge, overall)
        }
        excluded.add(worst._1)
      }
    }
    edges.filter( ! excluded.contains(_) )
  }
  // Flattens every edge (including within-spine traces) into a 6-tuple of
  // (src, target, srcNode, targetNode, sibling-below-target, trace label).
  @transient lazy val toTriples = {
    def edgeToTriple(edge: Edge) = {
      // The node immediately below the target node in the target spine, or
      // "NO_TAG" when the target node is at the bottom; root targets get "".
      val siblingString =
        if (edge.target == tokens.length) ""
        else {
          val siblingPos = spines(edge.target).posInNodes(edge.targetNode) - 1
          if (siblingPos < 0) "NO_TAG"
          else spines(edge.target).nodes(siblingPos).toString
        }
      (edge.src, edge.target, edge.srcNode.toString,
        edge.targetNode.toString, siblingString, edge.trace)
    }
    edges.map{ edgeToTriple(_) } ++
    spines.flatMap{ _.traces.map{ edgeToTriple(_) } }
  }
  // Computes the token span (left, right) covered by every non-null node,
  // then places null elements at the left edge of the nearest regular
  // ancestor's span. Returns the set of (symbol, left, right) spans.
  @transient lazy val toSpans = {
    val spans = HashMap[(Int, Node), (Int, Int)]()
    // Memoised recursive span computation for the node at `pos`.
    def getSpan(pos: Int, node: Node) : (Int, Int) = {
      spans.getOrElseUpdate((pos, node), {
        var min = pos
        var max = pos + 1
        // Look at all edges leading here, get the span of their top symbol and
        // use that to work out what words this position spans.
        for (edge <- edges) {
          if (edge.target == pos && edge.targetNode == node) {
            val span = getSpan(edge.src, edge.srcNode)
            if (span._1 < min) min = span._1
            if (span._2 > max) max = span._2
          }
        }
        // Look at symbols beneath this one in the spine, use their span to
        // update the span as well.
        spines(pos).nodes.forall{ subnode =>
          if (subnode == node) false
          else if (subnode.isNull) true
          else {
            val span = getSpan(pos, subnode)
            if (span._1 < min) min = span._1
            if (span._2 > max) max = span._2
            true
          }
        }
        (min, max)
      })
    }
    // First do regular non-terminal nodes
    for {(spine, index) <- spines.zipWithIndex
         node <- spine.nodes
         if ! node.isNull
    } spans((index, node)) = getSpan(index, node)
    // Now do null elements
    for {(spine, index) <- spines.zipWithIndex
         (node, cur) <- spine.nodes.zipWithIndex
         if node.isNull
    } {
      // Go up in the spine until a regular non-terminal is reached. Use it's
      // span to place the null element. For simplicity, always put it on the
      // far left (in practise the rules are much more complex).
      ((cur + 1) until spine.nodes.length).exists{ pos =>
        if (spine.nodes(pos).isNull) false
        else {
          val cspan = getSpan(index, spine.nodes(pos))
          spans((index, node)) = (cspan._1, cspan._1)
          true
        }
      }
    }
    val ans = HashSet[(String, Int, Int)]()
    for (((index, node), (left, right)) <- spans) {
      // This is necessary because spines like "_" get a span stored for
      // convenience.
      if (spines(index).nodes.length > 0)
        ans.add( (node.symbolToString, left, right) )
    }
    ans
  }
}
/** Companion for [[Graph]]: factory plus parsing of the textual format. */
object Graph {
  def apply(sentence: String, tokens: Vector[String], tags: Vector[String],
    spines: Vector[Spine], edges: Vector[Edge]) =
    new Graph(sentence, tokens, tags, spines, edges)

  /** Parse one sentence's graph from its column-based text representation. */
  def fromText(lines: ArrayBuffer[String]) = {
    /* Example:
    1    We         PRP    NP-SBJ                            5   S_1        5  NP-SBJ_0  T  NP-SBJ_0  F  *
    2    're        VBP    _                                 5   VP_3
    3    about      IN     _                                 5   VP_2
    4    to         TO     _                                 5   VP_1
    5    see        VB     VP_VP_(NP-SBJ_(*))_S_VP_VP_S      0   ROOT_0
    6    if         IN     _                                 8   SBAR_0
    7    advertising NN    NP-SBJ                            8   S_0
    8    works      VBZ    VP_S_SBAR                         5   VP_0
    9    .          .      _                                 5   S_1
    * Note, we are guaranteed that the last symbol in a spine is either _ or a
    * regular non-terminal node (not a null element). Null elements are placed
    * before the node they are under.
    */
    val tokens = new ArrayBuffer[String]
    val edges = new ArrayBuffer[Edge]
    val spines = new ArrayBuffer[Spine]
    val tags = new ArrayBuffer[String]
    // Split a line into its whitespace-separated fields.  The separator must
    // be " +" (one or more spaces): the regex " *" also matches the empty
    // string between non-space characters, which splits every field into
    // single letters (see java.util.regex zero-width match semantics).
    def fields(line: String) = line.trim().split(" +")
    // Parse a node specification of the form SYMBOL[-FUNCTION...]_COUNT and
    // resolve it against the given spine.
    def parseNode(spec: String, spine: Spine, isNull: Boolean) = {
      val subparts = spec.split("_")
      val symbol = subparts.head.split("-").head
      val functions = subparts.head.split("-").tail.toVector
      val count = subparts.last.toInt
      spine.getMatchingNode(symbol, functions, count, isNull)
    }
    // Make spines
    val sentenceLength = lines.length
    for (line <- lines) {
      val parts = fields(line)
      tokens.append(parts(1))
      tags.append(parts(2))
      val spine = Spine.fromText(parts(3))
      spines.append(spine)
    }
    // Make structural edges.  Target position 0 in the file denotes the
    // artificial root, which internally lives at index sentenceLength.
    for ((line, srcPos) <- lines.zipWithIndex) {
      val parts = fields(line)
      val spine = spines(srcPos)
      val targetPos =
        if (parts(4) == "0") sentenceLength
        else parts(4).toInt - 1
      val srcNode =
        if (spine.nodes.length == 0) Node.None
        else spine.nodes.last
      val targetNode =
        if (targetPos == sentenceLength) Node.Root
        else parseNode(parts(5), spines(targetPos), false)
      edges.append(new Edge(srcPos, srcNode, targetPos, targetNode, "_"))
    }
    // Add traces (groups of six trailing fields per line).
    // Note - Other parts of the system assume that these edges are added
    // after all the structural edges (so they appear later in the edge list).
    // TODO: Should this be pushed into pre-processing of data? (ie. we always
    // do traces, some data just doesn't have them)
    if (Config.keepTraces) {
      for ((line, srcPos) <- lines.zipWithIndex) {
        val parts = fields(line)
        for (pos <- (6 until parts.length by 6)) {
          val targetPos = parts(pos).toInt - 1
          // "T" marks a null-element node reference.
          val targetNode =
            parseNode(parts(pos + 1), spines(targetPos), parts(pos + 2) == "T")
          val srcNode =
            parseNode(parts(pos + 3), spines(srcPos), parts(pos + 4) == "T")
          val trace = parts(pos + 5)
          edges.append(new Edge(srcPos, srcNode, targetPos, targetNode, trace))
        }
      }
    }
    // Move loop edges from the edge list into their spines
    edges.filter{ e => e.src == e.target }.foreach{ e =>
      spines(e.src) = spines(e.src).withTrace(e)
    }
    val finalEdges = edges.filter{ v => v.src != v.target }
    apply(tokens.mkString(" "), tokens.toVector, tags.toVector,
      spines.toVector, finalEdges.toVector)
  }

  /** Render a (symbol, functions) pair as SYMBOL-FUNC1-FUNC2... */
  def symbolToString(symbol: (String, Vector[String])) =
    symbol._1 + symbol._2.map("-"+ _).mkString("")
}
| jkkummerfeld/1ec-graph-parser | parser/src/main/scala/psg.scala | Scala | isc | 29,138 |
package command
/** Demo driver for the command pattern: repeatedly wires fresh receivers
  * (email, SMS, file-IO, logging) into their command jobs, queues the jobs
  * on a thread pool, and finally shuts the pool down.
  */
object TestCommandPattern extends App {
  val pool = new ThreadPool(10)
  var email: Email = null
  val emailJob = new EmailJob
  var sms: Sms = null
  val smsJob = new SmsJob
  var fileIO: FileIO = null
  val fileIOJob = new FileIOJob()
  var logging: Logging = null
  val logJob = new LoggingJob()
  // Five rounds: each round rebinds every job to a new receiver and enqueues
  // all four jobs in the same order as before.
  (0 until 5).foreach { _ =>
    email = new Email
    emailJob.setEmail(email)
    sms = new Sms
    smsJob.setSms(sms)
    fileIO = new FileIO
    fileIOJob.setFileIO(fileIO)
    logging = new Logging
    logJob.setLogging(logging)
    pool.addJob(emailJob)
    pool.addJob(smsJob)
    pool.addJob(fileIOJob)
    pool.addJob(logJob)
  }
  pool.shutdownPool
}
| BBK-PiJ-2015-67/sdp-portfolio | exercises/week10/src/main/scala/command/TestCommandPattern.scala | Scala | unlicense | 725 |
package sexamples.networking.pingpongdistributed
import se.sics.kompics.sl._
import se.sics.kompics.network.Network
/** Kompics component that answers every incoming [[Ping]] with a [[Pong]].
  *
  * The Init event carries this component's own network address, which is
  * used as the source of outgoing pongs.
  */
class Ponger(init: Init[Ponger]) extends ComponentDefinition {
  // Destructure the Init event to recover our own address.
  val Init(self: TAddress) = init;
  // Network port used for both receiving pings and sending pongs.
  val net = requires[Network];
  // Number of pings received so far; used only for logging.
  private var counter: Long = 0L;
  net uponEvent {
    case ping: Ping => {
      counter += 1L;
      log.info(s"Got Ping #${counter}!");
      // Reply to the ping's sender, addressed from ourselves.
      trigger(Pong(self, ping.getSource()) -> net);
    }
  }
}
| kompics/kompics-scala | docs/src/main/scala/sexamples/networking/pingpongdistributed/Ponger.scala | Scala | gpl-2.0 | 456 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import java.net.URLEncoder
import java.nio.charset.StandardCharsets.UTF_8
import java.util.Date
import javax.servlet.http.HttpServletRequest
import scala.collection.JavaConverters._
import scala.xml._
import org.apache.commons.text.StringEscapeUtils
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1
import org.apache.spark.ui._
import org.apache.spark.util.Utils
/** Builds one stage table for the UI.
  *
  * Each table on a page namespaces its query parameters with `stageTag`
  * (e.g. "failed.page"), so several tables can be paged/sorted independently
  * on the same page.  Invalid paging/sorting parameters are reported as an
  * inline error panel instead of failing the whole page.
  */
private[ui] class StageTableBase(
    store: AppStatusStore,
    request: HttpServletRequest,
    stages: Seq[v1.StageData],
    tableHeaderID: String,
    stageTag: String,
    basePath: String,
    subPath: String,
    isFairScheduler: Boolean,
    killEnabled: Boolean,
    isFailedStage: Boolean) {
  // Query parameters belonging to other tables on the page; carried through
  // generated links so their state is preserved.
  val parameterOtherTable = request.getParameterMap().asScala
    .filterNot(_._1.startsWith(stageTag))
    .map(para => para._1 + "=" + para._2(0))
  // Raw (possibly null) request parameters for this table.
  val parameterStagePage = request.getParameter(stageTag + ".page")
  val parameterStageSortColumn = request.getParameter(stageTag + ".sort")
  val parameterStageSortDesc = request.getParameter(stageTag + ".desc")
  val parameterStagePageSize = request.getParameter(stageTag + ".pageSize")
  // Parsed parameters with defaults: page 1, sorted by "Stage Id", 100 rows.
  val stagePage = Option(parameterStagePage).map(_.toInt).getOrElse(1)
  val stageSortColumn = Option(parameterStageSortColumn).map { sortColumn =>
    UIUtils.decodeURLParameter(sortColumn)
  }.getOrElse("Stage Id")
  val stageSortDesc = Option(parameterStageSortDesc).map(_.toBoolean).getOrElse(
    // New stages should be shown above old jobs by default.
    stageSortColumn == "Stage Id"
  )
  val stagePageSize = Option(parameterStagePageSize).map(_.toInt).getOrElse(100)
  val currentTime = System.currentTimeMillis()
  // The rendered table, or an error panel if the parameters were invalid
  // (bad page number / unknown sort column).
  val toNodeSeq = try {
    new StagePagedTable(
      store,
      stages,
      tableHeaderID,
      stageTag,
      basePath,
      subPath,
      isFairScheduler,
      killEnabled,
      currentTime,
      stagePageSize,
      stageSortColumn,
      stageSortDesc,
      isFailedStage,
      parameterOtherTable,
      request
    ).table(stagePage)
  } catch {
    case e @ (_ : IllegalArgumentException | _ : IndexOutOfBoundsException) =>
      <div class="alert alert-error">
        <p>Error while rendering stage table:</p>
        <pre>
          {Utils.exceptionString(e)}
        </pre>
      </div>
  }
}
/** Pre-computed contents of one table row.
  *
  * Values are formatted once up front so that sorting and rendering do not
  * repeatedly re-derive them.  `option` is None when no data is available
  * for the stage (see [[MissingStageTableRowData]]).
  */
private[ui] class StageTableRowData(
    val stage: v1.StageData,
    val option: Option[v1.StageData],
    val stageId: Int,
    val attemptId: Int,
    val schedulingPool: String,
    val descriptionOption: Option[String],
    val submissionTime: Date,
    val formattedSubmissionTime: String,
    val duration: Long,
    val formattedDuration: String,
    val inputRead: Long,
    val inputReadWithUnit: String,
    val outputWrite: Long,
    val outputWriteWithUnit: String,
    val shuffleRead: Long,
    val shuffleReadWithUnit: String,
    val shuffleWrite: Long,
    val shuffleWriteWithUnit: String)
/** Placeholder row used when no data is available for a stage attempt:
  * empty strings and sentinel numeric values throughout. */
private[ui] class MissingStageTableRowData(
    stageInfo: v1.StageData,
    stageId: Int,
    attemptId: Int) extends StageTableRowData(
  stageInfo, None, stageId, attemptId, "", None, new Date(0), "", -1, "", 0, "", 0, "", 0, "", 0,
  "")
/** Page showing list of all ongoing and recently finished stages.
  *
  * Renders the paged stage table: sortable headers, one row per stage
  * (with kill links, progress bars and optional failure reasons), backed by
  * a [[StageDataSource]] that handles sorting and slicing.
  */
private[ui] class StagePagedTable(
    store: AppStatusStore,
    stages: Seq[v1.StageData],
    tableHeaderId: String,
    stageTag: String,
    basePath: String,
    subPath: String,
    isFairScheduler: Boolean,
    killEnabled: Boolean,
    currentTime: Long,
    pageSize: Int,
    sortColumn: String,
    desc: Boolean,
    isFailedStage: Boolean,
    parameterOtherTable: Iterable[String],
    request: HttpServletRequest) extends PagedTable[StageTableRowData] {
  override def tableId: String = stageTag + "-table"
  override def tableCssClass: String =
    "table table-bordered table-condensed table-striped " +
      "table-head-clickable table-cell-width-limited"
  override def pageSizeFormField: String = stageTag + ".pageSize"
  override def pageNumberFormField: String = stageTag + ".page"
  // Base URL for links from this table; carries the other tables' parameters
  // so their paging/sorting state survives navigation.
  val parameterPath = UIUtils.prependBaseUri(request, basePath) + s"/$subPath/?" +
    parameterOtherTable.mkString("&")
  override val dataSource = new StageDataSource(
    store,
    stages,
    currentTime,
    pageSize,
    sortColumn,
    desc
  )
  // Link to a given page, preserving the current sort column and direction.
  override def pageLink(page: Int): String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, UTF_8.name())
    parameterPath +
      s"&$pageNumberFormField=$page" +
      s"&$stageTag.sort=$encodedSortColumn" +
      s"&$stageTag.desc=$desc" +
      s"&$pageSizeFormField=$pageSize" +
      s"#$tableHeaderId"
  }
  override def goButtonFormPath: String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, UTF_8.name())
    s"$parameterPath&$stageTag.sort=$encodedSortColumn&$stageTag.desc=$desc#$tableHeaderId"
  }
  // Builds the <thead> row; sortable columns become links that toggle the
  // sort direction when they are the current sort column.
  override def headers: Seq[Node] = {
    // stageHeadersAndCssClasses has three parts: header title, tooltip information, and sortable.
    // The tooltip information could be None, which indicates it does not have a tooltip.
    // Otherwise, it has two parts: tooltip text, and position (true for left, false for default).
    val stageHeadersAndCssClasses: Seq[(String, String, Boolean)] =
      Seq(("Stage Id", null, true)) ++
      {if (isFairScheduler) {Seq(("Pool Name", null, true))} else Seq.empty} ++
      Seq(
        ("Description", null, true),
        ("Submitted", null, true),
        ("Duration", ToolTips.DURATION, true),
        ("Tasks: Succeeded/Total", null, false),
        ("Input", ToolTips.INPUT, true),
        ("Output", ToolTips.OUTPUT, true),
        ("Shuffle Read", ToolTips.SHUFFLE_READ, true),
        ("Shuffle Write", ToolTips.SHUFFLE_WRITE, true)
      ) ++
      {if (isFailedStage) {Seq(("Failure Reason", null, false))} else Seq.empty}
    if (!stageHeadersAndCssClasses.filter(_._3).map(_._1).contains(sortColumn)) {
      throw new IllegalArgumentException(s"Unknown column: $sortColumn")
    }
    val headerRow: Seq[Node] = {
      stageHeadersAndCssClasses.map { case (header, tooltip, sortable) =>
        val headerSpan = if (null != tooltip && !tooltip.isEmpty) {
          <span data-toggle="tooltip" data-placement="top" title={tooltip}>
            {header}
          </span>
        } else {
          {header}
        }
        if (header == sortColumn) {
          // Current sort column: clicking it flips the sort direction.
          val headerLink = Unparsed(
            parameterPath +
              s"&$stageTag.sort=${URLEncoder.encode(header, UTF_8.name())}" +
              s"&$stageTag.desc=${!desc}" +
              s"&$stageTag.pageSize=$pageSize") +
              s"#$tableHeaderId"
          val arrow = if (desc) "▾" else "▴" // UP or DOWN
          <th>
            <a href={headerLink}>
              {headerSpan}<span>
              {Unparsed(arrow)}
            </span>
            </a>
          </th>
        } else {
          if (sortable) {
            val headerLink = Unparsed(
              parameterPath +
                s"&$stageTag.sort=${URLEncoder.encode(header, UTF_8.name())}" +
                s"&$stageTag.pageSize=$pageSize") +
                s"#$tableHeaderId"
            <th>
              <a href={headerLink}>
                {headerSpan}
              </a>
            </th>
          } else {
            <th>
              {headerSpan}
            </th>
          }
        }
      }
    }
    <thead>{headerRow}</thead>
  }
  override def row(data: StageTableRowData): Seq[Node] = {
    <tr id={"stage-" + data.stageId + "-" + data.attemptId}>
      {rowContent(data)}
    </tr>
  }
  // Cells for one row; falls back to a placeholder row when the stage's data
  // is missing.
  private def rowContent(data: StageTableRowData): Seq[Node] = {
    data.option match {
      case None => missingStageRow(data.stageId)
      case Some(stageData) =>
        val info = data.stage
        {if (data.attemptId > 0) {
          <td>{data.stageId} (retry {data.attemptId})</td>
        } else {
          <td>{data.stageId}</td>
        }} ++
        {if (isFairScheduler) {
          <td>
            <a href={"%s/stages/pool?poolname=%s"
              .format(UIUtils.prependBaseUri(request, basePath), data.schedulingPool)}>
              {data.schedulingPool}
            </a>
          </td>
        } else {
          Seq.empty
        }} ++
        <td>{makeDescription(info, data.descriptionOption)}</td>
        <td valign="middle">
          {data.formattedSubmissionTime}
        </td>
        <td>{data.formattedDuration}</td>
        <td class="progress-cell">
          {UIUtils.makeProgressBar(started = stageData.numActiveTasks,
          completed = stageData.numCompleteTasks, failed = stageData.numFailedTasks,
          skipped = 0, reasonToNumKilled = stageData.killedTasksSummary, total = info.numTasks)}
        </td>
        <td>{data.inputReadWithUnit}</td>
        <td>{data.outputWriteWithUnit}</td>
        <td>{data.shuffleReadWithUnit}</td>
        <td>{data.shuffleWriteWithUnit}</td> ++
        {
          if (isFailedStage) {
            failureReasonHtml(info)
          } else {
            Seq.empty
          }
        }
    }
  }
  // Cell with the failure reason; multi-line reasons show the first line with
  // an expandable stack-trace section.
  private def failureReasonHtml(s: v1.StageData): Seq[Node] = {
    val failureReason = s.failureReason.getOrElse("")
    val isMultiline = failureReason.indexOf('\\n') >= 0
    // Display the first line by default
    val failureReasonSummary = StringEscapeUtils.escapeHtml4(
      if (isMultiline) {
        failureReason.substring(0, failureReason.indexOf('\\n'))
      } else {
        failureReason
      })
    val details = if (isMultiline) {
      // scalastyle:off
      <span onclick="this.parentNode.querySelector('.stacktrace-details').classList.toggle('collapsed')"
        class="expand-details">
        +details
      </span> ++
      <div class="stacktrace-details collapsed">
        <pre>{failureReason}</pre>
      </div>
      // scalastyle:on
    } else {
      ""
    }
    <td valign="middle">{failureReasonSummary}{details}</td>
  }
  // Description cell: optional kill link, link to the stage page, and an
  // expandable details section listing cached RDDs and the call site.
  private def makeDescription(s: v1.StageData, descriptionOption: Option[String]): Seq[Node] = {
    val basePathUri = UIUtils.prependBaseUri(request, basePath)
    val killLink = if (killEnabled) {
      val confirm =
        s"if (window.confirm('Are you sure you want to kill stage ${s.stageId} ?')) " +
        "{ this.parentNode.submit(); return true; } else { return false; }"
      // SPARK-6846 this should be POST-only but YARN AM won't proxy POST
      /*
      val killLinkUri = s"$basePathUri/stages/stage/kill/"
      <form action={killLinkUri} method="POST" style="display:inline">
        <input type="hidden" name="id" value={s.stageId.toString}/>
        <a href="#" onclick={confirm} class="kill-link">(kill)</a>
      </form>
       */
      val killLinkUri = s"$basePathUri/stages/stage/kill/?id=${s.stageId}"
      <a href={killLinkUri} onclick={confirm} class="kill-link">(kill)</a>
    } else {
      Seq.empty
    }
    val nameLinkUri = s"$basePathUri/stages/stage/?id=${s.stageId}&attempt=${s.attemptId}"
    val nameLink = <a href={nameLinkUri} class="name-link">{s.name}</a>
    val cachedRddInfos = store.rddList().filter { rdd => s.rddIds.contains(rdd.id) }
    val details = if (s.details != null && s.details.nonEmpty) {
      <span onclick="this.parentNode.querySelector('.stage-details').classList.toggle('collapsed')"
        class="expand-details">
        +details
      </span> ++
      <div class="stage-details collapsed">
        {if (cachedRddInfos.nonEmpty) {
          Text("RDD: ") ++
          cachedRddInfos.map { i =>
            <a href={s"$basePathUri/storage/rdd/?id=${i.id}"}>{i.name}</a>
          }
        }}
        <pre>{s.details}</pre>
      </div>
    }
    val stageDesc = descriptionOption.map(UIUtils.makeDescription(_, basePathUri))
    <div>{stageDesc.getOrElse("")} {killLink} {nameLink} {details}</div>
  }
  // Row rendered when a stage's data is missing: empty cells for every column.
  protected def missingStageRow(stageId: Int): Seq[Node] = {
    <td>{stageId}</td> ++
    {if (isFairScheduler) {<td>-</td>} else Seq.empty} ++
    <td>No data available for this stage</td> ++ // Description
    <td></td> ++ // Submitted
    <td></td> ++ // Duration
    <td></td> ++ // Tasks: Succeeded/Total
    <td></td> ++ // Input
    <td></td> ++ // Output
    <td></td> ++ // Shuffle Read
    <td></td> // Shuffle Write
  }
}
/** Data source backing [[StagePagedTable]].
  *
  * Converts each stage to a [[StageTableRowData]] exactly once, sorts by the
  * requested column, and serves page-sized slices of the sorted sequence.
  */
private[ui] class StageDataSource(
    store: AppStatusStore,
    stages: Seq[v1.StageData],
    currentTime: Long,
    pageSize: Int,
    sortColumn: String,
    desc: Boolean) extends PagedDataSource[StageTableRowData](pageSize) {
  // Convert v1.StageData to StageTableRowData which contains the final contents to show in the
  // table so that we can avoid creating duplicate contents during sorting the data
  private val data = stages.map(stageRow).sorted(ordering(sortColumn, desc))
  // Stage ids contained in the most recently served slice.
  private var _slicedStageIds: Set[Int] = _
  override def dataSize: Int = data.size
  override def sliceData(from: Int, to: Int): Seq[StageTableRowData] = {
    val r = data.slice(from, to)
    _slicedStageIds = r.map(_.stageId).toSet
    r
  }
  /** Pre-compute the formatted and sortable values for a single stage row. */
  private def stageRow(stageData: v1.StageData): StageTableRowData = {
    val formattedSubmissionTime = stageData.submissionTime match {
      case Some(t) => UIUtils.formatDate(t)
      case None => "Unknown"
    }
    // A stage without a completion time is still running; measure against now.
    val finishTime = stageData.completionTime.map(_.getTime()).getOrElse(currentTime)
    // The submission time for a stage is misleading because it counts the time
    // the stage waits to be launched. (SPARK-10930)
    val duration = stageData.firstTaskLaunchedTime.map { date =>
      val time = date.getTime()
      if (finishTime > time) {
        finishTime - time
      } else {
        // finishTime predates the first task launch (stage still in flight);
        // fall back to elapsed time up to now.  (A stray discarded `None`
        // expression was removed from this branch - it had no effect.)
        currentTime - time
      }
    }
    val formattedDuration = duration.map(d => UIUtils.formatDuration(d)).getOrElse("Unknown")
    // Byte counters render as "" instead of "0.0 B" when nothing was
    // read/written, to keep the table visually sparse.
    val inputRead = stageData.inputBytes
    val inputReadWithUnit = if (inputRead > 0) Utils.bytesToString(inputRead) else ""
    val outputWrite = stageData.outputBytes
    val outputWriteWithUnit = if (outputWrite > 0) Utils.bytesToString(outputWrite) else ""
    val shuffleRead = stageData.shuffleReadBytes
    val shuffleReadWithUnit = if (shuffleRead > 0) Utils.bytesToString(shuffleRead) else ""
    val shuffleWrite = stageData.shuffleWriteBytes
    val shuffleWriteWithUnit = if (shuffleWrite > 0) Utils.bytesToString(shuffleWrite) else ""
    new StageTableRowData(
      stageData,
      Some(stageData),
      stageData.stageId,
      stageData.attemptId,
      stageData.schedulingPool,
      stageData.description,
      stageData.submissionTime.getOrElse(new Date(0)),
      formattedSubmissionTime,
      duration.getOrElse(-1),
      formattedDuration,
      inputRead,
      inputReadWithUnit,
      outputWrite,
      outputWriteWithUnit,
      shuffleRead,
      shuffleReadWithUnit,
      shuffleWrite,
      shuffleWriteWithUnit
    )
  }
  /**
   * Return Ordering according to sortColumn and desc.
   * Throws IllegalArgumentException for unsortable or unknown columns.
   */
  private def ordering(sortColumn: String, desc: Boolean): Ordering[StageTableRowData] = {
    val ordering: Ordering[StageTableRowData] = sortColumn match {
      case "Stage Id" => Ordering.by(_.stageId)
      case "Pool Name" => Ordering.by(_.schedulingPool)
      case "Description" => Ordering.by(x => (x.descriptionOption, x.stage.name))
      case "Submitted" => Ordering.by(_.submissionTime)
      case "Duration" => Ordering.by(_.duration)
      case "Input" => Ordering.by(_.inputRead)
      case "Output" => Ordering.by(_.outputWrite)
      case "Shuffle Read" => Ordering.by(_.shuffleRead)
      case "Shuffle Write" => Ordering.by(_.shuffleWrite)
      case "Tasks: Succeeded/Total" =>
        throw new IllegalArgumentException(s"Unsortable column: $sortColumn")
      case unknownColumn => throw new IllegalArgumentException(s"Unknown column: $unknownColumn")
    }
    if (desc) {
      ordering.reverse
    } else {
      ordering
    }
  }
}
| jkbradley/spark | core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala | Scala | apache-2.0 | 16,666 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import kafka.api.LeaderAndIsr
import kafka.cluster.{Broker, EndPoint}
import kafka.server.KafkaConfig
import kafka.utils.TestUtils
import kafka.zk.KafkaZkClient.UpdateLeaderAndIsrResult
import kafka.zk.{KafkaZkClient, TopicPartitionStateZNode}
import kafka.zookeeper.{GetDataResponse, ResponseMetadata}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.zookeeper.KeeperException.Code
import org.apache.zookeeper.data.Stat
import org.easymock.EasyMock
import org.junit.Assert._
import org.junit.{Before, Test}
class ReplicaStateMachineTest {
private var controllerContext: ControllerContext = null
private var mockZkClient: KafkaZkClient = null
private var mockControllerBrokerRequestBatch: ControllerBrokerRequestBatch = null
private var replicaStateMachine: ReplicaStateMachine = null
private val brokerId = 5
private val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(brokerId, "zkConnect"))
private val controllerEpoch = 50
private val partition = new TopicPartition("t", 0)
private val partitions = Seq(partition)
private val replica = PartitionAndReplica(partition, brokerId)
private val replicas = Seq(replica)
  @Before
  def setUp(): Unit = {
    // Fresh context, mocks and state machine for every test so no recorded
    // expectations or replica state can leak between tests.
    controllerContext = new ControllerContext
    controllerContext.epoch = controllerEpoch
    mockZkClient = EasyMock.createMock(classOf[KafkaZkClient])
    mockControllerBrokerRequestBatch = EasyMock.createMock(classOf[ControllerBrokerRequestBatch])
    replicaStateMachine = new ZkReplicaStateMachine(config, new StateChangeLogger(brokerId, true, None),
      controllerContext, mockZkClient, mockControllerBrokerRequestBatch)
  }
  // Convenience accessor for the state the controller context currently
  // records for the given replica.
  private def replicaState(replica: PartitionAndReplica): ReplicaState = {
    controllerContext.replicaState(replica)
  }
@Test
def testStartupOnlinePartition(): Unit = {
val endpoint1 = new EndPoint("localhost", 9997, new ListenerName("blah"),
SecurityProtocol.PLAINTEXT)
val liveBrokerEpochs = Map(Broker(brokerId, Seq(endpoint1), rack = None) -> 1L)
controllerContext.setLiveBrokers(liveBrokerEpochs)
controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
assertEquals(None, controllerContext.replicaStates.get(replica))
replicaStateMachine.startup()
assertEquals(OnlineReplica, replicaState(replica))
}
@Test
def testStartupOfflinePartition(): Unit = {
controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
assertEquals(None, controllerContext.replicaStates.get(replica))
replicaStateMachine.startup()
assertEquals(OfflineReplica, replicaState(replica))
}
@Test
def testStartupWithReplicaWithoutLeader(): Unit = {
val shutdownBrokerId = 100
val offlineReplica = PartitionAndReplica(partition, shutdownBrokerId)
val endpoint1 = new EndPoint("localhost", 9997, new ListenerName("blah"),
SecurityProtocol.PLAINTEXT)
val liveBrokerEpochs = Map(Broker(brokerId, Seq(endpoint1), rack = None) -> 1L)
controllerContext.setLiveBrokers(liveBrokerEpochs)
controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(shutdownBrokerId)))
assertEquals(None, controllerContext.replicaStates.get(offlineReplica))
replicaStateMachine.startup()
assertEquals(OfflineReplica, replicaState(offlineReplica))
}
@Test
def testNonexistentReplicaToNewReplicaTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, NewReplica)
assertEquals(NewReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToOnlineReplicaTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToOfflineReplicaTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionStartedTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionStarted)
assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionIneligibleTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionIneligible)
assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionSuccessful)
assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNewReplicaToNonexistentReplicaTransition(): Unit = {
testInvalidTransition(NewReplica, NonExistentReplica)
}
@Test
def testNewReplicaToOnlineReplicaTransition(): Unit = {
controllerContext.putReplicaState(replica, NewReplica)
controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
assertEquals(OnlineReplica, replicaState(replica))
}
@Test
def testNewReplicaToOfflineReplicaTransition(): Unit = {
val endpoint1 = new EndPoint("localhost", 9997, new ListenerName("blah"),
SecurityProtocol.PLAINTEXT)
val liveBrokerEpochs = Map(Broker(brokerId, Seq(endpoint1), rack = None) -> 1L)
controllerContext.setLiveBrokers(liveBrokerEpochs)
controllerContext.putReplicaState(replica, NewReplica)
EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(EasyMock.eq(Seq(brokerId)), EasyMock.eq(partition), EasyMock.eq(false)))
EasyMock.expect(mockControllerBrokerRequestBatch.addUpdateMetadataRequestForBrokers(EasyMock.eq(Seq(brokerId)), EasyMock.eq(Set(partition))))
EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
EasyMock.replay(mockControllerBrokerRequestBatch)
replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
EasyMock.verify(mockControllerBrokerRequestBatch)
assertEquals(OfflineReplica, replicaState(replica))
}
@Test
def testInvalidNewReplicaToReplicaDeletionStartedTransition(): Unit = {
testInvalidTransition(NewReplica, ReplicaDeletionStarted)
}
@Test
def testInvalidNewReplicaToReplicaDeletionIneligibleTransition(): Unit = {
testInvalidTransition(NewReplica, ReplicaDeletionIneligible)
}
@Test
def testInvalidNewReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
testInvalidTransition(NewReplica, ReplicaDeletionSuccessful)
}
@Test
def testInvalidOnlineReplicaToNonexistentReplicaTransition(): Unit = {
testInvalidTransition(OnlineReplica, NonExistentReplica)
}
@Test
def testInvalidOnlineReplicaToNewReplicaTransition(): Unit = {
testInvalidTransition(OnlineReplica, NewReplica)
}
@Test
def testOnlineReplicaToOnlineReplicaTransition(): Unit = {
controllerContext.putReplicaState(replica, OnlineReplica)
controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
partition, leaderIsrAndControllerEpoch, replicaAssignment(Seq(brokerId)), isNew = false))
EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
assertEquals(OnlineReplica, replicaState(replica))
}
  @Test
  def testOnlineReplicaToOfflineReplicaTransition(): Unit = {
    // Two-replica partition led by `brokerId`; we take that leader offline.
    val otherBrokerId = brokerId + 1
    val replicaIds = List(brokerId, otherBrokerId)
    controllerContext.putReplicaState(replica, OnlineReplica)
    controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(replicaIds))
    val leaderAndIsr = LeaderAndIsr(brokerId, replicaIds)
    val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(leaderAndIsr, controllerEpoch)
    controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
    val stat = new Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    // Expected broker requests: stop the replica (no delete), then inform the
    // remaining replica of the new leadership state.
    EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
    EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(EasyMock.eq(Seq(brokerId)), EasyMock.eq(partition), EasyMock.eq(false)))
    // Offlining the leader should shrink the ISR to the other replica and set
    // the leader to NoLeader, with a bumped ZK version.
    val adjustedLeaderAndIsr = leaderAndIsr.newLeaderAndIsr(LeaderAndIsr.NoLeader, List(otherBrokerId))
    val updatedLeaderAndIsr = adjustedLeaderAndIsr.withZkVersion(adjustedLeaderAndIsr .zkVersion + 1)
    val updatedLeaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(updatedLeaderAndIsr, controllerEpoch)
    // Expected ZK interaction: read the current partition state, then write
    // the adjusted leader/ISR.
    EasyMock.expect(mockZkClient.getTopicPartitionStatesRaw(partitions)).andReturn(
      Seq(GetDataResponse(Code.OK, null, Some(partition),
        TopicPartitionStateZNode.encode(leaderIsrAndControllerEpoch), stat, ResponseMetadata(0, 0))))
    EasyMock.expect(mockZkClient.updateLeaderAndIsr(Map(partition -> adjustedLeaderAndIsr), controllerEpoch, controllerContext.epochZkVersion))
      .andReturn(UpdateLeaderAndIsrResult(Map(partition -> Right(updatedLeaderAndIsr)), Seq.empty))
    EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(otherBrokerId),
      partition, updatedLeaderIsrAndControllerEpoch, replicaAssignment(replicaIds), isNew = false))
    EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
    EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
    replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
    EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
    // Both the cached leadership info and the replica state must be updated.
    assertEquals(updatedLeaderIsrAndControllerEpoch, controllerContext.partitionLeadershipInfo(partition))
    assertEquals(OfflineReplica, replicaState(replica))
  }
  // Transitions the state machine must reject: a replica may not jump from
  // OnlineReplica straight into any deletion state, nor regress from
  // OfflineReplica to NonExistent/New. Each case must leave the state unchanged.
  @Test
  def testInvalidOnlineReplicaToReplicaDeletionStartedTransition(): Unit = {
    testInvalidTransition(OnlineReplica, ReplicaDeletionStarted)
  }
  @Test
  def testInvalidOnlineReplicaToReplicaDeletionIneligibleTransition(): Unit = {
    testInvalidTransition(OnlineReplica, ReplicaDeletionIneligible)
  }
  @Test
  def testInvalidOnlineReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
    testInvalidTransition(OnlineReplica, ReplicaDeletionSuccessful)
  }
  @Test
  def testInvalidOfflineReplicaToNonexistentReplicaTransition(): Unit = {
    testInvalidTransition(OfflineReplica, NonExistentReplica)
  }
  @Test
  def testInvalidOfflineReplicaToNewReplicaTransition(): Unit = {
    testInvalidTransition(OfflineReplica, NewReplica)
  }
  @Test
  def testOfflineReplicaToOnlineReplicaTransition(): Unit = {
    // Bringing an offline replica back online must send it a LeaderAndIsr
    // request carrying the partition's current leadership info (isNew = false).
    controllerContext.putReplicaState(replica, OfflineReplica)
    controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
    val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
    controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
    EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
    EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
      partition, leaderIsrAndControllerEpoch, replicaAssignment(Seq(brokerId)), isNew = false))
    EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
    EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
    replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
    EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
    assertEquals(OnlineReplica, replicaState(replica))
  }
  @Test
  def testOfflineReplicaToReplicaDeletionStartedTransition(): Unit = {
    // Starting deletion of an offline replica sends the hosting broker a
    // StopReplica request with deletePartition = true.
    controllerContext.putReplicaState(replica, OfflineReplica)
    EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
    EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(Seq(brokerId), partition, true))
    EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
    EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionStarted)
    EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
    assertEquals(ReplicaDeletionStarted, replicaState(replica))
  }
  @Test
  def testOfflineReplicaToReplicaDeletionIneligibleTransition(): Unit = {
    // A pure state-map change: no broker requests are issued for this move.
    controllerContext.putReplicaState(replica, OfflineReplica)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionIneligible)
    assertEquals(ReplicaDeletionIneligible, replicaState(replica))
  }
  @Test
  def testInvalidOfflineReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
    testInvalidTransition(OfflineReplica, ReplicaDeletionSuccessful)
  }
  // From ReplicaDeletionStarted the only legal targets are the two deletion
  // outcome states; every other destination must be rejected.
  @Test
  def testInvalidReplicaDeletionStartedToNonexistentReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionStarted, NonExistentReplica)
  }
  @Test
  def testInvalidReplicaDeletionStartedToNewReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionStarted, NewReplica)
  }
  @Test
  def testInvalidReplicaDeletionStartedToOnlineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionStarted, OnlineReplica)
  }
  @Test
  def testInvalidReplicaDeletionStartedToOfflineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionStarted, OfflineReplica)
  }
  // Legal outcomes of an in-flight deletion; both are pure state-map updates.
  @Test
  def testReplicaDeletionStartedToReplicaDeletionIneligibleTransition(): Unit = {
    controllerContext.putReplicaState(replica, ReplicaDeletionStarted)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionIneligible)
    assertEquals(ReplicaDeletionIneligible, replicaState(replica))
  }
  @Test
  def testReplicaDeletionStartedToReplicaDeletionSuccessfulTransition(): Unit = {
    controllerContext.putReplicaState(replica, ReplicaDeletionStarted)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionSuccessful)
    assertEquals(ReplicaDeletionSuccessful, replicaState(replica))
  }
  @Test
  def testReplicaDeletionSuccessfulToNonexistentReplicaTransition(): Unit = {
    // Final cleanup: after a successful deletion the replica is removed from
    // both the partition's assignment and the replica-state map.
    controllerContext.putReplicaState(replica, ReplicaDeletionSuccessful)
    controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
    replicaStateMachine.handleStateChanges(replicas, NonExistentReplica)
    assertEquals(Seq.empty, controllerContext.partitionReplicaAssignment(partition))
    assertEquals(None, controllerContext.replicaStates.get(replica))
  }
  // ReplicaDeletionSuccessful is terminal except for the NonExistent cleanup
  // above; ReplicaDeletionIneligible may not regress to NonExistent/New.
  @Test
  def testInvalidReplicaDeletionSuccessfulToNewReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, NewReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToOnlineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, OnlineReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToOfflineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, OfflineReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToReplicaDeletionStartedTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, ReplicaDeletionStarted)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToReplicaDeletionIneligibleTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, ReplicaDeletionIneligible)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToNonexistentReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, NonExistentReplica)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToNewReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, NewReplica)
  }
  @Test
  def testReplicaDeletionIneligibleToOnlineReplicaTransition(): Unit = {
    // A replica whose deletion was aborted can be brought back online, which
    // re-sends it the current LeaderAndIsr state (isNew = false).
    controllerContext.putReplicaState(replica, ReplicaDeletionIneligible)
    controllerContext.updatePartitionFullReplicaAssignment(partition, ReplicaAssignment(Seq(brokerId)))
    val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
    controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
    EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
    EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
      partition, leaderIsrAndControllerEpoch, replicaAssignment(Seq(brokerId)), isNew = false))
    EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
    EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
    replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
    EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
    assertEquals(OnlineReplica, replicaState(replica))
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToReplicaDeletionStartedTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, ReplicaDeletionStarted)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToReplicaDeletionSuccessfulTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, ReplicaDeletionSuccessful)
  }
  // Asserts that an illegal transition is ignored and leaves the replica in
  // its original state.
  private def testInvalidTransition(fromState: ReplicaState, toState: ReplicaState): Unit = {
    controllerContext.putReplicaState(replica, fromState)
    replicaStateMachine.handleStateChanges(replicas, toState)
    assertEquals(fromState, replicaState(replica))
  }
  // Builds a full ReplicaAssignment with empty adding/removing replica lists.
  private def replicaAssignment(replicas: Seq[Int]): ReplicaAssignment = ReplicaAssignment(replicas, Seq(), Seq())
}
| sslavic/kafka | core/src/test/scala/unit/kafka/controller/ReplicaStateMachineTest.scala | Scala | apache-2.0 | 19,090 |
package foo
// Fixture for a compiler regression test (t3999, under tests/pending).
class Outside
package object bar {
  class Val(b: Boolean)
  // NOTE(review): the return type spells out the package-object companion path
  // (foo.bar.package.Val) explicitly; the test likely exercises how such paths
  // are resolved/printed — confirm before normalizing this to plain Val.
  implicit def boolean2Val(b: Boolean): foo.bar.package.Val = new Val(b)
  implicit def boolean2Outside(b: Boolean): foo.Outside = new Outside
}
| yusuke2255/dotty | tests/pending/pos/t3999/a_1.scala | Scala | bsd-3-clause | 218 |
package org.openurp.sns.photo.web.action
import java.util.Date
import org.beangle.data.jdbc.query.JdbcExecutor
import org.beangle.webmvc.api.action.ActionSupport
import org.beangle.webmvc.api.annotation.ignore
import org.beangle.webmvc.api.view.View
import org.beangle.webmvc.api.view.Stream
import org.beangle.webmvc.api.annotation.param
import java.io.File
import org.beangle.webmvc.api.annotation.mapping
import java.io.FileInputStream
import java.text.SimpleDateFormat
import javax.activation.MimeType
import java.io.ByteArrayInputStream
import org.beangle.cache.ehcache.EhCacheManager
import org.beangle.webmvc.api.view.StreamView
class SearchAction(ehCacheManager: EhCacheManager) extends ActionSupport {
  // Injected by the container; runs raw SQL against the photo schema.
  var jdbcExecutor: JdbcExecutor = _
  // Process-local cache keyed by photo id; values are raw image byte arrays.
  val cache = ehCacheManager.getCache("photo", classOf[String], classOf[Object])

  /**
   * Serves the image for the given photo id.
   *
   * Resolution order: in-memory cache, then database; when neither has the
   * photo, the bundled placeholder image is streamed (and intentionally not
   * cached, so a later upload becomes visible immediately).
   */
  @mapping("{photoId}")
  def index(@param("photoId") photoId: String): View = {
    cache.get(photoId) match {
      case Some(image) => buildStream(image.asInstanceOf[Array[Byte]], photoId)
      case None =>
        loadFromDB(photoId) match {
          case Some(image) =>
            cache.put(photoId, image)
            buildStream(image, photoId)
          case None =>
            // Bug fix: the previous code opened a FileInputStream it never read
            // or closed, and streamed a zero-filled array of the file's length,
            // i.e. a blank image. Read the placeholder's actual bytes instead
            // (Files.readAllBytes opens and closes the file itself, no leak).
            val nfile = new File(this.getClass().getClassLoader().getResource("DefaultPhoto.gif").getFile())
            buildStream(java.nio.file.Files.readAllBytes(nfile.toPath), photoId)
        }
    }
  }

  // Wraps raw image bytes in a downloadable stream view.
  // NOTE(review): the content type is always "image/jpg" (even for the GIF
  // placeholder) to preserve existing behavior; "image/jpeg" is the standard
  // value — confirm clients before changing it.
  private def buildStream(bytes: Array[Byte], photoId: String): StreamView = {
    Stream(new ByteArrayInputStream(bytes), "image/jpg", photoId + ".jpg")
  }

  /**
   * Loads a photo's bytes from the database, or None when the id is unknown.
   *
   * @param photoId photo identifier
   * @param size    "small" selects the thumbnail column, anything else the original image
   */
  def loadFromDB(photoId: String, size: String = "small"): Option[Array[Byte]] = {
    val rs = jdbcExecutor.query("select user_id, updated_at from photo.photo_infos where photo_id =?", photoId)
    if (!rs.isEmpty) {
      val row = rs.head
      // NOTE(review): the year used to pick the photos<year> table comes from
      // the first four characters of user_id (row(0)); if tables are
      // partitioned by update year, row(1) (updated_at) would be the right
      // source — verify against the schema before changing.
      val year = row(0).toString.substring(0, 4)
      val image = if (size == "small") {
        jdbcExecutor.query("select small_image from photo.photos" + year + " where id =?", photoId).head.head.asInstanceOf[Array[Byte]]
      } else {
        jdbcExecutor.query("select origin_image from photo.photos" + year + " where id =?", photoId).head.head.asInstanceOf[Array[Byte]]
      }
      Some(image)
    } else {
      None
    }
  }
}
| openurp/sns-photo-webapp | ws/src/main/scala/org/openurp/sns/photo/web/action/SearchAction.scala | Scala | gpl-3.0 | 2,301 |
/*
* Copyright (c) 2012-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package hadoop
package bad
// Specs2
import org.specs2.mutable.Specification
// Scalding
import com.twitter.scalding._
// Cascading
import cascading.tuple.TupleEntry
// This project
import JobSpecHelpers._
/**
* Holds the input and expected output data
* for the test.
*/
object UnsupportedPayloadCfLinesSpec {
  // A structurally valid CloudFront access-log line whose request path
  // ("/not-ice.png") matches neither a vendor/version path nor the legacy
  // /i(ce.png) tracker path, so enrichment must reject it as a bad row.
  val lines = Lines(
    "2012-05-24 11:35:53 DFW3 3343 99.116.172.58 GET d3gs014xn8p70.cloudfront.net /not-ice.png 200 http://www.psychicbazaar.com/2-tarot-cards/genre/all/type/all?p=5 Mozilla/5.0%20(Windows%20NT%206.1;%20WOW64;%20rv:12.0)%20Gecko/20100101%20Firefox/12.0 e=pv&page=Tarot%2520cards%2520-%2520Psychic%2520Bazaar&tid=344260&uid=288112e0a5003be2&vid=1&lang=en-US&refr=http%253A%252F%252Fwww.psychicbazaar.com%252F2-tarot-cards%252Fgenre%252Fall%252Ftype%252Fall%253Fp%253D4&f_pdf=1&f_qt=0&f_realp=0&f_wma=0&f_dir=0&f_fla=1&f_java=1&f_gears=0&f_ag=1&res=1366x768&cookie=1"
  )
  // Expected bad-row JSON, parameterized on the failure timestamp because that
  // value is only known once the job has actually run.
  val expected = (failure_tstamp: String) => s"""{"line":"2012-05-24 11:35:53 DFW3 3343 99.116.172.58 GET d3gs014xn8p70.cloudfront.net /not-ice.png 200 http://www.psychicbazaar.com/2-tarot-cards/genre/all/type/all?p=5 Mozilla/5.0%20(Windows%20NT%206.1;%20WOW64;%20rv:12.0)%20Gecko/20100101%20Firefox/12.0 e=pv&page=Tarot%2520cards%2520-%2520Psychic%2520Bazaar&tid=344260&uid=288112e0a5003be2&vid=1&lang=en-US&refr=http%253A%252F%252Fwww.psychicbazaar.com%252F2-tarot-cards%252Fgenre%252Fall%252Ftype%252Fall%253Fp%253D4&f_pdf=1&f_qt=0&f_realp=0&f_wma=0&f_dir=0&f_fla=1&f_java=1&f_gears=0&f_ag=1&res=1366x768&cookie=1","errors":[{"level":"error","message":"Request path /not-ice.png does not match (/)vendor/version(/) pattern nor is a legacy /i(ce.png) request"}],"failure_tstamp":"$failure_tstamp"}"""
}
/**
* Integration test for the EtlJob:
*
* Input data _is_ in the CloudFront
* access log format, but the fields
* are somehow corrupted.
*/
class UnsupportedPayloadCfLinesSpec extends Specification {
  "A job which processes an input line with an unknown payload format" should {
    // Runs the Scalding job in-memory: good events and trapped exceptions must
    // both be empty, and the bad-rows sink must hold exactly the expected JSON.
    EtlJobSpec("cloudfront", "1", false, List("geo")).
      source(MultipleTextLineFiles("inputFolder"), UnsupportedPayloadCfLinesSpec.lines).
      sink[String](Tsv("outputFolder")){ output =>
        "not write any events" in {
          output must beEmpty
        }
      }.
      sink[TupleEntry](Tsv("exceptionsFolder")){ trap =>
        "not trap any exceptions" in {
          trap must beEmpty
        }
      }.
      sink[String](Tsv("badFolder")){ buf =>
        val jsonStr = buf.head
        // The timestamp is generated at run time, so extract it from the
        // actual output and splice it into the expected JSON.
        val failure_tstamp = getFailureTstampFrom(jsonStr)
        "write a bad row JSON containing the input line and all errors" in {
          jsonStr must_== UnsupportedPayloadCfLinesSpec.expected(failure_tstamp)
        }
      }.
      run.
      finish
  }
}
| jramos/snowplow | 3-enrich/scala-hadoop-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/bad/UnsupportedPayloadCfLinesSpec.scala | Scala | apache-2.0 | 3,525 |
package com.tpl.hamcraft
import cpw.mods.fml.common.Mod
import cpw.mods.fml.common.event.FMLInitializationEvent
import cpw.mods.fml.common.event.FMLPostInitializationEvent
import cpw.mods.fml.common.event.FMLPreInitializationEvent
import cpw.mods.fml.common.Mod.EventHandler
import cpw.mods.fml.common.network.{NetworkRegistry, NetworkMod}
import java.util.logging.Logger
import java.io.File
import com.tpl.hamcraft.config._
import com.tpl.hamcraft.compat.PowerProxy
@Mod(modid = "HamCraft", name = "H-A-M Craft", version = "MOD_VERSION", modLanguage = "scala", dependencies = "required-after:ThermalExpansion@(3.0,);required-after:bdlib@(1.0,);required-after:Forestry")
@NetworkMod(clientSideRequired = true, serverSideRequired = false)
object HamCraftMod {
  // Populated from FML's mod log during preInit; null until then.
  var log: Logger = null
  var instance = this
  final val modId = "hamcraft"
  final val channel = "com.tpl.hamcraft"
  // Populated from the FML pre-init event; null until preInit runs.
  var configDir: File = null
  // printf-style logging helpers over the FML logger.
  def logInfo(msg: String, args: Any*) = log.info(msg.format(args: _*))
  def logWarn(msg: String, args: Any*) = log.warning(msg.format(args: _*))
  // FML lifecycle: capture logger/config dir, then load tuning/recipe files;
  // "override" is loaded from disk only (checkJar = false).
  @EventHandler
  def preInit(event: FMLPreInitializationEvent) {
    log = event.getModLog
    PowerProxy.logModVersions()
    configDir = event.getModConfigurationDirectory
    TuningLoader.load("tuning")
    TuningLoader.load("recipes")
    TuningLoader.load("override", checkJar = false)
    Config.load(event.getSuggestedConfigurationFile)
  }
  // FML lifecycle: register GUI handler and ore-dictionary entries, then apply
  // tuning entries deferred until all blocks/items exist.
  @EventHandler
  def init(event: FMLInitializationEvent) {
    NetworkRegistry.instance.registerGuiHandler(this, Config.guiHandler)
    Util.registerOreDictionary()
    // Upgrades.init()
    TuningLoader.loadDelayed()
  }
  // FML lifecycle: intentionally empty, kept as an extension point.
  @EventHandler
  def postInit(event: FMLPostInitializationEvent) {
  }
}
| piotrb/hamcraft | src/main/scala/com/tpl/hamcraft/HamCraftMod.scala | Scala | bsd-2-clause | 1,709 |
package com.lucho.models
import spray.json.{JsString, JsValue, JsonFormat, DefaultJsonProtocol}
// Base type for mobile network operators; each concrete provider is a
// singleton object below carrying its country and its wire name.
sealed abstract case class NetworkProvider(country: Country.Country) {
  val name: String
}
// Known Argentinian providers; `name` doubles as the JSON wire value.
object personal extends NetworkProvider(Country.argentina) {
  val name = "personal"
}
object movistar extends NetworkProvider(Country.argentina) {
  val name = "movistar"
}
object NetworkProvider {
  // Resolves a provider singleton by its wire name.
  // NOTE(review): unknown names throw scala.MatchError; confirm whether
  // callers depend on that exception type before tightening this.
  def apply(provider: String): NetworkProvider = provider match {
    case "personal" => personal
    case "movistar" => movistar
  }
}
object NetworkProviderProtocol extends DefaultJsonProtocol {
  /** spray-json format: a provider serializes as the bare JSON string of its name. */
  implicit object NetworkProviderFormat extends JsonFormat[NetworkProvider] {
    override def read(json: JsValue) = json match {
      case JsString(provider) => NetworkProvider(provider)
      // Bug fix: the old message claimed the offending value was a JsString,
      // which is exactly the case that succeeds. Report the actual type.
      case other => throw new Exception(s"Invalid JsValue type for NetworkProvider conversion: expected JsString, got ${other.getClass.getSimpleName}")
    }
    override def write(np: NetworkProvider) = JsString(np.name)
  }
}
| lukiano/spray-servlet31 | src/main/scala/com/lucho/models/NetworkProvider.scala | Scala | mit | 1,016 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.detailquery
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
* Test Class for detailed query on multiple datatypes
*/
class AllQueriesSpark2TestCase extends QueryTest with BeforeAndAfterAll {
  // Creates a Carbon table and an equivalent Hive table, loading employee data
  // into both so Carbon query results can be cross-checked against Hive.
  override def beforeAll {
    sql("CREATE TABLE alldatatypestable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED AS carbondata")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE alldatatypestable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')""");
    sql("CREATE TABLE alldatatypestable_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
    sql(s"""LOAD DATA local inpath '$resourcesPath/datawithoutheader.csv' INTO TABLE alldatatypestable_hive""");
  }
  // The same aggregation over both tables must produce identical rows.
  test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestable where empname in ('arvind','ayushi') group by empno,empname,utilization") {
    checkAnswer(
      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestable where empname in ('arvind','ayushi') group by empno,empname,utilization"),
      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestable_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
  }
  // Drops both fixture tables created in beforeAll.
  override def afterAll {
    sql("drop table alldatatypestable")
    sql("drop table alldatatypestable_hive")
  }
} | jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/AllQueriesSpark2TestCase.scala | Scala | apache-2.0 | 2,686 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.serializer
import kafka.message.Message
// Converts a raw Kafka Message into a typed event for consumers.
trait Decoder[T] {
  def toEvent(message: Message):T
}
// Identity decoder: hands the Message through unchanged.
class DefaultDecoder extends Decoder[Message] {
  def toEvent(message: Message):Message = message
}
/** Decodes a message payload into a String. */
class StringDecoder extends Decoder[String] {
  def toEvent(message: Message):String = {
    val messagePayload = message.payload.slice
    // Read exactly the bytes between the buffer's position and limit.
    // Bug fix: the previous length computation (array.length - arrayOffset)
    // assumed the payload extends to the end of the backing array and could
    // over-read trailing garbage when the buffer's limit is below capacity.
    val messageDataArray = new Array[Byte](messagePayload.remaining)
    messagePayload.get(messageDataArray)
    // NOTE(review): decodes with the platform default charset, as before;
    // confirm whether producers encode with UTF-8 and pin the charset if so.
    new String(messageDataArray)
  }
}
| quipo/kafka | core/src/main/scala/kafka/serializer/Decoder.scala | Scala | apache-2.0 | 1,171 |
package me.yingrui.segment.crf
import me.yingrui.segment.math.Matrix
import scala.collection.mutable.ListBuffer
/** A differentiable objective evaluated over matrix-shaped parameters. */
trait Function {
  def valueAt(x: Matrix): Double
  def derivative: Matrix
}
object LBFGS {
  // Factory with a default history window of 20 correction pairs.
  def apply(startAtX: Matrix) = new LBFGS(20, startAtX)
}
/**
 * Limited-memory BFGS minimizer.
 *
 * Keeps at most `m` correction pairs (s, y) to approximate the inverse
 * Hessian. `X` holds the current iterate; it is updated in place and returned
 * from `search`. The workspace matrices below are reused across iterations.
 */
class LBFGS(m: Int, X: Matrix) {
  val row = X.row
  val col = X.col
  val gradient = Matrix(row, col)
  val newGrad = Matrix(row, col)
  val newX = Matrix(row, col)
  val dir = Matrix(row, col)
  val sList = new ListBuffer[Matrix]() // s(k) = x(k+1) - x(k) = newX - X
  val yList = new ListBuffer[Matrix]() // y(k) = g(k+1) - g(k) = newGrad - gradient
  val roList = new ListBuffer[Double]() // ro(k) = 1 / [ y(k) * s(k) ]
  val previousValues = new ListBuffer[Double]()
  val tolerance = 1.0E-4
  // Classic two-loop recursion: computes dir = -H * grad from the stored
  // correction pairs. Note the local `m` is the CURRENT history length and
  // deliberately shadows the constructor's window size.
  private def findDirection(grad: Matrix): Unit = {
    dir := grad
    val m = sList.size
    val as = new Array[Double](m)
    (0 until m).reverse.foreach(i => {
      as(i) = roList(i) * (sList(i) * dir)
      dir += (yList(i) x -as(i))
    })
    //hessian approximate
    if (m != 0) {
      val y = yList(m - 1)
      val dotY = y * y
      val gamma = sList(m - 1) * y / dotY
      dir *= gamma
    }
    (0 until m).foreach(i => {
      val b = yList(i) * dir * roList(i)
      dir += (sList(i) x (as(i) - b))
    })
    dir *= -1
  }
  var tic = java.util.Calendar.getInstance().getTime().getTime()
  var toc = tic
  // Milliseconds since the previous call; mutates tic/toc (logging only).
  def timeElapse = {
    toc = java.util.Calendar.getInstance().getTime().getTime()
    val elapse = toc - tic
    tic = toc
    elapse
  }
  private def log(message: String): Unit = println(message)
  /**
   * Main optimization loop: at most 300 iterations, stopping early once the
   * averaged relative improvement over the last up-to-10 values drops below
   * `tolerance`. Returns (and has mutated) X.
   */
  def search(func: Function): Matrix = {
    var it = 0
    var value = func.valueAt(X)
    gradient := func.derivative
    val maxIteration = 300
    while (it < maxIteration) {
      findDirection(gradient)
      val sum = func.derivative.map(d => Math.abs(d)).sum
      log("Iteration %d: %10.5f, %10.5f, %d".format(it, value, sum, timeElapse))
      releaseHistoryUpdates
      val newValue = search(func, value)
      newGrad := func.derivative
      val nextS = newX - X
      val nextY = newGrad - gradient
      val ro = 1.0 / (nextS * nextY)
      saveHistoryUpdates(nextS, nextY, ro)
      previousValues += value
      val size = previousValues.size
      val previousVal = if (size == 10) previousValues.remove(0) else previousValues(0)
      val averageImprovement = (previousVal - newValue) / size.toDouble
      val break = (size > 5 && averageImprovement / newValue < tolerance) || (it >= maxIteration)
      if (break) {
        // Sentinel value forces the while-condition false on the next check.
        it = Int.MaxValue
      } else {
        value = newValue
        gradient := newGrad
        X := newX
        newX.clear
        it += 1
      }
    }
    this.X
  }
  // Appends the newest correction pair and its reciprocal curvature.
  private def saveHistoryUpdates(nextS: Matrix, nextY: Matrix, ro: Double) {
    sList += nextS
    yList += nextY
    roList += ro
  }
  // Drops the oldest correction pair once the window of `m` entries is full.
  private def releaseHistoryUpdates {
    if (sList.size == m) sList.remove(0)
    if (yList.size == m) yList.remove(0)
    if (roList.size == m) roList.remove(0)
  }
  // Backtracking line search along `dir` with an Armijo-style sufficient
  // decrease test; writes the accepted point into newX and returns its value.
  // NOTE(review): `a` is scaled by c1 BEFORE the first evaluation, so the
  // trial steps are 0.1, 0.01, ... rather than starting at 1.0 — confirm this
  // is intended. After 10 shrinks the last trial is accepted regardless.
  private def search(func: Function, lastIterationValue: Double): Double = {
    val normGradInDir = dir * gradient
    var a = 1.0D
    val c1 = 0.1D
    val c = 0.01D
    var value = Double.PositiveInfinity
    var break = false
    var times = 0
    do {
      a = a * c1
      newX := (dir x a) // newGrad := (dir x a)
      newX += X // newX := X + newGrad
      value = func.valueAt(newX)
      break = (value < lastIterationValue + normGradInDir * c * a) || times > 10
      times += 1
    } while (!break)
    value
  }
}
| yingrui/mahjong | lib-segment/src/main/scala/me/yingrui/segment/crf/LBFGS.scala | Scala | gpl-3.0 | 3,580 |
package info.armado.ausleihe.client.transport.converter
import java.time.Duration
import java.lang.{Long => JLong}
import javax.xml.bind.annotation.adapters.XmlAdapter
/**
 * JAXB adapter mapping a Duration to/from a boxed minute count in XML.
 * Null propagates through both directions unchanged, matching JAXB's
 * handling of absent elements.
 */
class DurationAdapter extends XmlAdapter[JLong, Duration] {

  /** XML -> model: interprets the boxed value as a number of minutes. */
  override def unmarshal(value: JLong): Duration =
    if (value == null) null else Duration.ofMinutes(value.longValue)

  /** Model -> XML: emits the duration's whole-minute count (seconds truncated). */
  override def marshal(value: Duration): JLong =
    if (value == null) null else JLong.valueOf(value.toMinutes)
}
| Spielekreis-Darmstadt/lending | lending-client-interfaces/src/main/scala/info/armado/ausleihe/client/transport/converter/DurationAdapter.scala | Scala | apache-2.0 | 480 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.yggdrasil
package table
import quasar.blueeyes._
import quasar.blueeyes.json._
import quasar.precog.common._
import quasar.precog.util._
import scalaz._
import scalaz.syntax.std.boolean._
import scala.annotation.tailrec
/**
 * Test-support mixin: builds Tables from streams of JSON values and supplies
 * small lookup libraries (unary/binary functions, scanners) for test EtlJobs.
 */
trait ColumnarTableModuleTestSupport[M[+_]] extends ColumnarTableModule[M] with TableModuleTestSupport[M] {
  def newGroupId: GroupId
  def defaultSliceSize = 10
  // Consumes up to sliceSize JValues into one Slice; returns the slice plus
  // the unconsumed remainder of the input stream.
  private def makeSlice(sampleData: Stream[JValue], sliceSize: Int): (Slice, Stream[JValue]) = {
    @tailrec def buildColArrays(from: Stream[JValue], into: Map[ColumnRef, ArrayColumn[_]], sliceIndex: Int): (Map[ColumnRef, ArrayColumn[_]], Int) = {
      from match {
        case jv #:: xs =>
          val refs = Slice.withIdsAndValues(jv, into, sliceIndex, sliceSize)
          buildColArrays(xs, refs, sliceIndex + 1)
        case _ =>
          (into, sliceIndex)
      }
    }
    val (prefix, suffix) = sampleData.splitAt(sliceSize)
    val slice = new Slice {
      val (columns, size) = buildColArrays(prefix.toStream, Map.empty[ColumnRef, ArrayColumn[_]], 0)
    }
    (slice, suffix)
  }
  // production-path code uses fromRValues, but all the tests use fromJson
  // this will need to be changed when our tests support non-json such as CDate and CPeriod
  // Slices are produced lazily via StreamT.unfoldM, one slice per sliceSize values.
  def fromJson0(values: Stream[JValue], maxSliceSize: Option[Int] = None): Table = {
    val sliceSize = maxSliceSize.getOrElse(yggConfig.maxSliceSize)
    Table(
      StreamT.unfoldM(values) { events =>
        M.point {
          (!events.isEmpty) option {
            makeSlice(events.toStream, sliceSize)
          }
        }
      },
      ExactSize(values.length)
    )
  }
  def fromJson(values: Stream[JValue], maxSliceSize: Option[Int] = None): Table =
    fromJson0(values, maxSliceSize orElse Some(defaultSliceSize))
  // Fixed library of unary test functions, keyed by name (namespace ignored).
  def lookupF1(namespace: List[String], name: String): F1 = {
    val lib = Map[String, CF1](
      "negate" -> cf.math.Negate,
      "coerceToDouble" -> cf.util.CoerceToDouble,
      "true" -> CF1("testing::true") { _ => Some(Column.const(true)) }
    )
    lib(name)
  }
  // Fixed library of binary test functions, keyed by name (namespace ignored).
  def lookupF2(namespace: List[String], name: String): F2 = {
    val lib = Map[String, CF2](
      "add" -> cf.math.Add,
      "mod" -> cf.math.Mod,
      "eq" -> cf.std.Eq
    )
    lib(name)
  }
  // "sum": a scanner carrying a running BigDecimal total across slices over
  // whichever numeric column (Long/Double/Num) is defined at each row.
  def lookupScanner(namespace: List[String], name: String): CScanner = {
    val lib = Map[String, CScanner](
      "sum" -> new CScanner {
        type A = BigDecimal
        val init = BigDecimal(0)
        def scan(a: BigDecimal, cols: Map[ColumnRef, Column], range: Range): (A, Map[ColumnRef, Column]) = {
          val identityPath = cols collect { case c @ (ColumnRef(CPath.Identity, _), _) => c }
          val prioritized = identityPath.values filter {
            case (_: LongColumn | _: DoubleColumn | _: NumColumn) => true
            case _ => false
          }
          // Rows where at least one numeric column is defined.
          val mask = BitSetUtil.filteredRange(range.start, range.end) {
            i => prioritized exists { _ isDefinedAt i }
          }
          // Fold the masked rows, filling the output array with the running total.
          val (a2, arr) = mask.toList.foldLeft((a, new Array[BigDecimal](range.end))) {
            case ((acc, arr), i) => {
              val col = prioritized find { _ isDefinedAt i }
              val acc2 = col map {
                case lc: LongColumn => acc + lc(i)
                case dc: DoubleColumn => acc + dc(i)
                case nc: NumColumn => acc + nc(i)
                case _ => abort("unreachable")
              }
              acc2 foreach { arr(i) = _ }
              (acc2 getOrElse acc, arr)
            }
          }
          (a2, Map(ColumnRef(CPath.Identity, CNum) -> ArrayNumColumn(mask, arr)))
        }
      }
    )
    lib(name)
  }
}
| drostron/quasar | yggdrasil/src/test/scala/quasar/yggdrasil/table/ColumnarTableModuleTestSupport.scala | Scala | apache-2.0 | 4,301 |
package org.deeplearning4s.nn.conf.layers.factory
import org.deeplearning4j.nn.api.Layer
import org.deeplearning4j.nn.layers.factory.LayerFactories
import org.deeplearning4j.optimize.api.IterationListener
import org.deeplearning4s.nn.conf.NeuralNetConf
import scala.collection.JavaConverters._
object CreateLayerAt {
def apply(conf: NeuralNetConf, index: Int, numLayers: Int, iterationListeners: Seq[IterationListener]): Layer = {
val jConf = conf.asJava
LayerFactories.getFactory(jConf).create(jConf, index, numLayers, iterationListeners.asJava)
}
def apply(conf: NeuralNetConf): Layer = {
val jConf = conf.asJava
LayerFactories.getFactory(jConf).create(jConf)
}
def apply(conf: NeuralNetConf, index: Int, iterationListeners: Seq[IterationListener]): Layer = {
val jConf = conf.asJava
LayerFactories.getFactory(jConf).create(jConf, iterationListeners.asJava, index)
}
} | everpeace/deeplearning4s | src/main/scala/org/deeplearning4s/nn/conf/layers/factory/LayerAt.scala | Scala | apache-2.0 | 912 |
import org.scalatest._
import scalaz._, Scalaz._
import dreamer.concept._, Concept._, Relation._
import dreamer.context._, Context._
class ContextSuite extends FunSuite {
  // Fixture semantic network: cats/dogs are located in a house (itself in a
  // suburb and on a street); goats/trees are located on a mountain.
  val initCtx = Context(
    MentalMap()
    + Edge(Abstract("house"),AtLocation,Abstract("suburb"))
    + Edge(Abstract("house"),AtLocation,Abstract("street"))
    + Edge(Abstract("cat"),AtLocation,Abstract("house"))
    + Edge(Abstract("dog"),AtLocation,Abstract("house"))
    + Edge(Abstract("mountain"),AtLocation,Abstract("surface_of_earth"))
    + Edge(Abstract("mountain"),AtLocation,Abstract("mountainous_region"))
    + Edge(Abstract("goat"),AtLocation,Abstract("mountain"))
    + Edge(Abstract("tree"),AtLocation,Abstract("mountain"))
  )
  test("State monad should work for reification") {
    // Reifies two archetypes, then asks location questions in both directions
    // and checks the archetypes of the reified answers against the fixture.
    def run: State[Context,Unit] =
      for (house <- reify(Abstract("house"));
           mountain <- reify(Abstract("mountain"));
           a <- reifyingAsk(Question(What,AtLocation,house));
           b <- reifyingAsk(Question(house,AtLocation,What));
           c <- reifyingAsk(Question(mountain,AtLocation,What));
           d <- reifyingAsk(Question(What,AtLocation,mountain));
           e <- reifyingAsk(Question(house,AtLocation,mountain));
           f <- reifyingAsk(Question(Self,AtLocation,What));
           g <- reifyingAsk(Question(Self,IsA,What));
           ctx <- get)
        yield {
          // Map each answer edge back to its archetype for comparison.
          val aSimple = a.map(edge => archetype(ctx, edge.start)).toSet
          val bSimple = b.map(edge => archetype(ctx, edge.end)).toSet
          val cSimple = c.map(edge => archetype(ctx, edge.end)).toSet
          val dSimple = d.map(edge => archetype(ctx, edge.start)).toSet
          assert((aSimple & Set(Abstract("cat"), Abstract("dog"))) != Set())
          assert((bSimple & Set(Abstract("suburb"), Abstract("street"))) !=
              Set())
          assert(b.size == 1)
          assert((cSimple & Set(Abstract("surface_of_earth"),
              Abstract("mountainous_region"))) != Set())
          assert(c.size == 1)
          assert((dSimple & Set(Abstract("goat"), Abstract("tree"))) != Set())
          // Yes/no and self-referential questions have no matching edges.
          assert(e.size == 0)
          assert(f.size == 0)
          assert(g.size == 0)
          ()
        }
    run(initCtx)
  }
}
| tcoxon/dreamer | tests/ContextSuite.scala | Scala | mit | 2,295 |
import com.datastax.examples.meetup._
import org.scalatra._
import javax.servlet.ServletContext
// Scalatra entry point: mounts the meetup event-stats servlet at the web root.
class ScalatraBootstrap extends LifeCycle {
  override def init(context: ServletContext) {
    context.mount(new EventStatsServlet, "/*")
  }
}
| rstml/datastax-spark-streaming-demo | web/src/main/scala/ScalatraBootstrap.scala | Scala | apache-2.0 | 241 |
package webui
import cz.kamenitxan.jakon.webui.AdminSettings
import cz.kamenitxan.jakon.webui.controller.impl.Dashboard
import org.scalatest.DoNotDiscover
import org.scalatest.funsuite.AnyFunSuite
import spark.{Request, Response}
/**
* Created by TPa on 08/04/2021.
*/
@DoNotDiscover
class AdminSettingsTest extends AnyFunSuite {
  test("setDashboardController test") {
    // Smoke test: registering a custom dashboard handler must not throw; the
    // handler simply delegates to the default Dashboard implementation.
    AdminSettings.setDashboardController((req: Request, res: Response) => Dashboard.getDashboard(req, res))
  }
}
| kamenitxan/Jakon | modules/backend/src/test/scala/webui/AdminSettingsTest.scala | Scala | bsd-3-clause | 488 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.api.tools
import com.lightbend.lagom.internal.spi.{ ServiceAcl, ServiceDescription, ServiceDiscovery }
import com.typesafe.config.ConfigFactory
import play.api._
import play.api.libs.functional.syntax._
import play.api.libs.json._
import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.compat.java8.OptionConverters._
/**
* A service detector locates the services of a Lagom project.
*/
object ServiceDetector {
  // Optional override naming the ServiceDiscovery implementation class.
  private val ServiceDiscoveryKey = "lagom.tools.service-discovery"
  // Fallback: the Play application loader is assumed to implement ServiceDiscovery.
  private val ApplicationLoaderKey = "play.application.loader"
  val log = Logger(this.getClass)
  // JSON writer: an ACL serializes to { "method": ..., "pathPattern": ... },
  // with either field omitted when absent.
  implicit val serviceAclsWrites: Writes[ServiceAcl] = (
    (__ \\ "method").writeNullable[String] and
      (__ \\ "pathPattern").writeNullable[String]
  ).apply(sa => (sa.method().asScala, sa.pathPattern().asScala))
  // JSON writer: a service serializes to { "name": ..., "acls": [...] }.
  implicit val serviceDescriptionWrites: Writes[ServiceDescription] = (
    (__ \\ "name").write[String] and
      (__ \\ "acls").write[immutable.Seq[ServiceAcl]]
  ).apply(sd => (sd.name, sd.acls.asScala.to[immutable.Seq]))
  /**
   * Retrieves the service names and acls for the current Lagom project
   * of all services.
   *
   * @param classLoader The class loader should contain a sbt project in the classpath
   *                    for which the services should be resolved.
   * @return a JSON array of [[com.lightbend.lagom.internal.spi.ServiceDescription]] objects.
   */
  def services(classLoader: ClassLoader): String = {
    val config = ConfigFactory.load(classLoader)
    val serviceDiscoveryClassName = if (config.hasPath(ServiceDiscoveryKey)) {
      config.getString(ServiceDiscoveryKey)
    } else {
      config.getString(ApplicationLoaderKey)
    }
    services(classLoader, serviceDiscoveryClassName)
  }
  // Instantiates the discovery class reflectively and serializes the services
  // it reports to a JSON string.
  private[tools] def services(classLoader: ClassLoader, serviceDiscoveryClassName: String): String = {
    log.debug("Loading service discovery class: " + serviceDiscoveryClassName)
    val serviceDiscoverClass = classLoader.loadClass(serviceDiscoveryClassName)
    val castServiceDiscoveryClass = serviceDiscoverClass.asSubclass(classOf[ServiceDiscovery])
    val serviceDiscovery = castServiceDiscoveryClass.newInstance()
    val services = serviceDiscovery.discoverServices(classLoader).asScala.to[immutable.Seq]
    Json.stringify(Json.toJson(services))
  }
}
| edouardKaiser/lagom | api-tools/src/main/scala/com/lightbend/lagom/internal/api/tools/ServiceDetector.scala | Scala | apache-2.0 | 2,441 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server.epoch
import java.util.concurrent.locks.ReentrantReadWriteLock
import kafka.server.LogOffsetMetadata
import kafka.server.checkpoints.LeaderEpochCheckpoint
import org.apache.kafka.common.requests.EpochEndOffset.{UNDEFINED_EPOCH, UNDEFINED_EPOCH_OFFSET}
import kafka.utils.CoreUtils._
import kafka.utils.Logging
import org.apache.kafka.common.TopicPartition
import scala.collection.mutable.ListBuffer
/**
 * A cache of (leader epoch => start offset) mappings for a replica.
 * See [[LeaderEpochFileCache]] for the file-backed implementation.
 */
trait LeaderEpochCache {
  // Records `offset` as the start offset of `leaderEpoch`.
  def assign(leaderEpoch: Int, offset: Long)
  // Latest epoch known to the cache (UNDEFINED_EPOCH if empty).
  def latestEpoch(): Int
  // End offset for the given epoch (UNDEFINED_EPOCH_OFFSET if unknown).
  def endOffsetFor(epoch: Int): Long
  // Drops entries whose start offset is >= `offset` (log truncation from the end).
  def clearLatest(offset: Long)
  // Drops entries older than `offset` (log truncation from the start).
  def clearEarliest(offset: Long)
  // Removes all entries.
  def clear()
}
/**
 * Represents a cache of (LeaderEpoch => Offset) mappings for a particular replica.
 *
 * Leader Epoch = epoch assigned to each leader by the controller.
 * Offset = offset of the first message in each epoch.
 *
 * The in-memory list is seeded from, and flushed back to, the checkpoint
 * file on every mutation, so the mapping survives broker restarts.
 *
 * @param topicPartition the partition this cache belongs to (used in log messages)
 * @param leo a function that determines the log end offset
 * @param checkpoint the checkpoint file
 */
class LeaderEpochFileCache(topicPartition: TopicPartition, leo: () => LogOffsetMetadata, checkpoint: LeaderEpochCheckpoint) extends LeaderEpochCache with Logging {

  // Guards `epochs`: reads take the read lock, mutations the write lock.
  private val lock = new ReentrantReadWriteLock()
  // Epoch entries ordered oldest-first, restored from the checkpoint file.
  private var epochs: ListBuffer[EpochEntry] = inWriteLock(lock) { ListBuffer(checkpoint.read(): _*) }

  /**
   * Assigns the supplied Leader Epoch to the supplied Offset
   * Once the epoch is assigned it cannot be reassigned
   *
   * @param epoch
   * @param offset
   */
  override def assign(epoch: Int, offset: Long): Unit = {
    inWriteLock(lock) {
      // Only append if both epoch and offset are monotonically increasing;
      // anything else is logged as out-of-order rather than stored.
      if (epoch >= 0 && epoch > latestEpoch && offset >= latestOffset) {
        info(s"Updated PartitionLeaderEpoch. ${epochChangeMsg(epoch, offset)}. Cache now contains ${epochs.size} entries.")
        epochs += EpochEntry(epoch, offset)
        flush()
      } else {
        validateAndMaybeWarn(epoch, offset)
      }
    }
  }

  /**
   * Returns the current Leader Epoch. This is the latest epoch
   * which has messages assigned to it.
   *
   * @return
   */
  override def latestEpoch(): Int = {
    inReadLock(lock) {
      if (epochs.isEmpty) UNDEFINED_EPOCH else epochs.last.epoch
    }
  }

  /**
   * Returns the End Offset for a requested Leader Epoch.
   *
   * This is defined as the start offset of the first Leader Epoch larger than the
   * Leader Epoch requested, or else the Log End Offset if the latest epoch was requested.
   *
   * @param requestedEpoch
   * @return offset
   */
  override def endOffsetFor(requestedEpoch: Int): Long = {
    inReadLock(lock) {
      val offset =
        if (requestedEpoch == latestEpoch) {
          // The latest epoch is still open, so its end is the log end offset.
          leo().messageOffset
        }
        else {
          val subsequentEpochs = epochs.filter(e => e.epoch > requestedEpoch)
          // Requests for epochs before the earliest known entry are unanswerable.
          if (subsequentEpochs.isEmpty || requestedEpoch < epochs.head.epoch)
            UNDEFINED_EPOCH_OFFSET
          else
            subsequentEpochs.head.startOffset
        }
      debug(s"Processed offset for epoch request for partition ${topicPartition} epoch:$requestedEpoch and returning offset $offset from epoch list of size ${epochs.size}")
      offset
    }
  }

  /**
   * Removes all epoch entries from the store with start offsets greater than or equal to the passed offset.
   *
   * @param offset
   */
  override def clearLatest(offset: Long): Unit = {
    inWriteLock(lock) {
      val before = epochs
      if (offset >= 0 && offset <= latestOffset()) {
        epochs = epochs.filter(entry => entry.startOffset < offset)
        flush()
        info(s"Cleared latest ${before.toSet.filterNot(epochs.toSet)} entries from epoch cache based on passed offset $offset leaving ${epochs.size} in EpochFile for partition $topicPartition")
      }
    }
  }

  /**
   * Clears old epoch entries. This method searches for the oldest epoch < offset, updates the saved epoch offset to
   * be offset, then clears any previous epoch entries.
   *
   * This method is exclusive: so clearEarliest(6) will retain an entry at offset 6.
   *
   * @param offset the offset to clear up to
   */
  override def clearEarliest(offset: Long): Unit = {
    inWriteLock(lock) {
      val before = epochs
      if (offset >= 0 && earliestOffset() < offset) {
        val earliest = epochs.filter(entry => entry.startOffset < offset)
        if (earliest.size > 0) {
          epochs = epochs --= earliest
          //If the offset is less than the earliest offset remaining, add previous epoch back, but with an updated offset
          if (offset < earliestOffset() || epochs.isEmpty)
            new EpochEntry(earliest.last.epoch, offset) +=: epochs
          flush()
          info(s"Cleared earliest ${before.toSet.filterNot(epochs.toSet).size} entries from epoch cache based on passed offset $offset leaving ${epochs.size} in EpochFile for partition $topicPartition")
        }
      }
    }
  }

  /**
   * Delete all entries.
   */
  override def clear() = {
    inWriteLock(lock) {
      epochs.clear()
      flush()
    }
  }

  // Exposes the raw entries (primarily for tests); caller must not mutate.
  def epochEntries(): ListBuffer[EpochEntry] = {
    epochs
  }

  // Start offset of the oldest entry, or -1 when the cache is empty.
  private def earliestOffset(): Long = {
    if (epochs.isEmpty) -1 else epochs.head.startOffset
  }

  // Start offset of the newest entry, or -1 when the cache is empty.
  private def latestOffset(): Long = {
    if (epochs.isEmpty) -1 else epochs.last.startOffset
  }

  // Persists the current entries to the checkpoint file.
  private def flush(): Unit = {
    checkpoint.write(epochs)
  }

  // Builds a human-readable description of an epoch/offset transition for logging.
  def epochChangeMsg(epoch: Int, offset: Long) = s"New: {epoch:$epoch, offset:$offset}, Latest: {epoch:$latestEpoch, offset$latestOffset} for Partition: $topicPartition"

  // Rejects negative epochs outright and warns about out-of-order assignments.
  def validateAndMaybeWarn(epoch: Int, offset: Long) = {
    assert(epoch >= 0, s"Received a PartitionLeaderEpoch assignment for an epoch < 0. This should not happen. ${epochChangeMsg(epoch, offset)}")
    if (epoch < latestEpoch())
      warn(s"Received a PartitionLeaderEpoch assignment for an epoch < latestEpoch. " +
        s"This implies messages have arrived out of order. ${epochChangeMsg(epoch, offset)}")
    else if (offset < latestOffset())
      warn(s"Received a PartitionLeaderEpoch assignment for an offset < latest offset for the most recent, stored PartitionLeaderEpoch. " +
        s"This implies messages have arrived out of order. ${epochChangeMsg(epoch, offset)}")
  }
}
// Mapping of a leader epoch to the offset of the first message in that epoch
case class EpochEntry(epoch: Int, startOffset: Long)
| rhauch/kafka | core/src/main/scala/kafka/server/epoch/LeaderEpochFileCache.scala | Scala | apache-2.0 | 7,123 |
/*
*
* Copyright 2015 David Hall
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* /
*/
package breeze.linalg.functions
import breeze.generic.UFunc
import breeze.linalg.norm
import breeze.linalg.operators.OpSub
/**
 * Base trait for distance UFuncs of the form `norm(a - b, p)` where `p`
 * is supplied by the concrete subtype via `normConstant`.
 *
 * @author dlwh
 **/
trait NormBasedDistance extends UFunc {

  /** The norm exponent `p` used by this distance (e.g. 2.0 for Euclidean). */
  protected def normConstant: Double

  /** Derives a distance implementation from subtraction plus a norm on the difference. */
  implicit def distanceFromNormAndSub[A, B, Diff](
      implicit sub: OpSub.Impl2[A, B, Diff],
      normOfDiff: norm.Impl2[Diff, Double, Double]): Impl2[A, B, Double] =
    new Impl2[A, B, Double] {
      def apply(left: A, right: B): Double =
        norm(sub(left, right), normConstant)
    }
}
| wstcpyt/breeze | math/src/main/scala/breeze/linalg/functions/NormBasedDistance.scala | Scala | apache-2.0 | 1,144 |
/* Original license for Scala-Chef
The MIT License (MIT)
Copyright (c) 2014-2016 Zane Urbanski, Eric Yu, Loc Hoang
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package scalachefredux
import scala.language.implicitConversions
import scala.language.dynamics
/** Extend this class to start your DSL writing.
  *
  * Hosts the internal DSL for the Chef esoteric language: the inner objects
  * (`Title`, `Put`, `Fold`, ...) and the implicit conversions below let
  * Chef-style sentences parse as ordinary Scala method chains.  Each sentence
  * is accumulated in `lineBuilder` and committed to `programText` when the
  * next sentence begins (see `finishLine`); `Enjoy your Meal` runs the program.
  */
class ScalaChefRedux {

  // Accumulates the Chef line currently being parsed.
  val lineBuilder = new LineBuilder
  // Holds all parsed lines and functions (recipes) of the program.
  val programText = new ChefText
  // Runtime state used when the program is eventually run.
  val programState = new ChefState

  // True once the first recipe title has been seen; that recipe becomes main.
  var firstRecipeFound = false
  // True right after entering Method mode; suppresses committing a
  // not-yet-built line on the very first operation (see finishLine).
  var firstOp = false

  ///////////
  // TITLE //
  ///////////

  /* Title parsing; Should only appear at the beginning of a program */
  object Title {
    def -(recipeTitle: String) = {
      // A new title terminates the previous recipe's pending line.
      if (firstRecipeFound) {
        finishLine
      }

      if (recipeTitle == "") {
        throw new RuntimeException("ERROR: Recipe name can't be empty")
      }

      // Tell program text to save the current line as the start of a new
      // function
      programText functionStart recipeTitle

      // go into limbo mode (next thing should be Ingredients declaration)
      lineBuilder.modeLimbo

      // The first recipe in the file is the program's entry point.
      if (!firstRecipeFound) {
        programState setMainRecipe recipeTitle
        firstRecipeFound = true
      }
    }
  }

  /* Parses "mixing bowl" */
  object MixingGetter {
    /* mixing bowl by itself = default stack 1 */
    def mixing(b: BowlWord) = {
      lineBuilder setStackNumber1 1
      lineBuilder.setFinished
    }
  }

  /* Parses "bowl <number>" */
  object BowlGetter {
    /* bowl with number = into some particular stack */
    def bowl(bowlNumber: Int) = {
      lineBuilder setStackNumber1 bowlNumber
      lineBuilder.setFinished
    }

    /* bowl with no number = by default stack 1 */
    def bowl = {
      lineBuilder setStackNumber1 1
      lineBuilder.setFinished
    }
  }

  /* Continues parsing with "into mixing" or "into the" */
  object IntoGetter {
    /* "into mixing" leads into "bowl <number>" */
    def into(m: MixingWord) = BowlGetter

    /* "into the" leads into "mixing bowl" */
    def into(t: TheWord) = MixingGetter
  }

  /* Continues parsing with "to mixing" or "to the" */
  object ToGetter {
    /* "to mixing" leads into "bowl <number>" */
    def to(m: MixingWord) = BowlGetter

    /* "to the" leads into "mixing bowl" */
    def to(t: TheWord) = MixingGetter
  }

  /* Take the <ingredient> from the refrigerator
   * Reads a value into the named ingredient (Chef's input statement). */
  object Take {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_TAKE
      FromGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)

    object FromGetter {
      def from(t: TheWord) = RefrigeratorGetter
    }

    object RefrigeratorGetter {
      def refrigerator = lineBuilder.setFinished
    }
  }

  /* The push operation of the language.
   * Put the <ingredient> into mixing bowl <number>; */
  object Put {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_PUT
      // return the into object
      IntoGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)
  }

  /* Fold the <ingredient> into mixing bowl <number>;
   * The pop operation: pops the bowl's top value into the ingredient. */
  object Fold {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_FOLD
      // return the into object
      IntoGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)
  }

  // Add dry ingredients
  // Add dry ingredients (to the) (mixing bowl)
  // Add dry ingredients (to mixing) (bowl <number>)
  object Add {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_ADD
      lineBuilder setStackNumber1 1
      // can end here, so mark finished; there is optional part after it
      lineBuilder.setFinished
      // Optional part can be parsed
      ToGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)

    // "Add dry ingredients": sums all dry ingredient values onto the bowl.
    def dry(i: IngredientsWord) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setOp E_ADDDRY
      lineBuilder setStackNumber1 1
      // can end here, so mark finished; there is optional part after it
      lineBuilder.setFinished
      ToGetter
    }
  }

  // Remove the <ingredient>
  // Remove the <ingredient> (from mixing) (bowl <num>)
  // Remove the <ingredient> (from the) (mixing bowl)
  object Remove {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_REMOVE
      lineBuilder setStackNumber1 1
      // can end here, so mark finished; there is optional part after it
      lineBuilder.setFinished
      // Optional part can be parsed
      FromGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)

    object FromGetter {
      /* "from mixing" leads into "bowl <number>" */
      def from(m: MixingWord) = BowlGetter

      /* "from the" leads into "mixing bowl" */
      def from(t: TheWord) = MixingGetter
    }
  }

  /* Combine the <ing> into the mixing bowl
   * Combine the <ing> into mixing bowl <#> */
  object Combine {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_COMBINE
      lineBuilder setStackNumber1 1
      // can end here, so mark finished; there is optional part after it
      lineBuilder.setFinished
      // Optional part can be parsed
      IntoGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)
  }

  /* Divide the <ing> into the mixing bowl
   * Divide the <ing> into mixing bowl <number> */
  object Divide {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      // set ingredient + op
      lineBuilder setString ingredient
      lineBuilder setOp E_DIVIDE
      lineBuilder setStackNumber1 1
      // can end here, so mark finished; there is optional part after it
      lineBuilder.setFinished
      // Optional part can be parsed
      IntoGetter
    }

    // aliases for "the"
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)
  }

  /* Liquefy the <ingredient>
   * (Liquefy the contents) (of the) (mixing bowl)
   * (Liquefy the contents) (of mixing) (bowl <number>) */
  object Liquefy {
    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_LIQUEFY
      lineBuilder setString ingredient
      lineBuilder.setFinished
    }

    def the(c: ContentsWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_LIQUEFY_CONTENTS
      OfGetter
    }

    object OfGetter {
      def of(t: TheWord) = MixingGetter
      def of(m: MixingWord) = BowlGetter
    }
  }

  // (Stir for <number>) minutes
  // (Stir bowl <number>) (for <number>) minutes
  // (Stir the bowl) (for <number>) minutes
  // (stir <word> ingredient) into mixing bowl 1
  // (stir <word> ingredient) into the mixing bowl
  object Stir {
    def bowl(bowlNumber: Int) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_STIR
      lineBuilder setStackNumber1 bowlNumber
      ForGetter
    }

    def the(b: BowlWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_STIR
      lineBuilder setStackNumber1 1
      ForGetter
    }

    // "for" is a Scala keyword and doesn't work, unfortunately, so have to use _for
    def _for(num: Int) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_STIR
      lineBuilder setNumber num
      lineBuilder setStackNumber1 1
      MinutesGetter
    }

    // aliases for for
    def FOR(num: Int) = _for(num)
    def For(num: Int) = _for(num)
    def fOr(num: Int) = _for(num)
    def f0r(num: Int) = _for(num)
    def fr(num: Int) = _for(num)

    /* Grab the for in the case when a mixing bowl is included in the
     * instruction */
    object ForGetter {
      def _for(num: Int) = {
        lineBuilder setNumber num
        MinutesGetter
      }

      // aliases
      def FOR(num: Int) = _for(num)
      def For(num: Int) = _for(num)
      def fOr(num: Int) = _for(num)
      def f0r(num: Int) = _for(num)
      def fr(num: Int) = _for(num)
    }

    /* grab minutes */
    object MinutesGetter {
      def minutes = lineBuilder.setFinished
    }

    def the(ingredient: String) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_STIR_I
      lineBuilder setString ingredient
      IntoGetter
    }

    // the aliases
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)
  }

  /* Mix bowl <number> well
   * Mix the bowl well
   * Mix well */
  object Mix {
    def the(b: BowlWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_MIX;
      lineBuilder setStackNumber1 1
      WellGetter
    }

    def bowl(bowlNumber: Int) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_MIX;
      lineBuilder setStackNumber1 bowlNumber
      WellGetter
    }

    def well = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_MIX;
      lineBuilder setStackNumber1 1
      lineBuilder.setFinished
    }

    object WellGetter {
      def well = lineBuilder.setFinished
    }
  }

  /* Clean up the mixing bowl
   * Clean up mixing bowl <number> */
  object Clean {
    def the(b: BowlWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_CLEAN;
      lineBuilder setStackNumber1 1
      lineBuilder.setFinished
    }

    def bowl(bowlNumber: Int) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_CLEAN;
      lineBuilder setStackNumber1 bowlNumber
      lineBuilder.setFinished
    }

    // TODO remove this syntax pattern?
    def up(t: TheWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_CLEAN;
      MixingGetter
    }

    def up(m: MixingWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_CLEAN
      BowlGetter
    }
  }

  /* Pour the contents (of mixing) (bowl <number>) (into the) (baking dish)
   * Pour the contents (of mixing) (bowl <number>) (into baking) (dish <number>)
   * Pour the contents (of the) (mixing bowl) (into baking) (dish <number>)
   * Pour the contents (of the) (mixing bowl) (into the) (baking dish) */
  object Pour {
    def the(c: ContentsWord) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_POUR
      PourOfGetter
    }

    object PourOfGetter {
      def of(t: TheWord) = PourMixingGetter
      def of(m: MixingWord) = PourBowlGetter
    }

    object PourMixingGetter {
      def mixing(b: BowlWord) = {
        lineBuilder setStackNumber1 1
        PourIntoGetter
      }
    }

    object PourBowlGetter {
      def bowl(bowlNumber: Int) = {
        lineBuilder setStackNumber1 bowlNumber
        PourIntoGetter
      }
    }

    object PourIntoGetter {
      def into(b: BakingWord) = PourDishGetter
      def into(t: TheWord) = PourBakingGetter
    }

    object PourDishGetter {
      def dish(dishNumber: Int) = {
        lineBuilder setStackNumber2 dishNumber
        lineBuilder.setFinished
      }
    }

    object PourBakingGetter {
      def baking(d: DishWord) = {
        lineBuilder setStackNumber2 1
        lineBuilder.setFinished
      }
    }
  }

  /* convert strings to loop builders */
  implicit def string2Loop(s: String) = new LoopBuilder(s.toLowerCase)

  /* Builds Chef loop statements: "<Verb> the <ingredient>" opens a loop,
   * "<Verb> ... until <verb>ed" closes one. */
  class LoopBuilder(s: String) {
    // Chef pairs a loop's opening verb with its past tense at the close.
    val verb = if (s endsWith "e") s + "d" else s + "ed"

    def the(ingredient: String) = {
      // until getter => verb end, else verb beginning
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_LOOP;
      lineBuilder setVerb verb
      lineBuilder setString ingredient
      lineBuilder.setFinished
      UntilGetter
    }

    // the aliases
    def a(ingredient: String) = the(ingredient)
    def an(ingredient: String) = the(ingredient)
    def some(ingredient: String) = the(ingredient)

    def until(lVerb: String) = {
      finishLine
      // original verb can be ignored, important one is lVerb
      val finalVerb = lVerb.toLowerCase
      lineBuilder.assertMethod
      lineBuilder setOp E_LOOP_END
      lineBuilder setVerb finalVerb
      lineBuilder.setFinished
    }

    object UntilGetter {
      def until(lVerb: String) = {
        val finalVerb = lVerb.toLowerCase
        lineBuilder setOp E_LOOP_END
        lineBuilder setVerb finalVerb
        lineBuilder.setFinished
      }
    }
  }

  /* Set aside */
  // NOTE: this object shadows scala.Predef's Set inside the DSL class.
  object Set {
    def aside = {
      finishLine
      lineBuilder setOp E_SET
      lineBuilder.setFinished
    }
  }

  /* Serve _with <recipe> */
  object Serve {
    // with is a keyword, so I have to use something else
    def _with(recipe: String) = {
      finishLine
      lineBuilder setOp E_SERVE
      lineBuilder setString recipe
      lineBuilder.setFinished
    }

    // Random aliases you can use
    def wth(recipe: String) = _with(recipe)
    def wit(recipe: String) = _with(recipe)
    def WITH(recipe: String) = _with(recipe)
    def With(recipe: String) = _with(recipe)
    def recipe(r: String) = _with(r)
    def dish(r: String) = _with(r)
    def along(r: String) = _with(r)
    def plus(r: String) = _with(r)
  }

  /* Refrigerate now
   * Refrigerate for <number> hours */
  object Refrigerate {
    def now = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_REFRIGERATE
      // -1 signals "no hours given" to the runner.
      lineBuilder setNumber -1
      lineBuilder.setFinished
    }

    def _for(num: Int) = {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_REFRIGERATE
      lineBuilder setNumber num

      if (num <= 0) {
        throw new RuntimeException("ERROR: hours can't be negative/0")
      }
      new HourGetter(num)
    }

    /* Enforces singular/plural agreement between the hour count and the
     * "hour"/"hours" word that follows it. */
    class HourGetter(h: Int) {
      def hour = {
        if (h > 1) {
          throw new RuntimeException("ERROR: hour used for more than 1 hour")
        }
        lineBuilder.setFinished
      }

      def hours = {
        if (h == 1) {
          throw new RuntimeException("ERROR: hours used for 1 hour")
        }
        lineBuilder.setFinished
      }
    }

    // aliases
    def FOR(num: Int) = _for(num)
    def For(num: Int) = _for(num)
    def fOr(num: Int) = _for(num)
    def f0r(num: Int) = _for(num)
    def fr(num: Int) = _for(num)
  }

  /* Recipe serves <number>; marks how many baking dishes get printed. */
  object Recipe {
    def serves(numberOfDishes: Int) {
      finishLine
      lineBuilder.assertMethod
      lineBuilder setOp E_SERVES
      lineBuilder setNumber numberOfDishes
      lineBuilder.setFinished
    }
  }

  /* Integers will be converted into this class, and the class will then
   * grab ingredient strings. */
  class IngredientGetter(num: Int) extends Dynamic {
    if (num < 0) {
      throw new RuntimeException("ERROR: Ingredients in a recipe declaration cannot be negative.")
    }

    /* Ingredients classified as dry */
    def g(ingredient: String) = {
      lineBuilder.assertIngredient
      programText addIngredient (new ChefIngredient(ingredient, I_DRY, num))
    }

    def kg(ingredient: String) = {
      lineBuilder.assertIngredient
      programText addIngredient (new ChefIngredient(ingredient, I_DRY, num))
    }

    def pinch(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num == 1)
        programText addIngredient (new ChefIngredient(ingredient, I_DRY, num))
      else
        throw new RuntimeException("ERROR: A pinch means a value of 1")
    }

    def pinches(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num > 1)
        programText addIngredient (new ChefIngredient(ingredient, I_DRY, num))
      else
        throw new RuntimeException("ERROR: Pinches means a value greater than 1")
    }

    /* Ingredients classified as liquid */
    def ml(ingredient: String) = {
      lineBuilder.assertIngredient
      programText addIngredient (new ChefIngredient(ingredient, I_LIQUID, num))
    }

    def l(ingredient: String) = {
      lineBuilder.assertIngredient
      programText addIngredient (new ChefIngredient(ingredient, I_LIQUID, num))
    }

    def dash(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num == 1)
        programText addIngredient (new ChefIngredient(ingredient, I_LIQUID, num))
      else
        throw new RuntimeException("ERROR: A dash means a value of 1")
    }

    def dashes(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num > 1)
        programText addIngredient (new ChefIngredient(ingredient, I_LIQUID, num))
      else
        throw new RuntimeException("ERROR: Dashes means a value greater than 1")
    }

    /* Ingredients classified as either */
    def cup(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num == 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Cup means a value of 1")
    }

    def cups(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num > 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Cups means a value greater than 1")
    }

    def teaspoon(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num == 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Teaspoon means a value of 1")
    }

    def teaspoons(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num > 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Teaspoons means a value greater than 1")
    }

    def tablespoon(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num == 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Tablespoon means a value of 1")
    }

    def tablespoons(ingredient: String) = {
      lineBuilder.assertIngredient
      if (num > 1)
        programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
      else
        throw new RuntimeException("ERROR: Tablespoons means a value greater than 1")
    }

    // stands for "count"
    def ct(ingredient: String) = {
      lineBuilder.assertIngredient
      programText addIngredient (new ChefIngredient(ingredient, I_EITHER, num))
    }
  }

  // Lets bare integers start an ingredient declaration, e.g. `111 g 'flour'`.
  implicit def int2IngredientGetter(i: Int) = new IngredientGetter(i)

  /* Debug function that will print the lines of the program text */
  def printLines = programText.printLines

  /* "Enjoy your meal;" is the line that tells you to begin running the
   * program. */
  object Enjoy {
    def your(m: MealWord) = {
      // finish the last line
      finishLine
      // disable the line builder for good
      lineBuilder.modeDone
      // finish the last function
      programText.endFunction
      // do a consistency check
      programText.consistencyCheck
      // run the program
      val runner = new ChefRunner(programState, programText)
      runner.run
    }
  }

  /* Called to finish a line */
  def finishLine = {
    // Skip committing on the very first op after Method: there is no
    // previously-built line to commit yet.
    if (!firstOp) {
      programText addLine lineBuilder.finishLine
    } else {
      firstOp = false
    }
  }

  ///////////////////////
  // Mode change lines //
  ///////////////////////

  /* The purpose of the following is to signify a change in the current "mode"
   * a Chef program is in: mainly Title, Ingredient, or Method */

  /* Mode change to ingredient */
  def Ingredients {
    lineBuilder.modeIngredient
  }

  /* Mode change to method */
  def Method {
    lineBuilder.modeMethod
    firstOp = true
  }
}
| l-hoang/scala-chef-redux | src/scalachefredux/ScalaChefRedux.scala | Scala | apache-2.0 | 21,956 |
package com.eigengo.lift.exercise.classifiers.workflows
import com.eigengo.lift.exercise._
import com.eigengo.lift.exercise.classifiers.QueryModel.GroundFact
/** Helpers for stating facts about classified gestures and binding them to sensor data. */
object ClassificationAssertions {

  /**
   * Named gesture matches with probability >= `matchProbability`
   *
   * @param name             gesture name the classifier matched
   * @param matchProbability lower bound on the match probability
   * @param sensor           sensor location the match was observed at
   * @return a [[GroundFact]] whose string form reads `(name@sensor >= p)`
   */
  def Gesture(name: String, matchProbability: Double, sensor: SensorDataSourceLocation): GroundFact =
    new GroundFact(name, matchProbability, sensor) {
      // Human-readable rendering used in logs/debugging.
      override def toString: String = {
        s"($name@$sensor >= $matchProbability)"
      }
    }

  /**
   * Bind inferred (e.g. machine learnt) assertions to sensors in a network of sensors.
   *
   * @param facts facts true of this location
   * @param value raw sensor network data that assertion holds for
   */
  case class BindToSensors(facts: Set[GroundFact], value: SensorNetValue)
}
| ceecer1/open-muvr | server/exercise/src/main/scala/com/eigengo/lift/exercise/classifiers/workflows/ClassificationAssertions.scala | Scala | apache-2.0 | 830 |
package client
import comms.Client
import org.scalajs.dom.{KeyboardEvent, UIEvent}
import shared.autowire.SharedApi
import shared.models.Shuttle
import autowire._
import scala.scalajs.js.annotation.JSExport
import scalatags.JsDom.all._
import org.scalajs.dom
import org.scalajs.dom.ext.KeyCode
import org.scalajs.dom.raw.Event
@JSExport
object Main {

  /**
   * Scala.js entry point: builds a tiny ping/pong page.
   *
   * Renders a text input plus "Go"/"Clear" buttons; "Go" (or Enter) sends the
   * input to the server via the autowired [[SharedApi]] and appends the
   * returned shuttle's ping/pong strings to the output span; "Clear" (or
   * Escape) resets both the input and the output.
   */
  @JSExport
  def main(): Unit = {
    // UI elements, rendered once and reused below.
    val in = input(placeholder := "Ping String Here").render
    val output = span().render
    val btnGo = input(`type` := "submit", value := "Go").render
    val btnClear = input(`type` := "button", value := "Clear").render

    // "Clear" wipes both the input field and any accumulated output.
    btnClear.onclick = (e: UIEvent) => {
      in.value = ""
      output.innerHTML = ""
    }

    // Execution context for the async RPC call below.
    import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue

    // "Go" performs the RPC and appends the response to the output span.
    btnGo.onclick = (e: Event) => {
      Client[SharedApi].getShuttle(in.value).call().foreach { shuttle: Shuttle =>
        output.appendChild(span(shuttle.ping).render)
        output.appendChild(p().render)
        output.appendChild(span(shuttle.pong).render)
        output.appendChild(p().render)
      }
    }

    // Keyboard shortcuts: Enter submits, Escape clears.
    in.onkeyup = (e: KeyboardEvent) => {
      e.keyCode match {
        case KeyCode.Enter => btnGo.click()
        case KeyCode.Escape => btnClear.click()
        case _ => None
      }
    }

    // Replace the page body with the assembled widgets and focus the input.
    dom.document.body.innerHTML = ""
    dom.document.body.appendChild(div(
      in,
      btnGo,
      btnClear,
      p(),
      output
    ).render)
    in.focus()
  }
}
package org.jetbrains.plugins.scala.lang.parser
import com.intellij.lang.{ASTNode, PsiBuilder, PsiParser}
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.lang.parser.parsing.Program
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilderImpl
import org.jetbrains.plugins.scala.lang.parser.parsing.expressions.BlockExpr
/** PSI parser entry point invoked by the IntelliJ platform for Scala sources. */
class ScalaParser extends PsiParser {

  protected val blockExpr: BlockExpr = BlockExpr
  protected val program: Program = Program

  /**
   * Parses either a lone block expression or a whole program, depending on
   * the requested root element type, and returns the resulting AST.
   */
  def parse(root: IElementType, builder: PsiBuilder): ASTNode = {
    if (root == ScalaElementTypes.BLOCK_EXPR) {
      blockExpr.parse(new ScalaPsiBuilderImpl(builder))
    } else {
      val marker = builder.mark
      program.parse(new ScalaPsiBuilderImpl(builder))
      marker.done(root)
    }
    builder.getTreeBuilt
  }
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/parser/ScalaParser.scala | Scala | apache-2.0 | 865 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package cogdebugger.ui.fieldvisualizations.color
//
//import cogx._
//import scala.swing._
//import cogx.platform.types.Pixel
//import cogx.platform.cpumemory.ColorFieldMemory
//
///** Stand-alone testing of a field viewer.
// *
// *
// * @author Greg Snider
// */
//object TestColorFieldMemoryView extends SimpleSwingApplication {
// // Create a simple color field
// val Rows = 100
// val Columns = 100
// val colorField = ColorFieldMemory(Rows, Columns,
// (r, c) => new Pixel(r.toFloat / Rows, c.toFloat / Columns, 0f))
//
// lazy val top = new MainFrame {
// title = "Test ColorFieldMemoryView"
// contents = new BoxPanel(Orientation.Horizontal) {
// contents += new ColorFieldMemoryView(colorField, colorField.fieldShape) {
// update(colorField, colorField, 0L)
// }
//// contents += new Geometric2DVectorView(field0D)
// }
// minimumSize = new Dimension(250, 100)
// }
//}
| hpe-cct/cct-core | src/test/scala/cogdebugger/ui/fieldvisualizations/color/TestColorFieldMemoryView.scala | Scala | apache-2.0 | 1,555 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka
import com.github.benmanes.caffeine.cache.Ticker
import com.vividsolutions.jts.geom.Point
import org.geotools.factory.CommonFactoryFinder
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.filter.text.ecql.ECQL
import org.joda.time.{DateTime, DateTimeZone, Instant}
import org.junit.runner.RunWith
import org.locationtech.geomesa.filter._
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeature
import org.opengis.filter._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
import scala.util.Random
/**
 * Micro-benchmark comparing LiveFeatureCache implementations (Guava-backed vs
 * CQEngine-backed) across a set of representative (E)CQL filters.
 *
 * The benchmark body is marked `skipped`, so it does not run as part of the
 * normal build; remove the `skipped` line to execute it manually.
 */
@RunWith(classOf[JUnitRunner])
class LiveFeatureCacheBenchmarkTest extends Specification {

  // Lifts a bare SimpleFeature into a CreateOrUpdate message timestamped "now".
  implicit def sfToCreate(feature: SimpleFeature): CreateOrUpdate = CreateOrUpdate(Instant.now, feature)

  implicit val ff = CommonFactoryFinder.getFilterFactory2

  // Feature type definition; the cq-index hints exercise CQEngine's attribute indices.
  val spec = Seq(
    "Who:String:cq-index=default",
    "What:Integer:cq-index=navigable",
    "When:Date:cq-index=navigable",
    "*Where:Point:srid=4326",
    "Why:String").mkString(",")

  val MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"))
  val seconds_per_year = 365L * 24L * 60L * 60L
  val string = "foo"

  // Random instant within one year after MIN_DATE.
  def randDate = MIN_DATE.plusSeconds(scala.math.round(scala.util.Random.nextFloat * seconds_per_year)).toDate

  val sft = SimpleFeatureTypes.createType("test", spec)
  val builder = new SimpleFeatureBuilder(sft)

  val names = Array("Addams", "Bierce", "Clemens", "Damon", "Evan", "Fred", "Goliath", "Harry")
  def getName: String = names(Random.nextInt(names.length))

  // Random point uniformly distributed over the whole globe.
  def getPoint: Point = {
    val minx = -180
    val miny = -90
    val dx = 360
    val dy = 180
    val x = minx + Random.nextDouble * dx
    val y = miny + Random.nextDouble * dy
    WKTUtils.read(s"POINT($x $y)").asInstanceOf[Point]
  }

  /** Builds a random feature with id `i`; "Why" is populated on roughly half the features. */
  def buildFeature(i: Int): SimpleFeature = {
    builder.set("Who", getName)
    builder.set("What", Random.nextInt(10))
    builder.set("When", randDate)
    builder.set("Where", getPoint)
    if (Random.nextBoolean()) {
      builder.set("Why", string)
    }
    builder.buildFeature(i.toString)
  }

  /** Arithmetic mean of the sampled timings. */
  def mean(values: Seq[Long]): Double = {
    values.sum.toDouble / values.length.toDouble
  }

  /** Sample standard deviation (n - 1 denominator); NaN when only one sample is given. */
  def sd(values: Seq[Long]): Double = {
    val mn = mean(values)
    math.sqrt(
      values.map(x => math.pow(x.toDouble - mn, 2.0)).sum /
        (values.length - 1).toDouble)
  }

  /** Formats a double with one decimal place for tabular output. */
  def fd(value: Double): String = {
    "%.1f".format(value)
  }

  /** Runs each filter `n` times through `genIter` and prints count/timing stats, tab-separated. */
  def runQueries[T](n: Int, genIter: T => Long, filters: Seq[T]) = {
    println(Seq(
      "c_max",
      "c_min",
      "t_max",
      "t_mean",
      "t_sd",
      "t_min",
      "filter"
    ).mkString("\\t"))
    for (f <- filters) {
      val timeRes = (1 to n).map(i => time(genIter(f)))
      val counts = timeRes.map(_._1)
      val times = timeRes.map(_._2)
      println(Seq(
        counts.max,
        counts.min,
        times.max,
        fd(mean(times)),
        fd(sd(times)),
        times.min,
        f.toString
      ).mkString("\\t"))
    }
  }

  /** As [[runQueries]], but times several labelled query generators per filter. */
  def runQueriesMultiple[T](n: Int,
                            labels: Seq[String],
                            genIter: Seq[T => Long],
                            filters: Seq[T]) = {
    val sep = "\\t"
    val header = (for (l <- labels; c <- Seq("count", "tmax", "tmean", "tsd", "tmin")) yield l + "_" + c)
    print(header.mkString(sep))
    println(sep + "filter")
    for (f <- filters) {
      val row = genIter.flatMap {
        g => {
          val timeRes = (1 to n).map(i => time(g(f)))
          val counts = timeRes.map(_._1)
          val times = timeRes.map(_._2)
          Seq(
            counts.max,
            times.max,
            fd(mean(times)),
            fd(sd(times)),
            times.min)
        }
      }
      print(row.mkString(sep))
      println(sep + f)
    }
  }

  /** Emits raw per-iteration timings as CSV: one column per (filter, generator) pair, one row per run. */
  def runQueriesMultipleRaw[T](n: Int,
                               labels: Seq[String],
                               genIter: Seq[T => Long],
                               filters: Seq[T]) = {
    val sep = ","
    val header = for (i <- 1 to filters.size ; l <- labels) yield s"f$i.$l"
    println(header.mkString(sep))
    for (i <- 1 to n) {
      val row = for (f <- filters; g <- genIter) yield {
        // result count is discarded here; only the elapsed time is reported
        val (_, t) = time(g(f))
        t
      }
      println(row.toList.mkString(sep))
    }
  }

  /** Evaluates `a` and returns its result paired with the elapsed wall time in ms. */
  def time[A](a: => A) = {
    val now = System.currentTimeMillis()
    val result = a
    (result, System.currentTimeMillis() - now)
  }

  /** Evaluates `a` for its side effects and returns the elapsed wall time in ms.
    * (Fixed: the type parameter was previously named `Unit`, shadowing `scala.Unit`.)
    */
  def timeUnit[A](a: => A): Long = {
    val now = System.currentTimeMillis()
    a
    System.currentTimeMillis() - now
  }

  /** Formats a population count together with its throughput in features per ms. */
  def countPopulate(count: Int, time: Long): String = {
    "%d in %d ms (%.1f /ms)".format(count, time, count.toDouble / time)
  }

  // --- benchmark filters ---
  val ab = ECQL.toFilter("Who IN('Addams', 'Bierce')")
  val cd = ECQL.toFilter("Who IN('Clemens', 'Damon')")
  val w14 = ECQL.toFilter("What = 1 OR What = 2 OR What = 3 or What = 4")
  val where = ECQL.toFilter("BBOX(Where, 0, 0, 180, 90)")
  val where2 = ECQL.toFilter("BBOX(Where, -180, -90, 0, 0)")
  val bbox2 = ff.or(where, where2)
  val justified = ECQL.toFilter("Why is not null")

  val justifiedAB = ff.and(ff.and(ab, w14), justified)
  val justifiedCD = ff.and(ff.and(cd, w14), justified)
  val just = ff.or(justifiedAB, justifiedCD)
  val justBBOX = ff.and(just, where)
  val justBBOX2 = ff.and(just, where2)

  val overlapWhere1 = ECQL.toFilter("BBOX(Where, -180, 0, 0, 90)")
  val overlapWhere2 = ECQL.toFilter("BBOX(Where, -90, -90, 0, 90)")
  val overlapOR1 = ff.or(overlapWhere1, overlapWhere2)
  val overlapOR2 = ECQL.toFilter("Who = 'Addams' OR What = 1")

  // Wide OR of alternating attribute predicates (strings lifted to Filters implicitly).
  // NOTE(review): 'Clemons' looks like a typo for 'Clemens' (see `names`), so that
  // clause never matches generated data -- confirm whether this is intentional.
  val overlapORpathological = ff.or(List[Filter](
    "Who = 'Addams'",
    "What = 1",
    "Who = 'Bierce'",
    "What = 2",
    "Who = 'Clemons'",
    "What = 3",
    "Who = 'Damon'",
    "What = 4",
    "Who = 'Evan'",
    "What = 5",
    "Who = 'Fred'",
    "What = 6",
    "Who = 'Goliath'",
    "What = 7",
    "Who = 'Harry'",
    "What = 8"))

  val filters = Seq(ab, cd, w14, where, justified, justifiedAB, justifiedCD, just, justBBOX, justBBOX2, bbox2, overlapOR1, overlapOR2, overlapORpathological)

  // --- benchmark data: two feature sets sharing ids, so the second pass hits the update path ---
  val nFeats = 100000
  val feats = (0 until nFeats).map(buildFeature)
  val featsUpdate = (0 until nFeats).map(buildFeature)

  // load different LiveFeatureCache implementations
  implicit val ticker = Ticker.systemTicker()
  val lfc = new LiveFeatureCacheGuava(sft, None)
  //val h2 = new LiveFeatureCacheH2(sft)
  val cq = new LiveFeatureCacheCQEngine(sft, None)

  "LiveFeatureCacheCQEngine " should {
    "benchmark" >> {
      // Skipped by default so the benchmark never runs in CI; remove to run manually.
      skipped

      // Populate each cache, then re-populate with same-id features (update path).
      val lfc_pop = timeUnit(feats.foreach {
        lfc.createOrUpdateFeature(_)
      })
      println("lfc pop: " + countPopulate(feats.size, lfc_pop))
      val lfc_repop = timeUnit(featsUpdate.foreach {
        lfc.createOrUpdateFeature(_)
      })
      println("lfc repop: " + countPopulate(featsUpdate.size, lfc_repop))

      val cq_pop = timeUnit({
        for (sf <- feats) cq.createOrUpdateFeature(sf)
      })
      println("cq pop: " + countPopulate(feats.size, cq_pop))
      val cq_repop = timeUnit({
        for (sf <- featsUpdate) cq.createOrUpdateFeature(sf)
      })
      println("cq repop: " + countPopulate(featsUpdate.size, cq_repop))

      // Raw timings: Guava cache vs CQEngine, the latter with and without dedup.
      runQueriesMultipleRaw[Filter](
        11,
        Seq("lfc", "cq", "cqdd"),
        Seq(
          f => lfc.getReaderForFilter(f).toIterator.size,
          f => cq.geocq.queryCQ(f, false).toIterator.size,
          f => cq.geocq.queryCQ(f, true).toIterator.size),
        filters)

      true must equalTo(true)
    }
  }
}
| MutahirKazmi/geomesa | geomesa-kafka/geomesa-kafka-datastore/geomesa-kafka-datastore-common/src/test/scala/org/locationtech/geomesa/kafka/LiveFeatureCacheBenchmarkTest.scala | Scala | apache-2.0 | 8,242 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.