| code (string, 5–1M) | repo_name (string, 5–109) | path (string, 6–208) | language (1 class) | license (15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 Lars Kroll <bathtor@googlemail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.lkroll.common.data
import org.scalatest._
import org.scalatest.funsuite._
import org.scalatest.matchers.should.Matchers
class WordMatchTests extends AnyFunSuite with Matchers {
val w1 = "Word";
val w2 = "World";
val w3 = "Nothing";
val w4 = "Something";
test("Edit Distance should work") {
import EditDistance.editDist;
editDist(w1, w1) should be(0);
editDist(w1, w2) should be(1);
editDist(w3, w4) should be(3);
}
test("WordMatching should work") {
val wm1 = WordMatch.matchForCase(w1, w1);
wm1.relativeMatch should be(1.0f);
wm1.relativeDistance should be(0.0f);
wm1.isSignificant() should be(true);
val wm2 = WordMatch.matchForIgnoreCase(w1, w1);
wm2.relativeMatch should be(1.0f);
wm2.relativeDistance should be(0.0f);
wm2.isSignificant() should be(true);
val wm3 = WordMatch.matchForCase(w1, w2);
wm3.relativeMatch should be < 1.0f;
wm3.relativeDistance should be > 0.0f;
wm3.isSignificant() should be(true);
val wm4 = WordMatch.matchForCase(w1, w4);
wm4.relativeMatch should be < 0.5f;
wm4.relativeDistance should be > 0.5f;
wm4.isSignificant() should be(false);
}
}
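// A minimal sketch (illustrative only, not the library's actual implementation)
// of the classic Levenshtein distance that `EditDistance.editDist` is expected
// to compute in the tests above: editDist("Word", "World") == 1 (one insertion)
// and editDist("Nothing", "Something") == 3.
object EditDistanceSketch {
  def editDist(a: String, b: String): Int = {
    // dp(i)(j) = edit distance between a.take(i) and b.take(j)
    val dp = Array.tabulate(a.length + 1, b.length + 1) { (i, j) =>
      if (i == 0) j else if (j == 0) i else 0
    }
    for (i <- 1 to a.length; j <- 1 to b.length)
      dp(i)(j) =
        if (a(i - 1) == b(j - 1)) dp(i - 1)(j - 1)
        else 1 + math.min(dp(i - 1)(j - 1), math.min(dp(i - 1)(j), dp(i)(j - 1)))
    dp(a.length)(b.length)
  }
}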
| Bathtor/common-utils | data-tools/shared/src/test/scala/com/lkroll/common/data/WordMatchTests.scala | Scala | mit | 2,359 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.fr.FormRunner._
import org.orbeon.oxf.properties.Properties
import org.orbeon.oxf.util.NetUtils
import org.orbeon.oxf.util.ScalaUtils._
import org.orbeon.oxf.webapp.HttpStatusCodeException
import org.orbeon.oxf.xforms.XFormsConstants._
import org.orbeon.oxf.xforms.action.XFormsAPI._
import org.orbeon.oxf.xml.XMLConstants._
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.scaxon.XML._
import XMLNames._
trait FormRunnerBaseOps {
val XH = XHTML_NAMESPACE_URI
val XF = XFORMS_NAMESPACE_URI
val XS = XSD_URI
val XBL = XBL_NAMESPACE_URI
val FR = "http://orbeon.org/oxf/xml/form-runner"
val NoscriptParam = "fr-noscript"
val LanguageParam = "fr-language"
val EmbeddableParam = "orbeon-embeddable"
val LiferayLanguageHeader = "orbeon-liferay-language"
val ParametersModel = "fr-parameters-model"
val PersistenceModel = "fr-persistence-model"
val ResourcesModel = "fr-resources-model"
val FormModel = "fr-form-model"
val ErrorSummaryModel = "fr-error-summary-model"
val SectionsModel = "fr-sections-model"
val TemplateSuffix = "-template"
// Get an id based on a name
// NOTE: The idea as of 2011-06-21 is that we support reading indiscriminately the -control, -grid
// suffixes, whatever type of actual control they apply to. The idea is that in the end we might decide to just use
// -control. OTOH we must have distinct ids for binds, controls and templates, so the -bind, -control and -template
// suffixes must remain.
def bindId(controlName: String) = controlName + "-bind"
def gridId(gridName: String) = gridName + "-grid"
def controlId(controlName: String) = controlName + "-control"
def templateId(gridName: String) = gridName + TemplateSuffix
def defaultIterationName(repeatName: String) = repeatName + "-iteration"
// Find a view element by id, using the index if possible, otherwise traversing the document
// NOTE: Searching by traversal when there is no index should be done directly in the selectID implementation.
def findInViewTryIndex(inDoc: NodeInfo, id: String) =
findTryIndex(inDoc, id, findFRBodyElement(inDoc), includeSelf = false)
def findInModelTryIndex(inDoc: NodeInfo, id: String) =
findTryIndex(inDoc, id, findModelElement(inDoc), includeSelf = false)
def findInBindsTryIndex(inDoc: NodeInfo, id: String) =
findTryIndex(inDoc, id, findTopLevelBind(inDoc).get, includeSelf = true)
private def findTryIndex(inDoc: NodeInfo, id: String, under: NodeInfo, includeSelf: Boolean): Option[NodeInfo] = {
// NOTE: This is a rather crude way of testing the presence of the index! But we do know for now that this is
// only called from the functions above, which search in a form's view, model, or binds, which implies the
// existence of a form model.
val hasIndex = inDoc.getDocumentRoot.selectID("fr-form-model") ne null
def isUnder(node: NodeInfo) =
if (includeSelf)
node ancestorOrSelf * contains under
else
node ancestor * contains under
def fromSearch =
if (includeSelf)
under descendantOrSelf * find (_.id == id)
else
under descendant * find (_.id == id)
def fromIndex =
Option(inDoc.getDocumentRoot.selectID(id)) match {
case elemOpt @ Some(elem) if isUnder(elem) ⇒ elemOpt
case Some(elem) ⇒ fromSearch
case None ⇒ None
}
if (hasIndex)
fromIndex
else
fromSearch
}
// Get the body element assuming the structure of an XHTML document, annotated or not, OR the structure of xbl:xbl.
// NOTE: annotate.xpl replaces fr:body with xf:group[@class = 'fb-body']
def findFRBodyElement(inDoc: NodeInfo) = {
def fromGroupById = Option(inDoc.getDocumentRoot.selectID("fb-body"))
def fromGroup = inDoc.rootElement \ "*:body" \\ XFGroupTest find (_.id == "fb-body")
def fromFRBody = inDoc.rootElement \ "*:body" \\ FRBodyTest headOption
def fromTemplate = inDoc.rootElement \ XBLTemplateTest headOption
fromGroupById orElse fromGroup orElse fromFRBody orElse fromTemplate get
}
// Get the form model
def findModelElement(inDoc: NodeInfo) = {
def fromHead = inDoc.rootElement \ "*:head" \ XFModelTest find (hasIdValue(_, FormModel))
def fromImplementation = inDoc.rootElement \ XBLImplementationTest \ XFModelTest headOption
fromHead orElse fromImplementation head
}
// Find an xf:instance element
def instanceElement(inDoc: NodeInfo, id: String) =
findModelElement(inDoc) \ "*:instance" find (hasIdValue(_, id))
// Find an inline instance's root element
def inlineInstanceRootElement(inDoc: NodeInfo, id: String) =
instanceElement(inDoc, id).toList \ * headOption
// Find all template instances
def templateInstanceElements(inDoc: NodeInfo) =
findModelElement(inDoc) \ "*:instance" filter (_.id endsWith TemplateSuffix)
// Get the root element of instances
def formInstanceRoot(inDoc: NodeInfo) = inlineInstanceRootElement(inDoc, "fr-form-instance").get
def metadataInstanceRoot(inDoc: NodeInfo) = inlineInstanceRootElement(inDoc, "fr-form-metadata")
def resourcesInstanceRoot(inDoc: NodeInfo) = inlineInstanceRootElement(inDoc, "fr-form-resources").get
private val TopLevelBindIds = Set("fr-form-binds", "fb-form-binds")
// Find the top-level binds (marked with "fr-form-binds" or "fb-form-binds"), if any
def findTopLevelBind(inDoc: NodeInfo): Option[NodeInfo] =
findModelElement(inDoc) \ "*:bind" find {
// There should be an id, but for backward compatibility also support ref/nodeset pointing to fr-form-instance
bind ⇒ TopLevelBindIds(bind.id) || bindRefOrNodeset(bind).contains("instance('fr-form-instance')")
}
def properties = Properties.instance.getPropertySet
def buildPropertyName(name: String)(implicit p: FormRunnerParams) =
if (hasAppForm(p.app, p.form))
name :: p.app :: p.form :: Nil mkString "."
else
name
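// For illustration (hypothetical values, not from the original source): with
// FormRunnerParams(app = "acme", form = "order", ...), buildPropertyName("oxf.fr.detail.captcha")
// yields "oxf.fr.detail.captcha.acme.order"; without an app/form pair it falls back to the plain name.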
// Return a property using the form's app/name, None if the property is not defined
def formRunnerProperty(name: String)(implicit p: FormRunnerParams) =
Option(properties.getObject(buildPropertyName(name))) map (_.toString)
// Return a boolean property using the form's app/name, false if the property is not defined
def booleanFormRunnerProperty(name: String)(implicit p: FormRunnerParams) =
Option(properties.getObject(buildPropertyName(name))) map (_.toString) contains "true"
// Interrupt current processing and send an error code to the client.
// NOTE: This could be done through ExternalContext
//@XPathFunction
def sendError(code: Int) = throw new HttpStatusCodeException(code)
def sendError(code: Int, resource: String) = throw new HttpStatusCodeException(code, Option(resource))
// Append a query string to a URL
def appendQueryString(urlString: String, queryString: String) = NetUtils.appendQueryString(urlString, queryString)
// Return specific Form Runner instances
def formInstance = topLevelInstance(FormModel, "fr-form-instance") get
def metadataInstance = topLevelInstance(FormModel, "fr-form-metadata")
def parametersInstance = topLevelInstance(ParametersModel, "fr-parameters-instance") get
def errorSummaryInstance = topLevelInstance(ErrorSummaryModel, "fr-error-summary-instance") get
def persistenceInstance = topLevelInstance(PersistenceModel, "fr-persistence-instance") get
def authorizedOperationsInstance = topLevelInstance(PersistenceModel, "fr-authorized-operations") get
// See also FormRunnerHome
private val UpdateOps = Set("*", "update")
def authorizedOperations = split[Set](authorizedOperationsInstance.rootElement.stringValue)
def supportsUpdate = authorizedOperations intersect UpdateOps nonEmpty
// Captcha support
def hasCaptcha = formRunnerProperty("oxf.fr.detail.captcha")(FormRunnerParams()) exists Set("reCAPTCHA", "SimpleCaptcha")
def captchaPassed = persistenceInstance.rootElement / "captcha" === "true"
//@XPathFunction
def showCaptcha = hasCaptcha && Set("new", "edit")(FormRunnerParams().mode) && ! captchaPassed && ! isNoscript
def isNoscript = containingDocument.noscript
def isEmbeddable = containingDocument.getRequestParameters.get(EmbeddableParam) map (_.head) contains "true"
// The standard Form Runner parameters
case class FormRunnerParams(app: String, form: String, formVersion: String, document: Option[String], mode: String)
object FormRunnerParams {
def apply(): FormRunnerParams = {
val params = parametersInstance.rootElement
FormRunnerParams(
app = params \ "app",
form = params \ "form",
formVersion = params \ "form-version",
document = nonEmptyOrNone(params \ "document"),
mode = params \ "mode"
)
}
}
// Display a success message
//@XPathFunction
def successMessage(message: String): Unit = {
setvalue(persistenceInstance.rootElement \ "message", message)
toggle("fr-message-success")
}
// Display an error message
//@XPathFunction
def errorMessage(message: String): Unit =
dispatch(name = "fr-show", targetId = "fr-error-dialog", properties = Map("message" → Some(message)))
}
| ajw625/orbeon-forms | src/main/scala/org/orbeon/oxf/fr/FormRunnerBaseOps.scala | Scala | lgpl-2.1 | 10,653 |
package scala.slick.compiler
import scala.slick.ast._
import Util._
import TypeUtil._
/** Infer types and compute missing structural views for all nominal table types. */
class InferTypes extends Phase {
val name = "inferTypes"
def apply(state: CompilerState) = state.map { tree =>
val tree2 = tree.nodeWithComputedType(new DefaultSymbolScope(Map.empty), true, false)
val structs = tree2.collect[(TypeSymbol, (Symbol, Type))] {
case s @ Select(_ :@ (n: NominalType), sym) => n.sourceNominalType.sym -> (sym -> s.nodeType)
}.groupBy(_._1).mapValues(v => StructType(v.map(_._2).toMap.toIndexedSeq))
logger.debug("Found Selects for NominalTypes: "+structs.keySet.mkString(", "))
def tr(n: Node): Node = n.nodeMapChildren(tr, keepType = true).nodeTypedOrCopy(n.nodeType.replace {
case t @ NominalType(tsym) if t.structuralView == NoType && structs.contains(tsym) =>
t.withStructuralView(structs(tsym))
})
tr(tree2)
}
}
| retronym/slick | src/main/scala/scala/slick/compiler/InferTypes.scala | Scala | bsd-2-clause | 972 |
package models.domain
import scala.slick.driver.MySQLDriver.simple._
import play.api.libs.json.Json
import scala.slick.lifted.ForeignKeyAction._
case class Recipient( card_id: Int,
recipient: String)
object Recipient{
implicit val format = Json.format[Recipient]
}
class Recipients extends Table[Recipient]("recipient") {
def card_id = column[Int]("card_id")
def username = column[String]("username")
def * = card_id ~ username <> (Recipient(_,_), Recipient.unapply)
def pk = primaryKey("recipient_PK", (username, card_id))
def card = foreignKey("card_FK", card_id, DAO.cards)(_.id, onDelete = Cascade)
def user = foreignKey("user_FK", username, DAO.users)(_.username)
}
| PureHacks/KudosBoard | app/models/domain/Recipient.scala | Scala | mit | 713 |
package com.shrbank.bigdata.storm
import akka.actor.{Actor, ActorLogging}
/**
* Created by ShaoJie.Wu on 2017/4/11 11:05.
* Purpose:
*
* Notes:
*/
trait IMessageSender{
def send(input:SpoutInputMsg):Unit
}
/**
* Message-forwarding actor.
* Receives messages sent by a client and calls IMessageSender.send to forward them.
*/
private[storm] class MessageActor( messageSender:IMessageSender ) extends Actor with ActorLogging{
override def receive: Receive = {
case ClientMsg(msg,msgSendTime) =>
messageSender.send(SpoutInputMsg(this.sender().path,msg,msgSendTime.toString))
log.debug(s"收到客户端消息$msg,耗时${System.currentTimeMillis()-msgSendTime}")
case unKnownTypeMsg =>
this.sender() ! ServerMsg(s"不支持的消息类型($unKnownTypeMsg),请用ClientMsg进行封装")
log.error(s"非法消息 $unKnownTypeMsg")
}
}
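// Illustrative note (assumes the ClientMsg/ServerMsg/SpoutInputMsg case classes
// referenced above, which are defined elsewhere in this project): a client would
// send ClientMsg(payload, System.currentTimeMillis()) to this actor; the actor
// wraps it as SpoutInputMsg(senderPath, payload, sendTime) and hands it to the
// configured IMessageSender, replying with a ServerMsg only for unsupported types.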
| shrbank/SyncSpout | core/src/main/scala/com/shrbank/bigdata/storm/MessageActor.scala | Scala | gpl-2.0 | 904 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.declaration
import controllers.actions.SuccessfulAuthAction
import models.declaration.{RenewRegistration, RenewRegistrationNo, RenewRegistrationYes}
import models.status.ReadyForRenewal
import org.joda.time.LocalDate
import org.jsoup.Jsoup
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.mockito.MockitoSugar
import play.api.i18n.Messages
import play.api.test.Helpers._
import services.{ProgressService, RenewalService, StatusService}
import utils.{AmlsSpec, AuthorisedFixture, DependencyMocks}
import views.html.declaration.renew_registration
import scala.concurrent.Future
class RenewRegistrationControllerSpec extends AmlsSpec with MockitoSugar with ScalaFutures {
trait Fixture extends AuthorisedFixture with DependencyMocks { self =>
val request = addToken(authRequest)
lazy val view = app.injector.instanceOf[renew_registration]
val controller = new RenewRegistrationController(
dataCacheConnector = mockCacheConnector,
authAction = SuccessfulAuthAction,
progressService = mock[ProgressService],
statusService = mock[StatusService],
renewalService = mock[RenewalService],
ds = commonDependencies,
cc = mockMcc,
renew_registration = view
)
}
"RenewRegistrationController" when {
"get is called" must {
"display the renew registration question where not previously answered" in new Fixture {
val date = new LocalDate()
when {
controller.statusService.getStatus(any(),any(), any())(any(),any())
} thenReturn Future.successful(ReadyForRenewal(Some(date)))
when{
controller.dataCacheConnector.fetch[RenewRegistration](any(), any())(any(), any())
} thenReturn Future.successful(None)
val result = controller.get()(request)
status(result) must be(OK)
contentAsString(result) must include(Messages("declaration.renew.registration.title"))
}
"display the renew registration question with pre populated data" in new Fixture {
val date = new LocalDate()
when {
controller.statusService.getStatus(any(),any(), any())(any(),any())
} thenReturn Future.successful(ReadyForRenewal(Some(date)))
when {
controller.dataCacheConnector.fetch[RenewRegistration](any(), any())(any(), any())
} thenReturn Future.successful(Some(RenewRegistrationYes))
val result = controller.get()(request)
status(result) must be(OK)
val document = Jsoup.parse(contentAsString(result))
document.select("input[value=true]").hasAttr("checked") must be(true)
}
}
"post is called" when {
"with valid data" must {
"redirect to renewal what you need" when {
"yes is selected" in new Fixture {
val date = new LocalDate()
when {
controller.statusService.getStatus(any(),any(), any())(any(),any())
} thenReturn Future.successful(ReadyForRenewal(Some(date)))
mockCacheSave[RenewRegistration](RenewRegistrationYes, Some(RenewRegistration.key))
val newRequest = requestWithUrlEncodedBody("renewRegistration" -> "true")
val result = controller.post()(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.renewal.routes.WhatYouNeedController.get.url))
}
}
"redirect to the url provided by progressService" in new Fixture {
val call = controllers.routes.RegistrationProgressController.get
val newRequest = requestWithUrlEncodedBody("renewRegistration" -> "false")
val date = new LocalDate()
when {
controller.statusService.getStatus(any(),any(), any())(any(),any())
} thenReturn Future.successful(ReadyForRenewal(Some(date)))
mockCacheSave[RenewRegistration](RenewRegistrationNo, Some(RenewRegistration.key))
when {
controller.progressService.getSubmitRedirect(any[Option[String]](), any(), any())(any(), any())
} thenReturn Future.successful(Some(call))
val result = controller.post()(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(call.url))
}
}
"with invalid data" must {
"respond with BAD_REQUEST" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"renewRegistration" -> "1234567890"
)
val date = new LocalDate()
when {
controller.statusService.getStatus(any(),any(), any())(any(),any())
} thenReturn Future.successful(ReadyForRenewal(Some(date)))
mockCacheSave[RenewRegistration](RenewRegistrationNo, Some(RenewRegistration.key))
val result = controller.post()(newRequest)
status(result) must be(BAD_REQUEST)
contentAsString(result) must include(Messages("error.required.declaration.renew.registration"))
}
}
}
}
}
| hmrc/amls-frontend | test/controllers/declaration/RenewRegistrationControllerSpec.scala | Scala | apache-2.0 | 5,738 |
/*
* Copyright 2013 Stephane Godbillon (@sgodbillon)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.bson
import java.math.{ BigDecimal => JBigDec }
import scala.util.{ Failure, Success, Try }
import scala.language.implicitConversions
import scala.Iterable
import exceptions.DocumentKeyNotFound
import buffer.{
ArrayReadableBuffer,
BufferHandler,
DefaultBufferHandler,
ReadableBuffer,
WritableBuffer
}
import utils.Converters
sealed trait Producer[T] {
private[bson] def generate(): Iterable[T]
}
object Producer {
private[bson] def apply[T](f: => Iterable[T]): Producer[T] =
new Producer[T] { def generate() = f }
case class NameOptionValueProducer(
private val element: (String, Option[BSONValue])) extends Producer[BSONElement] {
private[bson] def generate() =
element._2.map(value => BSONElement(element._1, value))
}
case class OptionValueProducer(
private val element: Option[BSONValue]) extends Producer[BSONValue] {
private[bson] def generate() = element
}
implicit def element2Producer[E <% BSONElement](element: E): Producer[BSONElement] = {
val e = implicitly[BSONElement](element)
NameOptionValueProducer(e.name -> Some(e.value))
}
implicit def nameOptionValue2Producer[T](element: (String, Option[T]))(implicit writer: BSONWriter[T, _ <: BSONValue]): Producer[BSONElement] = NameOptionValueProducer(element._1 -> element._2.map(value => writer.write(value)))
implicit def noneOptionValue2Producer(element: (String, None.type)): Producer[BSONElement] = NameOptionValueProducer(element._1 -> None)
implicit def valueProducer[T](element: T)(implicit writer: BSONWriter[T, _ <: BSONValue]): Producer[BSONValue] = OptionValueProducer(Some(writer.write(element)))
implicit def optionValueProducer[T](element: Option[T])(implicit writer: BSONWriter[T, _ <: BSONValue]): Producer[BSONValue] = OptionValueProducer(element.map(writer.write(_)))
implicit val noneOptionValueProducer: None.type => Producer[BSONValue] =
_ => OptionValueProducer(None)
implicit def identityValueProducer[B <: BSONValue](value: B): Producer[BSONValue] = OptionValueProducer(Some(value))
}
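// A small sketch (illustrative, not in the original source) of how the producers
// above drive document construction: a (name, None) pair generates no element,
// so the key is simply absent from the resulting document.
private[bson] object ProducerExample {
  val doc = BSONDocument("a" -> BSONInteger(1), "b" -> None)
  assert(doc.get("a") == Some(BSONInteger(1)))
  assert(doc.get("b").isEmpty) // the ("b", None) producer generated no element
}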
sealed trait BSONValue {
/**
* The code indicating the BSON type for this value
*/
val code: Byte
/** The number of bytes for the serialized representation */
private[reactivemongo] def byteSize: Int = -1
}
object BSONValue {
import scala.util.Try
import scala.reflect.ClassTag
implicit class ExtendedBSONValue[B <: BSONValue](val bson: B) extends AnyVal {
def asTry[T](implicit reader: BSONReader[B, T]): Try[T] = {
reader.readTry(bson)
}
def asOpt[T](implicit reader: BSONReader[B, T]): Option[T] = asTry(reader).toOption
def as[T](implicit reader: BSONReader[B, T]): T = asTry(reader).get
def seeAsTry[T](implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] =
Try { reader.asInstanceOf[BSONReader[BSONValue, T]].readTry(bson) }.flatten
def seeAsOpt[T](implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] =
seeAsTry[T].toOption
}
final def narrow[T <: BSONValue](v: BSONValue)(implicit tag: ClassTag[T]): Option[T] = tag.unapply(v)
/**
* An addition operation for [[BSONValue]],
* so that it forms an additive semigroup with the BSON value kind.
*/
object Addition extends ((BSONValue, BSONValue) => BSONArray) {
def apply(x: BSONValue, y: BSONValue): BSONArray = (x, y) match {
case (a @ BSONArray(_), b @ BSONArray(_)) => a.merge(b)
case (a @ BSONArray(_), _) => a.merge(y)
case (_, b @ BSONArray(_)) => x +: b
case _ => BSONArray(List(x, y))
}
}
}
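// Sketch (illustrative, not in the original source): BSONValue.Addition always
// yields a BSONArray, flattening any array operands, which is what makes it the
// additive semigroup described in its scaladoc.
private[bson] object AdditionExample {
  val sum = BSONValue.Addition(BSONInteger(1), BSONInteger(2))
  assert(sum == BSONArray(BSONInteger(1), BSONInteger(2)))
}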
/** A BSON Double. */
case class BSONDouble(value: Double) extends BSONValue {
val code = 0x01: Byte
override private[reactivemongo] val byteSize = 8
}
case class BSONString(value: String) extends BSONValue {
val code = 0x02: Byte
override private[reactivemongo] lazy val byteSize = 5 + value.getBytes.size
}
/**
* A `BSONArray` structure (BSON type `0x04`).
*
* A `BSONArray` is a straightforward `BSONDocument` whose keys are the sequence of non-negative integers ("0", "1", ...).
*
* A `BSONArray` is basically a stream of values, seen as elements whose names are the string representations of their indexes.
* It is completely lazy. The stream it wraps is a `Stream[Try[BSONValue]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*/
case class BSONArray(stream: Stream[Try[BSONValue]])
extends BSONValue with BSONElementSet {
val code = 0x04: Byte
type SetType = BSONArray
/**
* Returns the [[BSONValue]] at the given `index`.
*
* If there is no such `index` or the matching value cannot be deserialized, returns `None`.
*/
def get(index: Int): Option[BSONValue] = getTry(index).toOption
/**
* Returns the [[BSONValue]] matching the given `name`,
* provided it is a valid string representation of a valid index.
*/
def get(name: String): Option[BSONValue] = try {
get(name.toInt)
} catch {
case _: Throwable => None
}
/** Returns true if the given `name` corresponds to a valid index. */
def contains(name: String): Boolean = get(name).isDefined
def headOption: Option[BSONElement] = stream.collectFirst {
case Success(v) => BSONElement("0", v)
}
/**
* Returns the [[BSONValue]] at the given `index`.
*
* If there is no such `index` or the matching value cannot be deserialized, returns a `Failure`.
* The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
*/
def getTry(index: Int): Try[BSONValue] = stream.drop(index).headOption.
getOrElse(Failure(DocumentKeyNotFound(index.toString)))
/**
* Returns the [[BSONValue]] at the given `index`.
*
* If there is no such `index`, the resulting option will be `None`.
* If the matching value could not be deserialized, returns a `Failure`.
*/
def getUnflattenedTry(index: Int): Try[Option[BSONValue]] = getTry(index) match {
case Failure(_: DocumentKeyNotFound) => Success(None)
case Failure(e) => Failure(e)
case Success(e) => Success(Some(e))
}
/**
* Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
*/
def getAs[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = {
getTry(index).toOption.flatMap { element =>
Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)).toOption
}
}
/**
* Gets the [[BSONValue]] at the given `index`,
* and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
* The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
*/
def getAsTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
val tt = getTry(index)
tt.flatMap { element => Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)) }
}
/**
* Gets the [[BSONValue]] at the given `index`,
* and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, returns a `Success` holding `None`.
* If the value could not be deserialized or converted, returns a `Failure`.
*/
def getAsUnflattenedTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(index)(reader) match {
case Failure(_: DocumentKeyNotFound) => Success(None)
case Failure(e) => Failure(e)
case Success(e) => Success(Some(e))
}
/** Creates a new [[BSONArray]] containing all the elements of this one and the elements of the given document. */
def merge(doc: BSONArray): BSONArray = new BSONArray(stream ++ doc.stream)
/** Creates a new [[BSONArray]] containing all the elements of this one and the given `elements`. */
def merge(values: Producer[BSONValue]*): BSONArray =
new BSONArray(stream ++ values.flatMap { v =>
v.generate().map(value => Try(value))
}.toStream)
/** Returns a [[BSONArray]] with the given value prepended to its elements. */
def prepend(value: Producer[BSONValue]): BSONArray =
new BSONArray(value.generate().map(Try(_)) ++: stream)
/** Alias for [[BSONArray.prepend]] */
def +:(value: Producer[BSONValue]): BSONArray = prepend(value)
/** Alias for the corresponding `merge` */
@inline def ++(array: BSONArray): BSONArray = merge(array)
/** Alias for `merge(values: Producer[BSONValue]*)` */
def ++(values: Producer[BSONValue]*): BSONArray = merge(values: _*)
@inline private def values(elements: Producer[BSONElement]) =
Producer[BSONValue](elements.generate().map(_.value))
/**
* The name of the produced elements are ignored,
* instead the indexes are used.
*/
def :~(elements: Producer[BSONElement]*): BSONArray =
this ++ (elements.map(values(_)): _*)
def ~:(elements: Producer[BSONElement]): BSONArray =
BSONArray(values(elements)) ++ this
@inline def size = stream.size
@inline def isEmpty: Boolean = stream.isEmpty
override def toString: String = s"BSONArray(<${if (isEmpty) "empty" else "non-empty"}>)"
def elements: List[BSONElement] = stream.zipWithIndex.collect {
case (Success(v), index) => BSONElement(index.toString, v)
}.toList
private[bson] def generate() = elements
def values: Stream[BSONValue] = stream.collect {
case Success(v) => v
}
}
object BSONArray {
/** Creates a new [[BSONArray]] containing all the given `elements`. */
def apply(elements: Producer[BSONValue]*): BSONArray =
new BSONArray(elements.flatMap {
_.generate().map(value => Try(value))
}.toStream)
/** Creates a new [[BSONArray]] containing all the `elements` in the given `Iterable`. */
def apply(elements: Iterable[BSONValue]): BSONArray = {
new BSONArray(elements.toStream.map(Try(_)))
}
/** Returns a String representing the given [[BSONArray]]. */
def pretty(array: BSONArray) =
BSONIterator.pretty(array.elements.map(Try(_)).iterator)
/** An empty BSONArray. */
val empty: BSONArray = BSONArray()
/** Writes the `document` into the `buffer`. */
private[reactivemongo] def write(value: BSONArray, buffer: WritableBuffer): WritableBuffer = DefaultBufferHandler.BSONArrayBufferHandler.write(value, buffer)
/**
* Reads a `document` from the `buffer`.
*
* Note that the buffer's readerIndex must be set on the start of a document, or it will fail.
*/
private[reactivemongo] def read(buffer: ReadableBuffer): Try[BSONArray] =
Try(DefaultBufferHandler.BSONArrayBufferHandler read buffer)
}
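// Sketch (illustrative, not in the original source): per the scaladoc above,
// array indexes also act as string keys ("0", "1", ...).
private[bson] object BSONArrayExample {
  val arr = BSONArray(BSONString("x"), BSONString("y"))
  assert(arr.get(1) == Some(BSONString("y")))
  assert(arr.get("1") == Some(BSONString("y"))) // same lookup via the string index
}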
/**
* A BSON binary value.
*
* @param value The binary content.
* @param subtype The type of the binary content.
*/
case class BSONBinary(value: ReadableBuffer, subtype: Subtype)
extends BSONValue {
val code = 0x05: Byte
/** Returns the whole binary content as array. */
def byteArray: Array[Byte] = value.duplicate().readArray(value.size)
override private[reactivemongo] lazy val byteSize = {
5 /* header = 4 (value.readable: Int) + 1 (subtype.value.toByte) */ +
value.readable
}
override lazy val toString: String =
s"BSONBinary(${subtype}, size = ${value.readable})"
}
object BSONBinary {
def apply(value: Array[Byte], subtype: Subtype): BSONBinary =
BSONBinary(ArrayReadableBuffer(value), subtype)
def apply(id: java.util.UUID): BSONBinary = {
val buf = java.nio.ByteBuffer.wrap(Array.ofDim[Byte](16))
buf putLong id.getMostSignificantBits
buf putLong id.getLeastSignificantBits
BSONBinary(buf.array, Subtype.UuidSubtype)
}
}
/** BSON Undefined value */
case object BSONUndefined
extends BSONValue {
val code = 0x06.toByte
override private[reactivemongo] val byteSize = 0
}
/**
* BSON ObjectId value.
*
* +------------------------+------------------------+------------------------+------------------------+
* + timestamp (in seconds) + machine identifier + thread identifier + increment +
* + (4 bytes) + (3 bytes) + (2 bytes) + (3 bytes) +
* +------------------------+------------------------+------------------------+------------------------+
*/
@SerialVersionUID(239421902L)
class BSONObjectID private (private val raw: Array[Byte])
extends BSONValue with Serializable with Equals {
val code = 0x07: Byte
import java.util.Arrays
import java.nio.ByteBuffer
/** ObjectId hexadecimal String representation */
lazy val stringify = Converters.hex2Str(raw)
override def toString = s"""BSONObjectID("${stringify}")"""
override def canEqual(that: Any): Boolean = that.isInstanceOf[BSONObjectID]
override def equals(that: Any): Boolean = that match {
case BSONObjectID(other) => Arrays.equals(raw, other)
case _ => false
}
override lazy val hashCode: Int = Arrays.hashCode(raw)
/** The time of this BSONObjectId, in milliseconds */
def time: Long = this.timeSecond * 1000L
/** The time of this BSONObjectId, in seconds */
def timeSecond: Int = ByteBuffer.wrap(raw.take(4)).getInt
def valueAsArray: Array[Byte] = Arrays.copyOf(raw, 12)
@inline override private[reactivemongo] def byteSize = raw.size
}
object BSONObjectID {
def apply(array: Array[Byte]): BSONObjectID = {
if (array.length != 12)
throw new IllegalArgumentException(s"wrong byte array for an ObjectId (size ${array.length})")
new BSONObjectID(java.util.Arrays.copyOf(array, 12))
}
def unapply(id: BSONObjectID): Option[Array[Byte]] = Some(id.valueAsArray)
/** Tries to make a BSON ObjectId from a hexadecimal string representation. */
def parse(id: String): Try[BSONObjectID] = {
if (id.length != 24) Failure[BSONObjectID](
new IllegalArgumentException(s"Wrong ObjectId (length != 24): '$id'"))
else Try(new BSONObjectID(Converters str2Hex id))
}
/**
* Generates a new BSON ObjectID using the current time.
*
* @see [[fromTime]]
*/
def generate(): BSONObjectID = fromTime(System.currentTimeMillis, false)
/**
* Generates a new BSON ObjectID from the given timestamp in milliseconds.
*
* The included timestamp is the number of seconds since epoch, so a BSONObjectID time part has only
* a precision up to the second. To get a reasonably unique ID, you _must_ set `fillOnlyTimestamp` to false.
*
* Crafting a BSONObjectID from a timestamp with `fillOnlyTimestamp` set to true is helpful for range queries,
* e.g. if you want to find documents whose `_id` timestamp part is greater than or less than
* that of another id.
*
* If you do not intend to use the produced BSONObjectID for range queries, then you'd rather use
* the `generate` method instead.
*
* @param fillOnlyTimestamp if true, the returned BSONObjectID will only have the timestamp bytes set; the others will be set to zero.
*/
def fromTime(timeMillis: Long, fillOnlyTimestamp: Boolean = true): BSONObjectID = {
val id = new Array[Byte](12)
{ // First 4 bytes: seconds since the Unix epoch
// number of seconds since epoch, big endian
val timestamp = (timeMillis / 1000).toInt
id(0) = (timestamp >>> 24).toByte
id(1) = (timestamp >> 16 & 0xFF).toByte
id(2) = (timestamp >> 8 & 0xFF).toByte
id(3) = (timestamp & 0xFF).toByte
}
if (!fillOnlyTimestamp) {
// machine id: first 3 bytes of md5(MAC address or hostname)
id(4) = machineId(0)
id(5) = machineId(1)
id(6) = machineId(2)
// 2 bytes of the pid (unique per classloader). Little endian
val pid: Int = System.identityHashCode(BSONObjectID) //Thread.currentThread.getId.toInt
id(7) = (pid & 0xFF).toByte
id(8) = (pid >> 8 & 0xFF).toByte
// 3 bytes of counter sequence, which start is randomized. Big endian
val c = counter
id(9) = (c >> 16 & 0xFF).toByte
id(10) = (c >> 8 & 0xFF).toByte
id(11) = (c & 0xFF).toByte
}
BSONObjectID(id)
}
// ---
private val maxCounterValue = 16777216
private val increment = new java.util.concurrent.atomic.AtomicInteger(scala.util.Random.nextInt(maxCounterValue))
private def counter: Int =
(increment.getAndIncrement + maxCounterValue) % maxCounterValue
/**
* The following implementation of machineId works around OpenJDK limitations in
* versions 6 and 7.
*
* OpenJDK fails to parse /proc/net/if_inet6 correctly to determine the MAC address,
* resulting in a SocketException being thrown.
*
* Please see:
* * https://github.com/openjdk-mirror/jdk7u-jdk/blob/feeaec0647609a1e6266f902de426f1201f77c55/src/solaris/native/java/net/NetworkInterface.c#L1130
* * http://lxr.free-electrons.com/source/net/ipv6/addrconf.c?v=3.11#L3442
* * http://lxr.free-electrons.com/source/include/linux/netdevice.h?v=3.11#L1130
* * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7078386
*
* and fix in openjdk8:
* * http://hg.openjdk.java.net/jdk8/tl/jdk/rev/b1814b3ea6d3
*/
private val machineId: Array[Byte] = {
import java.net._
def p(n: String) = System.getProperty(n)
val validPlatform: Boolean = Try {
val correctVersion = p("java.version").substring(0, 3).toFloat >= 1.8
val noIpv6 = p("java.net.preferIPv4Stack").toBoolean == true
val isLinux = p("os.name") == "Linux"
!isLinux || correctVersion || noIpv6
}.getOrElse(false)
// Check java policies
val permitted: Boolean = {
val sec = System.getSecurityManager()
Try {
sec.checkPermission(new NetPermission("getNetworkInformation"))
}.toOption.map(_ => true).getOrElse(false)
}
if (validPlatform && permitted) {
val networkInterfacesEnum = NetworkInterface.getNetworkInterfaces
val networkInterfaces = scala.collection.JavaConverters.enumerationAsScalaIteratorConverter(networkInterfacesEnum).asScala
val ha = networkInterfaces.find(ha => Try(ha.getHardwareAddress).isSuccess && ha.getHardwareAddress != null && ha.getHardwareAddress.length == 6)
.map(_.getHardwareAddress)
.getOrElse(InetAddress.getLocalHost.getHostName.getBytes("UTF-8"))
Converters.md5(ha).take(3)
} else {
val threadId = Thread.currentThread.getId.toInt
val arr = new Array[Byte](3)
arr(0) = (threadId & 0xFF).toByte
arr(1) = (threadId >> 8 & 0xFF).toByte
arr(2) = (threadId >> 16 & 0xFF).toByte
arr
}
}
}
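// Sketch (illustrative, not in the original source): a generated ObjectId
// round-trips through its 24-character hexadecimal representation.
private[bson] object BSONObjectIDExample {
  val id = BSONObjectID.generate()
  assert(BSONObjectID.parse(id.stringify).get == id)
}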
/** BSON boolean value */
case class BSONBoolean(value: Boolean)
extends BSONValue {
val code = 0x08: Byte
override private[reactivemongo] val byteSize = 1
}
/** BSON date time value */
case class BSONDateTime(value: Long)
extends BSONValue {
val code = 0x09: Byte
override private[reactivemongo] val byteSize = 8
}
/** BSON null value */
case object BSONNull extends BSONValue {
val code = 0x0A: Byte
override private[reactivemongo] val byteSize = 0
}
/**
* BSON Regex value.
*
* @param flags Regex flags.
*/
case class BSONRegex(value: String, flags: String)
extends BSONValue {
val code = 0x0B: Byte
override private[reactivemongo] lazy val byteSize =
2 + value.getBytes.size + flags.getBytes.size
}
/** BSON DBPointer value. */
class BSONDBPointer private[bson] (
val value: String,
internalId: () => Array[Byte]) extends BSONValue {
val code = 0x0C: Byte
/** The BSONObjectID representation of this reference. */
val objectId = BSONObjectID(internalId())
private[bson] def withId[T](f: Array[Byte] => T): T = f(internalId())
override private[reactivemongo] lazy val byteSize: Int =
1 + value.getBytes.size + internalId().size
// ---
override def equals(that: Any): Boolean = that match {
case other: BSONDBPointer =>
(other.value -> other.objectId) == (value -> objectId)
case _ => false
}
override def hashCode: Int = (value, objectId).hashCode
override lazy val toString: String = s"BSONDBPointer(${objectId})"
}
object BSONDBPointer {
/** Returns a new DB pointer */
def apply(value: String, id: => Array[Byte]): BSONDBPointer =
new BSONDBPointer(value, () => id)
/** Extractor */
def unapply(pointer: BSONDBPointer): Option[(String, Array[Byte])] =
pointer.withId { id => Some(pointer.value -> id) }
}
/**
* BSON JavaScript value.
*
* @param value The JavaScript source code.
*/
case class BSONJavaScript(value: String) extends BSONValue {
val code = 0x0D: Byte
override private[reactivemongo] lazy val byteSize = 5 + value.getBytes.size
}
/** BSON Symbol value. */
case class BSONSymbol(value: String) extends BSONValue {
val code = 0x0E.toByte
override private[reactivemongo] lazy val byteSize = 5 + value.getBytes.size
}
/**
* BSON scoped JavaScript value.
*
* @param value The JavaScript source code.
*/
case class BSONJavaScriptWS(value: String)
extends BSONValue {
val code = 0x0F: Byte
override private[reactivemongo] lazy val byteSize = 5 + value.getBytes.size
}
/** BSON Integer value */
case class BSONInteger(value: Int) extends BSONValue {
val code = 0x10: Byte
override private[reactivemongo] val byteSize = 4
}
/** BSON Timestamp value */
case class BSONTimestamp(value: Long) extends BSONValue {
val code = 0x11: Byte
/** Seconds since the Unix epoch */
val time = value >>> 32
/** Ordinal (within the second) */
val ordinal = value.toInt
override private[reactivemongo] val byteSize = 8
}
/** Timestamp companion */
object BSONTimestamp {
/**
* Returns the timestamp corresponding to the given `time` and `ordinal`.
*
* @param time the 32bits time value (seconds since the Unix epoch)
* @param ordinal an incrementing ordinal for operations within a same second
*/
def apply(time: Long, ordinal: Int): BSONTimestamp =
BSONTimestamp((time << 32) ^ ordinal)
}
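// Sketch (illustrative, not in the original source): the companion packs `time`
// into the high 32 bits and `ordinal` into the low 32 bits, so both round-trip.
private[bson] object BSONTimestampExample {
  val ts = BSONTimestamp(time = 1500000000L, ordinal = 42)
  assert(ts.time == 1500000000L && ts.ordinal == 42)
}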
/** BSON Long value */
case class BSONLong(value: Long) extends BSONValue {
val code = 0x12: Byte
override private[reactivemongo] val byteSize = 8
}
/**
* Value wrapper for a [[https://github.com/mongodb/specifications/blob/master/source/bson-decimal128/decimal128.rst BSON 128-bit decimal]].
*
* @param high the high-order 64 bits
* @param low the low-order 64 bits
*/
@SerialVersionUID(1667418254L)
final class BSONDecimal(val high: Long, val low: Long)
extends BSONValue with Product2[Long, Long] {
val code = 0x13: Byte
/** Returns true if is negative. */
lazy val isNegative: Boolean =
(high & Decimal128.SignBitMask) == Decimal128.SignBitMask
/** Returns true if is infinite. */
lazy val isInfinite: Boolean =
(high & Decimal128.InfMask) == Decimal128.InfMask
/** Returns true if is Not-A-Number (NaN). */
lazy val isNaN: Boolean =
(high & Decimal128.NaNMask) == Decimal128.NaNMask
override private[reactivemongo] lazy val byteSize = 16
// ---
/**
* Returns the [[https://github.com/mongodb/specifications/blob/master/source/bson-decimal128/decimal128.rst#to-string-representation string representation]].
*/
override def toString: String = Decimal128.toString(this)
@inline def _1 = high
@inline def _2 = low
def canEqual(that: Any): Boolean = that match {
case BSONDecimal(_, _) => true
case _ => false
}
override def equals(that: Any): Boolean = that match {
case BSONDecimal(h, l) => (high == h) && (low == l)
case _ => false
}
override lazy val hashCode: Int = {
val result = (low ^ (low >>> 32)).toInt
31 * result + (high ^ (high >>> 32)).toInt
}
}
object BSONDecimal {
import java.math.MathContext
/**
* Factory alias.
*
* @param high the high-order 64 bits
* @param low the low-order 64 bits
*/
@inline def apply(high: Long, low: Long): BSONDecimal =
new BSONDecimal(high, low)
/**
* Returns a BSON decimal (Decimal128) corresponding to the given BigDecimal.
*
* @param value the BigDecimal representation
*/
@inline def fromBigDecimal(value: JBigDec): Try[BSONDecimal] =
Decimal128.fromBigDecimal(value, value.signum == -1)
/**
* Returns a BSON decimal (Decimal128) corresponding to the given BigDecimal.
*
* @param value the BigDecimal representation
*/
@inline def fromBigDecimal(value: BigDecimal): Try[BSONDecimal] =
Decimal128.fromBigDecimal(value.bigDecimal, value.signum == -1)
/**
* Returns a Decimal128 value representing the given high 64 bits value,
* using a default for the low one.
*
* @param high the high-order 64 bits
*/
@inline def fromLong(high: Long): Try[BSONDecimal] =
fromBigDecimal(new JBigDec(high, MathContext.DECIMAL128))
/**
* Returns the Decimal128 corresponding to the given string representation.
*
* @param repr the Decimal128 value represented as string
* @see [[https://github.com/mongodb/specifications/blob/master/source/bson-decimal128/decimal128.rst#from-string-representation Decimal128 string representation]]
*/
def parse(repr: String): Try[BSONDecimal] = Decimal128.parse(repr)
/** Returns the corresponding BigDecimal. */
def toBigDecimal(decimal: BSONDecimal): Try[BigDecimal] =
Decimal128.toBigDecimal(decimal).map(BigDecimal(_))
/** Extracts the (high, low) representation. */
def unapply(that: Any): Option[(Long, Long)] = that match {
case decimal: BSONDecimal => Some(decimal.high -> decimal.low)
case _ => None
}
// ---
/**
* Decimal128 representation of the positive infinity
*/
val PositiveInf: BSONDecimal = BSONDecimal(Decimal128.InfMask, 0)
/**
* Decimal128 representation of the negative infinity
*/
val NegativeInf: BSONDecimal =
BSONDecimal(Decimal128.InfMask | Decimal128.SignBitMask, 0)
/**
* Decimal128 representation of a negative Not-a-Number (-NaN) value
*/
val NegativeNaN: BSONDecimal =
BSONDecimal(Decimal128.NaNMask | Decimal128.SignBitMask, 0)
/**
* Decimal128 representation of a Not-a-Number (NaN) value
*/
val NaN: BSONDecimal = BSONDecimal(Decimal128.NaNMask, 0)
/**
* Decimal128 representation of a positive zero value
*/
val PositiveZero: BSONDecimal =
BSONDecimal(0x3040000000000000L, 0x0000000000000000L)
/**
* Decimal128 representation of a negative zero value
*/
val NegativeZero: BSONDecimal =
BSONDecimal(0xb040000000000000L, 0x0000000000000000L)
}
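// Sketch (illustrative, not in the original source): the mask-based flags defined
// on BSONDecimal follow directly from the special-value constants above.
private[bson] object BSONDecimalExample {
  assert(BSONDecimal.PositiveInf.isInfinite)
  assert(BSONDecimal.NaN.isNaN)
  assert(BSONDecimal.NegativeInf.isNegative)
}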
/** BSON Min key value */
object BSONMinKey extends BSONValue {
val code = 0xFF.toByte
override private[reactivemongo] val byteSize = 0
override val toString = "BSONMinKey"
}
/** BSON Max key value */
object BSONMaxKey extends BSONValue {
val code = 0x7F: Byte
override private[reactivemongo] val byteSize = 0
override val toString = "BSONMaxKey"
}
/** Binary Subtype */
sealed trait Subtype {
/** Subtype code */
val value: Byte
}
object Subtype {
case object GenericBinarySubtype extends Subtype { val value = 0x00: Byte }
case object FunctionSubtype extends Subtype { val value = 0x01: Byte }
case object OldBinarySubtype extends Subtype { val value = 0x02: Byte }
case object OldUuidSubtype extends Subtype { val value = 0x03: Byte }
case object UuidSubtype extends Subtype { val value = 0x04: Byte }
case object Md5Subtype extends Subtype { val value = 0x05: Byte }
case object UserDefinedSubtype extends Subtype { val value = 0x80.toByte }
def apply(code: Byte) = code match {
case 0 => GenericBinarySubtype
case 1 => FunctionSubtype
case 2 => OldBinarySubtype
case 3 => OldUuidSubtype
case 4 => UuidSubtype
case 5 => Md5Subtype
case -128 => UserDefinedSubtype
case _ => throw new NoSuchElementException(s"binary type = $code")
}
}
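// Sketch (illustrative, not in the original source): subtype codes map back to
// their case objects; 0x80 is read as a signed byte, hence the -128 case above.
private[bson] object SubtypeExample {
  assert(Subtype(4) == Subtype.UuidSubtype)
  assert(Subtype(-128) == Subtype.UserDefinedSubtype)
}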
/**
* Operations for a [[BSONElement]] that can contain multiple nested elements.
*
* @define keyParam the key to be found in the document
*/
sealed trait BSONElementSet extends ElementProducer { self: BSONValue =>
type SetType <: BSONElementSet
/** The first/mandatory nested element, if any */
def headOption: Option[BSONElement]
/** Returns the values for the nested elements. */
def values: Iterable[BSONValue]
/**
* Returns a list for the values as [[BSONElement]]s,
* with their indexes as names (e.g. "0" for the first).
*/
def elements: Iterable[BSONElement]
/** Returns a `Map` representation for this element set. */
def toMap: Map[String, BSONValue] = elements.iterator.map {
case BSONElement(name, value) => name -> value
}.to(Map)
/**
* Checks whether the given key is found in this element set.
*
* @param key $keyParam
* @return true if the key is found
*/
def contains(key: String): Boolean
/**
* Returns the [[BSONValue]] associated with the given `key`.
* If the key cannot be found, returns `None`.
*
* @param key $keyParam
*/
def get(key: String): Option[BSONValue]
/** Merge the produced elements at the beginning of this set */
def ~:(elements: Producer[BSONElement]): SetType
/** Merge the produced elements with this set */
def :~(elements: Producer[BSONElement]*): SetType
/** The number of elements */
def size: Int
/** Indicates whether this element set is empty */
def isEmpty: Boolean
override private[reactivemongo] lazy val byteSize: Int =
elements.foldLeft(5) {
case (sz, BSONElement(n, v)) =>
sz + BSONElementSet.docElementByteOverhead + n.getBytes.size + v.byteSize
}
}
object BSONElementSet {
def unapplySeq(that: BSONElementSet): Option[List[BSONElement]] =
Some(that.elements.toList)
/** Automatic conversion from elements collections to [[BSONElementSet]]. */
implicit def apply(set: Iterable[BSONElement]): BSONElementSet =
new BSONDocument(set.map(Success(_)).toStream)
// key null terminator + type prefix for the element
private[reactivemongo] val docElementByteOverhead = 2
}
/**
* A `BSONDocument` structure (BSON type `0x03`).
*
* A `BSONDocument` is basically a stream of tuples `(String, BSONValue)`.
* It is completely lazy. The stream it wraps is a `Stream[Try[BSONElement]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*
* @define keyParam the key to be found in the document
*/
case class BSONDocument(stream: Stream[Try[BSONElement]])
extends BSONValue with BSONElementSet {
val code = 0x03.toByte
type SetType = BSONDocument
def contains(key: String): Boolean = elements.exists(_.name == key)
def get(key: String): Option[BSONValue] = elements.collectFirst {
case BSONElement(`key`, value) => value
}
/**
* Returns the [[BSONValue]] associated with the given `key`.
*
* If the key is not found or the matching value cannot be deserialized, returns a `Failure`.
* The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
*
* @param key $keyParam
*/
def getTry(key: String): Try[BSONValue] = stream.collectFirst {
case Success(BSONElement(k, cause)) if k == key => Success(cause)
case Failure(e) => Failure(e)
}.getOrElse(Failure(DocumentKeyNotFound(key)))
/**
* Returns the [[BSONValue]] associated with the given `key`.
*
* If the key could not be found, the resulting option will be `None`.
* If the matching value could not be deserialized, returns a `Failure`.
*
* @param key $keyParam
*/
def getUnflattenedTry(key: String): Try[Option[BSONValue]] =
getTry(key) match {
case Failure(DocumentKeyNotFound(_)) => Success(None)
case Failure(e) => Failure(e)
case Success(e) => Success(Some(e))
}
/**
* Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
*
* @param key $keyParam
*
* @note When implementing a [[http://reactivemongo.org/releases/latest/documentation/bson/typeclasses.html custom reader]], [[getAsTry]] must be preferred.
*/
def getAs[T](key: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = get(key).flatMap { element =>
reader match {
case r: BSONReader[BSONValue, T] @unchecked => r.readOpt(element)
case _ => None
}
}
/**
* Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
* The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
*
* @param key $keyParam
*/
def getAsTry[T](key: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
val tt = getTry(key)
tt.flatMap {
case BSONNull =>
Failure(DocumentKeyNotFound(key))
case element =>
Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element))
}
}
/**
* Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
*
* If there is no matching value, returns a `Success` holding `None`.
* If the value could not be deserialized or converted, returns a `Failure`.
*/
def getAsUnflattenedTry[T](key: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(key)(reader) match {
case Failure(_: DocumentKeyNotFound) => Success(None)
case Failure(e) => Failure(e)
case Success(e) => Success(Some(e))
}
/** Creates a new [[BSONDocument]] containing all the elements of this one and the elements of the given document. */
def merge(doc: BSONDocument): BSONDocument =
new BSONDocument(stream ++ doc.stream)
/** Creates a new [[BSONDocument]] containing all the elements of this one and the given `elements`. */
def merge(elements: Producer[BSONElement]*): BSONDocument =
new BSONDocument(stream ++ elements.flatMap(
_.generate().map(value => Try(value))).toStream)
/** Creates a new [[BSONDocument]] without the elements corresponding to the given `keys`. */
def remove(keys: String*): BSONDocument = new BSONDocument(stream.filter {
case Success(BSONElement(key, _)) if (
keys contains key) => false
case _ => true
})
/** Alias for `merge(doc: BSONDocument): BSONDocument` */
def ++(doc: BSONDocument): BSONDocument = merge(doc)
/** Alias for `:~` or `merge` */
def ++(elements: Producer[BSONElement]*): BSONDocument = merge(elements: _*)
def :~(elements: Producer[BSONElement]*): BSONDocument = merge(elements: _*)
def ~:(elements: Producer[BSONElement]): BSONDocument =
new BSONDocument(elements.generate().map(Success(_)) ++: stream)
/** Alias for `remove(names: String*)` */
def --(keys: String*): BSONDocument = remove(keys: _*)
def headOption: Option[BSONElement] = stream.collectFirst {
case Success(first) => first
}
/** Returns a `Stream` for all the elements of this `BSONDocument`. */
lazy val elements: Stream[BSONElement] = stream.collect {
case Success(v) => v
}
private[bson] def generate() = elements
def values: Stream[BSONValue] = stream.collect {
case Success(BSONElement(_, value)) => value
}
@inline def isEmpty = stream.isEmpty
@inline def size = stream.size
override def toString: String = "BSONDocument(<" + (if (isEmpty) "empty" else "non-empty") + ">)"
}
object BSONDocument {
/** Creates a [[BSONDocument]] from the given elements set. */
def apply(set: BSONElementSet): BSONDocument = set match {
case doc @ BSONDocument(_) => doc
case _ => BSONDocument.empty :~ set
}
/** Creates a new [[BSONDocument]] containing all the given `elements`. */
def apply(elements: Producer[BSONElement]*): BSONDocument =
new BSONDocument(elements.flatMap(
_.generate().map(value => Try(value))).toStream)
/**
* Creates a new [[BSONDocument]] containing all the `elements`
* in the given `Iterable`.
*/
def apply(elements: Iterable[(String, BSONValue)]): BSONDocument =
new BSONDocument(elements.toStream.map {
case (n, v) => Success(BSONElement(n, v))
})
/** Returns a String representing the given [[BSONDocument]]. */
def pretty(doc: BSONDocument) = BSONIterator.pretty(doc.stream.iterator)
/** Writes the `document` into the `buffer`. */
def write(value: BSONDocument, buffer: WritableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): WritableBuffer =
bufferHandler.writeDocument(value, buffer)
/**
* Reads a `document` from the `buffer`.
*
* Note that the buffer's readerIndex must be set on the start of a document, or it will fail.
*/
def read(buffer: ReadableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): BSONDocument = bufferHandler.readDocument(buffer).get
/** An empty BSONDocument. */
val empty: BSONDocument = BSONDocument()
}
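// Sketch (illustrative, not in the original source): getTry fails with
// DocumentKeyNotFound for a missing key, while getUnflattenedTry turns that
// case into Success(None), as documented above.
private[bson] object BSONDocumentExample {
  val doc = BSONDocument("name" -> BSONString("bson"))
  assert(doc.get("name") == Some(BSONString("bson")))
  assert(doc.getTry("missing").isFailure)
  assert(doc.getUnflattenedTry("missing") == Success(None))
}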
case class BSONElement(
name: String,
value: BSONValue) extends ElementProducer {
def generate() = Option(this)
}
object BSONElement extends BSONElementLowPriority {
implicit def provided(pair: (String, BSONValue)): BSONElement =
BSONElement(pair._1, pair._2)
}
sealed trait BSONElementLowPriority {
implicit def converted[T](pair: (String, T))(implicit w: BSONWriter[T, _ <: BSONValue]): BSONElement = BSONElement(pair._1, w.write(pair._2))
}
sealed trait ElementProducer extends Producer[BSONElement]
object ElementProducer {
/**
* An empty instance for the [[ElementProducer]] kind.
* Can be used as `id` with the element [[Composition]] to form
* an additive monoid.
*/
case object Empty extends ElementProducer {
def generate() = List.empty[BSONElement]
}
/**
* A composition operation for [[ElementProducer]],
* so that it forms an additive monoid with the [[Empty]] instance as `id`.
*/
object Composition
extends ((ElementProducer, ElementProducer) => ElementProducer) {
def apply(x: ElementProducer, y: ElementProducer): ElementProducer =
(x, y) match {
case (Empty, Empty) => Empty
case (Empty, _) => y
case (_, Empty) => x
case (a @ BSONElementSet(_), b @ BSONElementSet(_)) => a :~ b
case (a @ BSONElementSet(_), _) => a :~ y
case (_, b @ BSONElementSet(_)) => x ~: b
case _ => BSONDocument(x, y)
}
}
}
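// Sketch (illustrative, not in the original source): Empty is the identity of
// Composition, making (ElementProducer, Composition, Empty) the additive monoid
// described in the scaladoc above.
private[bson] object CompositionExample {
  val e = BSONElement("a", BSONInteger(1))
  assert(ElementProducer.Composition(ElementProducer.Empty, e) == e)
  assert(ElementProducer.Composition(e, ElementProducer.Empty) == e)
}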
| cchantep/ReactiveMongo | bson/src/main/scala-2.13+/types.scala | Scala | apache-2.0 | 39,305 |
/*
// Copyright 2012/2013 by Gustavo Steinberg, Flavio Soares, Pierre Andrews, Gustavo Salazar Torres, Thomaz Abramo
//
// This file is part of the Vigia Político program. The Vigia Político
// project is free software; you can redistribute it and/or modify it
// under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation (FSF), version 3 of the License.
// This program is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of FITNESS
// for any particular MARKET or APPLICATION. See the license for further
// details. You should have received a copy of the GNU Affero General
// Public License, under the title "LICENCA.txt", along with this
// program; if not, visit http://www.gnu.org/licenses/
*/
package models
import play.api.db._
import play.api.Play.current
import java.util.Date
import anorm._
import anorm.SqlParser._
case class UserLawRegion(userId: Long, lawRegionId: Long)
object UserLawRegion {
val simple = {
( get[Long]("user_id") ~
get[Long]("law_region_id")) map {
case user_id ~ law_region_id =>
UserLawRegion(user_id, law_region_id)
}
}
def all(): Seq[UserLawRegion] = {
DB.withConnection { implicit connection =>
SQL("select * from user_law_regions").as(UserLawRegion.simple *)
}
}
def findByUser(user:User): Seq[UserLawRegion] = {
DB.withConnection { implicit connection =>
SQL("select * from user_law_regions where user_id={user_id}").on('user_id -> user.id.get).as(UserLawRegion.simple *)
}
}
def save(user: User, lawRegion: LawRegion) {
DB.withConnection { implicit connection =>
SQL("""
INSERT INTO user_law_regions(user_id, law_region_id)
VALUES({user_id}, {law_region_id}) ON DUPLICATE KEY UPDATE user_id=user_id, law_region_id=law_region_id
""")
.on(
'user_id -> user.id,
'law_region_id -> lawRegion.id).executeInsert()
}
}
def deleteByUserAndRegion(user: User, lawRegion: LawRegion){
DB.withConnection { implicit connection =>
SQL("""
DELETE IGNORE FROM user_law_regions
WHERE user_id={user_id} AND law_region_id={law_region_id}
""")
.on(
'user_id -> user.id,
        'law_region_id -> lawRegion.id).executeUpdate()
}
}
def deleteByUser(user: User) {
DB.withConnection { implicit connection =>
SQL("""
DELETE FROM user_law_regions
WHERE user_id={user_id}
""")
.on(
        'user_id -> user.id).executeUpdate()
}
}
def deleteByLawRegion(lawRegion: LawRegion) {
DB.withConnection { implicit connection =>
SQL("""
DELETE FROM user_law_regions
WHERE law_region_id={law_region_id}
""")
.on(
        'law_region_id -> lawRegion.id).executeUpdate()
}
}
}
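// Hypothetical call-site sketch (not in the original file): User and LawRegion
// are the application's own models, used here only with the operations above.
object UserLawRegionSketch {
  def replaceRegions(user: User, regions: Seq[LawRegion]): Unit = {
    UserLawRegion.deleteByUser(user)             // drop the existing links
    regions.foreach(UserLawRegion.save(user, _)) // upsert each (user, region) pair
  }
}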
|
cidadao-automatico/cidadao-server
|
app/models/UserLawRegion.scala
|
Scala
|
agpl-3.0
| 2,934
|
package com.oni.udash.views.components
import com.oni.udash.config.ExternalUrls
import com.oni.udash.styles.{DemoStyles, GlobalStyles}
import com.oni.udash.styles.partials.FooterStyles
import org.scalajs.dom.raw.Element
import scalatags.JsDom.all._
import scalacss.ScalatagsCss._
object Footer {
private lazy val template = footer(FooterStyles.footer)(
div(GlobalStyles.body)(
div(FooterStyles.footerInner)(
// a(FooterStyles.footerLogo, href := ExternalUrls.homepage)(
// Image("udash_logo.png", "Udash Framework", GlobalStyles.block)
// ),
div(FooterStyles.footerLinks)(
// p(FooterStyles.footerMore)("See more"),
// ul(
// li(DemoStyles.navItem)(
// a(href := ExternalUrls.udashDemos, target := "_blank", DemoStyles.underlineLink)("Github demo")
// ),
// li(DemoStyles.navItem)(
// a(href := ExternalUrls.stackoverflow, target := "_blank", DemoStyles.underlineLink)("StackOverflow questions")
// )
// )
),
p(FooterStyles.footerCopyrights)("Copyright by ", a(FooterStyles.footerAvsystemLink, href := ExternalUrls.oniservices, target := "_blank")("Object Nirvana"))
)
)
).render
def getTemplate: Element = template
}
|
ObjectNirvana/oni-web
|
frontend/src/main/scala/com/oni/udash/views/components/Footer.scala
|
Scala
|
epl-1.0
| 1,290
|
package com.datastax.spark.connector.cql
import org.apache.spark.SparkConf
import org.scalatest.{FlatSpec, Matchers}
import com.datastax.spark.connector.testkit._
import com.datastax.spark.connector.embedded._
case class KeyValue(key: Int, group: Long, value: String)
case class KeyValueWithConversion(key: String, group: Int, value: Long)
class CassandraConnectorSpec extends FlatSpec with Matchers with SharedEmbeddedCassandra {
useCassandraConfig("cassandra-default.yaml.template")
val conn = CassandraConnector(Set(cassandraHost))
val createKeyspaceCql = "CREATE KEYSPACE IF NOT EXISTS test WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }"
"A CassandraConnector" should "connect to Cassandra with native protocol" in {
conn.withSessionDo { session =>
assert(session.isClosed === false)
assert(session !== null)
}
}
it should "connect to Cassandra with thrift" in {
conn.withCassandraClientDo { client =>
assert(client.describe_cluster_name() === "Test Cluster")
}
}
it should "give access to cluster metadata" in {
conn.withClusterDo { cluster =>
assert(cluster.getMetadata.getClusterName === "Test Cluster")
assert(cluster.getMetadata.getAllHosts.size > 0)
}
}
it should "run queries" in {
conn.withSessionDo { session =>
session.execute(createKeyspaceCql)
session.execute("DROP TABLE IF EXISTS test.simple_query")
session.execute("CREATE TABLE test.simple_query (key INT PRIMARY KEY, value TEXT)")
session.execute("INSERT INTO test.simple_query(key, value) VALUES (1, 'value')")
val result = session.execute("SELECT * FROM test.simple_query WHERE key = ?", 1.asInstanceOf[AnyRef])
assert(result.one().getString("value") === "value")
}
}
it should "cache PreparedStatements" in {
conn.withSessionDo { session =>
session.execute(createKeyspaceCql)
session.execute("DROP TABLE IF EXISTS test.pstmt")
session.execute("CREATE TABLE test.pstmt (key INT PRIMARY KEY, value TEXT)")
val stmt1 = session.prepare("INSERT INTO test.pstmt (key, value) VALUES (?, ?)")
val stmt2 = session.prepare("INSERT INTO test.pstmt (key, value) VALUES (?, ?)")
assert(stmt1 eq stmt2)
}
}
it should "disconnect from the cluster after use" in {
val cluster = conn.withClusterDo { cluster => cluster }
Thread.sleep(CassandraConnector.keepAliveMillis * 2)
assert(cluster.isClosed === true)
}
it should "share internal Cluster and Session object between multiple logical sessions" in {
val session1 = conn.openSession()
val threadCount1 = Thread.activeCount()
val session2 = conn.openSession()
val threadCount2 = Thread.activeCount()
session1.getCluster should be theSameInstanceAs session2.getCluster
// Unfortunately we don't have a way to obtain an internal Session object; all we got here are proxies.
// Instead, we try to figure out whether a new Session was opened by counting active threads.
// Opening internal Session creates new threads, so if none was created, the thread count would not change.
threadCount1 shouldEqual threadCount2
session1.close()
session1.isClosed shouldEqual true
session2.isClosed shouldEqual false
session2.close()
session2.isClosed shouldEqual true
}
it should "share internal Cluster object between multiple logical sessions created by different connectors to the same cluster" in {
val conn2 = CassandraConnector(Set(EmbeddedCassandra.cassandraHost))
val session1 = conn.openSession()
val threadCount1 = Thread.activeCount()
val session2 = conn2.openSession()
val threadCount2 = Thread.activeCount()
session1.getCluster should be theSameInstanceAs session2.getCluster
threadCount1 shouldEqual threadCount2
session1.close()
session1.isClosed shouldEqual true
session2.isClosed shouldEqual false
session2.close()
session2.isClosed shouldEqual true
}
it should "be configurable from SparkConf" in {
val host = EmbeddedCassandra.cassandraHost.getHostAddress
val conf = new SparkConf(loadDefaults = true)
.set(CassandraConnectorConf.CassandraConnectionHostProperty, host)
// would throw exception if connection unsuccessful
val conn2 = CassandraConnector(conf)
conn2.withSessionDo { session => }
}
it should "accept multiple hostnames in spark.cassandra.connection.host property" in {
val goodHost = EmbeddedCassandra.cassandraHost.getHostAddress
val invalidHost = "192.168.254.254"
// let's connect to two addresses, of which the first one is deliberately invalid
val conf = new SparkConf(loadDefaults = true)
.set(CassandraConnectorConf.CassandraConnectionHostProperty, invalidHost + "," + goodHost)
// would throw exception if connection unsuccessful
val conn2 = CassandraConnector(conf)
conn2.withSessionDo { session => }
}
}
|
brkyvz/spark-cassandra-connector
|
spark-cassandra-connector/src/it/scala/com/datastax/spark/connector/cql/CassandraConnectorSpec.scala
|
Scala
|
apache-2.0
| 4,949
|
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.text
import java.time.format.DateTimeFormatter
import java.time.{ZoneOffset, ZonedDateTime}
import java.util.Date
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class StringSerializationTest extends Specification {
def stringMap: Map[String, Class[_]] = Seq("foo", "bar", "bar:String").map(_ -> classOf[String]).toMap
"StringSerialization" should {
"encode and decode string seqs" >> {
"with empty values" >> {
StringSerialization.encodeSeq(Seq.empty) mustEqual ""
StringSerialization.decodeSeq("") must beEmpty
}
"with a single value" >> {
val values = Seq("foo")
val encoded = StringSerialization.encodeSeq(values)
StringSerialization.decodeSeq(encoded) mustEqual values
}
"with multiple values" >> {
val values = Seq("foo", "bar", "baz")
val encoded = StringSerialization.encodeSeq(values)
StringSerialization.decodeSeq(encoded) mustEqual values
}
"with escaped values" >> {
val values = Seq("foo", "bar:String", "'\"],blerg", "baz", "")
val encoded = StringSerialization.encodeSeq(values)
StringSerialization.decodeSeq(encoded) mustEqual values
}
}
"encode and decode string maps" >> {
"with empty values" >> {
StringSerialization.encodeMap(Map.empty) mustEqual ""
StringSerialization.decodeMap("") must beEmpty
}
"with a single value" >> {
val values = Map("foo" -> "bar")
val encoded = StringSerialization.encodeMap(values)
StringSerialization.decodeMap(encoded) mustEqual values
}
"with multiple values" >> {
val values = Map("foo" -> "bar", "bar" -> "foo")
val encoded = StringSerialization.encodeMap(values)
StringSerialization.decodeMap(encoded) mustEqual values
}
"with escaped values" >> {
val values = Map("foo" -> "bar:String", "bar" -> "'\"],blerg", "baz" -> "")
val encoded = StringSerialization.encodeMap(values)
StringSerialization.decodeMap(encoded) mustEqual values
}
}
"encode and decode seq maps" >> {
"with empty values" >> {
StringSerialization.encodeSeqMap(Map.empty) mustEqual ""
StringSerialization.decodeSeqMap("", Map.empty[String, Class[_]]) must beEmpty
}
"with a single value" >> {
val values = Map("foo" -> Seq("bar", "baz"))
val encoded = StringSerialization.encodeSeqMap(values)
StringSerialization.decodeSeqMap(encoded, stringMap).mapValues(_.toSeq) mustEqual values
}
"with multiple values" >> {
val values = Map("foo" -> Seq("bar", "baz"), "bar" -> Seq("foo", "baz"))
val encoded = StringSerialization.encodeSeqMap(values)
StringSerialization.decodeSeqMap(encoded, stringMap).mapValues(_.toSeq) mustEqual values
}
"with escaped values" >> {
val values = Map("foo" -> Seq("bar", "baz"), "bar:String" -> Seq("'\"],blerg", "blah", "", "test"))
val encoded = StringSerialization.encodeSeqMap(values)
StringSerialization.decodeSeqMap(encoded, stringMap).mapValues(_.toSeq) mustEqual values
}
"with non-string values" >> {
val dt = DateTimeFormatter.ISO_OFFSET_DATE_TIME.withZone(ZoneOffset.UTC)
val dates = Seq("2017-01-01T00:00:00.000Z", "2017-01-01T01:00:00.000Z").map(d => Date.from(ZonedDateTime.parse(d, dt).toInstant))
val values = Map("dtg" -> dates, "age" -> Seq(0, 1, 2).map(Int.box), "height" -> Seq(0.1f, 0.2f, 0.5f).map(Float.box))
val encoded = StringSerialization.encodeSeqMap(values)
val bindings = Map("dtg" -> classOf[Date], "age" -> classOf[Integer], "height" -> classOf[java.lang.Float])
StringSerialization.decodeSeqMap(encoded, bindings).mapValues(_.toSeq) mustEqual values
}
}
}
}
|
locationtech/geomesa
|
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/text/StringSerializationTest.scala
|
Scala
|
apache-2.0
| 4,439
|
package com.sksamuel.elastic4s.http.search
import cats.syntax.either._
import com.sksamuel.elastic4s.get.HitField
import com.sksamuel.elastic4s.http.{Shards, SourceAsContentBuilder}
import com.sksamuel.elastic4s.{Hit, HitReader}
case class SearchHit(private val _id: String,
private val _index: String,
private val _type: String,
private val _score: Float,
private val _source: Map[String, AnyRef],
fields: Map[String, AnyRef],
highlight: Map[String, Seq[String]],
private val _version: Long) extends Hit {
def highlightFragments(name: String): Seq[String] = Option(highlight).getOrElse(Map.empty).getOrElse(name, Nil)
override def index: String = _index
override def id: String = _id
override def `type`: String = _type
override def version: Long = _version
def storedField(fieldName: String): HitField = storedFieldOpt(fieldName).get
def storedFieldOpt(fieldName: String): Option[HitField] = fields.get(fieldName).map { v =>
new HitField {
override def values: Seq[AnyRef] = v match {
case values: Seq[AnyRef] => values
case value: AnyRef => Seq(value)
}
override def value: AnyRef = values.head
override def name: String = fieldName
override def isMetadataField: Boolean = ???
}
}
override def sourceAsMap: Map[String, AnyRef] = _source
override def sourceAsString: String = SourceAsContentBuilder(_source).string()
override def exists: Boolean = true
override def score: Float = _score
}
case class SearchHits(total: Int,
private val max_score: Double,
hits: Array[SearchHit]) {
def maxScore: Double = max_score
def size: Int = hits.length
def isEmpty: Boolean = hits.isEmpty
def nonEmpty: Boolean = hits.nonEmpty
}
case class SuggestionEntry(term: String) {
def options: Seq[String] = Nil
def optionsText: String = ""
}
case class CompletionSuggestionResult(entries: Seq[SuggestionEntry]) {
def entry(term: String): SuggestionEntry = entries.find(_.term == term).get
}
case class PhraseSuggestionResult(entries: Seq[SuggestionEntry]) {
def entry(term: String): SuggestionEntry = entries.find(_.term == term).get
}
case class SuggestionOption(text: String, score: Double, freq: Int)
case class SuggestionResult(text: String,
offset: Int,
length: Int,
options: Seq[SuggestionOption]) {
def toTerm: TermSuggestionResult = TermSuggestionResult(text, offset, length, options)
}
case class TermSuggestionResult(text: String,
offset: Int,
length: Int,
options: Seq[SuggestionOption]) {
def optionsText: Seq[String] = options.map(_.text)
}
case class Bucket(key: String,
private val doc_count: Int) {
def docCount: Int = doc_count
@deprecated("use getDocCount", "5.2.9")
def getDocCount: Int = docCount
}
trait AggregationResponse {
protected def aggdata: Map[String, AnyRef]
protected def agg(name: String): Map[String, AnyRef] = aggdata(name).asInstanceOf[Map[String, AnyRef]]
def termsAgg(name: String): TermsAggregationResult = {
TermsAggregationResult(
name,
agg(name)("buckets").asInstanceOf[Seq[Map[String, AnyRef]]].map { map => Bucket(map("key").toString, map("doc_count").toString.toInt) },
agg(name)("doc_count_error_upper_bound").toString.toInt,
agg(name)("sum_other_doc_count").toString.toInt
)
}
def sumAgg(name: String): SumAggregationResult = SumAggregationResult(name, agg(name)("value").toString.toDouble)
def minAgg(name: String): MinAggregationResult = MinAggregationResult(name, agg(name)("value").toString.toDouble)
def maxAgg(name: String): MaxAggregationResult = MaxAggregationResult(name, agg(name)("value").toString.toDouble)
def filterAgg(name: String): FilterAggregationResult = FilterAggregationResult(name, agg(name)("doc_count").toString.toInt, agg(name))
}
case class SearchResponse(took: Int,
private val timed_out: Boolean,
private val terminated_early: Option[Boolean],
private val suggest: Map[String, Seq[SuggestionResult]],
private val _shards: Shards,
private val _scroll_id: String,
aggregations: Map[String, AnyRef],
hits: SearchHits) extends AggregationResponse {
protected def aggdata: Map[String, AnyRef] = aggregations
def totalHits: Int = hits.total
def size: Int = hits.size
def ids: Seq[String] = hits.hits.map(_.id)
def maxScore: Double = hits.maxScore
def scrollId: Option[String] = Option(_scroll_id)
def shards: Shards = _shards
def isTimedOut: Boolean = timed_out
def isTerminatedEarly: Option[Boolean] = terminated_early
def isEmpty: Boolean = hits.isEmpty
def nonEmpty: Boolean = hits.nonEmpty
private def suggestion(name: String): Map[String, SuggestionResult] = suggest(name).map { result => result.text -> result }.toMap
def termSuggestion(name: String): Map[String, TermSuggestionResult] = suggestion(name).mapValues(_.toTerm)
def completionSuggestion(name: String): CompletionSuggestionResult = suggestion(name).asInstanceOf[CompletionSuggestionResult]
def phraseSuggestion(name: String): PhraseSuggestionResult = suggestion(name).asInstanceOf[PhraseSuggestionResult]
def to[T: HitReader]: IndexedSeq[T] = safeTo.flatMap(_.toOption)
def safeTo[T: HitReader]: IndexedSeq[Either[Throwable, T]] = hits.hits.map(_.safeTo[T]).toIndexedSeq
}
case class SumAggregationResult(name: String, value: Double)
case class MinAggregationResult(name: String, value: Double)
case class MaxAggregationResult(name: String, value: Double)
case class FilterAggregationResult(name: String,
docCount: Int,
aggdata: Map[String, AnyRef]) extends AggregationResponse
case class TermsAggregationResult(name: String,
buckets: Seq[Bucket],
docCountErrorUpperBound: Int,
otherDocCount: Int) {
@deprecated("use buckets", "5.2.9")
def getBuckets: Seq[Bucket] = buckets
@deprecated("use bucket", "5.2.9")
def getBucketByKey(key: String): Bucket = bucket(key)
def bucket(key: String): Bucket = bucketOpt(key).get
def bucketOpt(key: String): Option[Bucket] = buckets.find(_.key == key)
}
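// Illustrative sketch (not in the original file): the aggregation accessors
// above decode a raw map shaped like the Elasticsearch JSON response, so a
// hand-built payload is enough to exercise them.
object AggregationResponseSketch extends AggregationResponse {
  protected def aggdata: Map[String, AnyRef] = Map(
    "by_tag" -> Map(
      "buckets" -> Seq(Map("key" -> "scala", "doc_count" -> Int.box(3))),
      "doc_count_error_upper_bound" -> Int.box(0),
      "sum_other_doc_count" -> Int.box(0)))
  // Yields TermsAggregationResult("by_tag", Seq(Bucket("scala", 3)), 0, 0)
  val tags: TermsAggregationResult = termsAgg("by_tag")
}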
|
tyth/elastic4s
|
elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/search/responses.scala
|
Scala
|
apache-2.0
| 6,686
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import org.scalatest._
import Matchers._
class BeMatcherSpec extends FunSpec {
describe("BeMatcher ") {
describe("instance created by BeMatcher apply method") {
val beMatcher = BeMatcher[List[Int]] { list =>
MatchResult(true, "test", "test")
}
it("should have pretty toString") {
beMatcher.toString should be ("BeMatcher[scala.collection.immutable.List](scala.collection.immutable.List => MatchResult)")
}
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/matchers/BeMatcherSpec.scala
|
Scala
|
apache-2.0
| 1,134
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.atomic
import monix.execution.atomic.PaddingStrategy.NoPadding
import scala.annotation.tailrec
import java.lang.Double.{doubleToLongBits, longBitsToDouble}
import monix.execution.internal.atomic.{BoxedLong, Factory}
/** Atomic references wrapping `Double` values.
*
* Note that the equality test in `compareAndSet` is value based,
* since `Double` is a primitive.
*/
final class AtomicDouble private (val ref: BoxedLong)
extends AtomicNumber[Double] {
def get(): Double = longBitsToDouble(ref.volatileGet())
def set(update: Double): Unit = ref.volatileSet(doubleToLongBits(update))
def lazySet(update: Double): Unit = ref.lazySet(doubleToLongBits(update))
def compareAndSet(expect: Double, update: Double): Boolean = {
val expectLong = doubleToLongBits(expect)
val updateLong = doubleToLongBits(update)
ref.compareAndSet(expectLong, updateLong)
}
def getAndSet(update: Double): Double = {
longBitsToDouble(ref.getAndSet(doubleToLongBits(update)))
}
@tailrec
def increment(v: Int = 1): Unit = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
increment(v)
}
@tailrec
def add(v: Double): Unit = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
add(v)
}
@tailrec
def incrementAndGet(v: Int = 1): Double = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
incrementAndGet(v)
else
update
}
@tailrec
def addAndGet(v: Double): Double = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
addAndGet(v)
else
update
}
@tailrec
def getAndIncrement(v: Int = 1): Double = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
getAndIncrement(v)
else
current
}
@tailrec
def getAndAdd(v: Double): Double = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
getAndAdd(v)
else
current
}
@tailrec
def subtract(v: Double): Unit = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
subtract(v)
}
@tailrec
def subtractAndGet(v: Double): Double = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
subtractAndGet(v)
else
update
}
@tailrec
def getAndSubtract(v: Double): Double = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
getAndSubtract(v)
else
current
}
def decrement(v: Int = 1): Unit = increment(-v)
def decrementAndGet(v: Int = 1): Double = incrementAndGet(-v)
def getAndDecrement(v: Int = 1): Double = getAndIncrement(-v)
private[this] def plusOp(a: Double, b: Double): Double = a + b
private[this] def minusOp(a: Double, b: Double): Double = a - b
private[this] def incrementOp(a: Double, b: Int): Double = a + b
}
/** @define createDesc Constructs an [[AtomicDouble]] reference, allowing
* for fine-tuning of the created instance.
*
* A [[PaddingStrategy]] can be provided in order to counter
* the "false sharing" problem.
*
  * Note that for ''Scala.js'' we aren't applying any padding, as it
  * doesn't make much sense since JavaScript execution is single
  * threaded; this builder is nevertheless provided for syntax
  * compatibility across the JVM and JavaScript, and we never know
  * how JavaScript engines will evolve.
*/
object AtomicDouble {
/** Builds an [[AtomicDouble]] reference.
*
* @param initialValue is the initial value with which to initialize the atomic
*/
def apply(initialValue: Double): AtomicDouble =
withPadding(initialValue, NoPadding)
/** $createDesc
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def withPadding(initialValue: Double, padding: PaddingStrategy): AtomicDouble =
create(initialValue, padding, allowPlatformIntrinsics = true)
/** $createDesc
*
    * On top of Java 8 this builder also allows for turning off the
    * Java 8 intrinsics, thus forcing the usage of CAS-loops for
    * `getAndSet` and `getAndAdd`.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
* @param allowPlatformIntrinsics is a boolean parameter that specifies whether
* the instance is allowed to use the Java 8 optimized operations
* for `getAndSet` and for `getAndAdd`
*/
def create(initialValue: Double, padding: PaddingStrategy, allowPlatformIntrinsics: Boolean): AtomicDouble = {
new AtomicDouble(Factory.newBoxedLong(
doubleToLongBits(initialValue),
boxStrategyToPaddingStrategy(padding),
true, // allowIntrinsics
allowPlatformIntrinsics
))
}
/** $createDesc
*
* This builder guarantees to construct a safe atomic reference that
    * does not make use of `sun.misc.Unsafe`. On platforms that don't
    * support it, notably some versions of Android or the upcoming
    * Java 9, this might be desirable.
*
* NOTE that explicit usage of this builder is not usually necessary
* because [[create]] can auto-detect whether the underlying platform
* supports `sun.misc.Unsafe` and if it does, then its usage is
* recommended, because the "safe" atomic instances have overhead.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def safe(initialValue: Double, padding: PaddingStrategy): AtomicDouble = {
new AtomicDouble(Factory.newBoxedLong(
doubleToLongBits(initialValue),
boxStrategyToPaddingStrategy(padding),
false, // allowUnsafe
false // allowJava8Intrinsics
))
}
}
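// Illustrative usage sketch (not part of the original file). Since the
// compareAndSet equality test is value based (bit patterns via
// doubleToLongBits), the expected value must match the current one exactly.
object AtomicDoubleSketch {
  def main(args: Array[String]): Unit = {
    val acc = AtomicDouble(0.0)
    acc.add(1.5)                       // CAS retry loop under the hood
    val v = acc.incrementAndGet()      // 2.5
    assert(acc.compareAndSet(v, 10.0)) // succeeds: bit patterns match
    println(acc.get())                 // 10.0
  }
}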
|
Wogan/monix
|
monix-execution/jvm/src/main/scala/monix/execution/atomic/AtomicDouble.scala
|
Scala
|
apache-2.0
| 6,856
|
package com.criteo.dev.cluster.utils.ddl
import scala.util.parsing.input.Position
object DDLParser extends BaseParser with CreateTableParser {
/**
* Parse a DDL statement
*
* @param in input string
* @return Either a tuple of (error message, error position), or parsed statement
*/
def apply(in: String): Either[(String, Position), Statement] = {
parse(statement, in) match {
case Success(result, _) => Right(result)
case Failure(msg, input) => Left((msg, input.pos))
case Error(msg, input) => Left((msg, input.pos))
}
}
def statement: Parser[Statement] = createTable
}
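// Illustrative sketch (not in the original file): the exact DDL dialect is
// defined by CreateTableParser elsewhere, so the sample statement below is an
// assumption about what `createTable` accepts.
object DDLParserSketch {
  def main(args: Array[String]): Unit =
    DDLParser("CREATE TABLE db.t (id INT)") match {
      case Right(stmt)      => println(s"parsed: $stmt")
      case Left((msg, pos)) => println(s"error at line ${pos.line}: $msg")
    }
}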
|
criteo/berilia
|
src/main/scala/com/criteo/dev/cluster/utils/ddl/DDLParser.scala
|
Scala
|
apache-2.0
| 628
|
package com.sfxcode.sapphire.core
import java.util.ResourceBundle
import scala.util.Try
case class ResourceBundleHolder(underlying: ResourceBundle) extends AnyVal {
def message(key: String, params: Any*): String =
Try(underlying.getString(key))
.map(f => format(f, params: _*))
.getOrElse(s"!!--$key--!!")
private def format(s: String, params: Any*): String =
params.zipWithIndex.foldLeft(s) {
case (res, (value, index)) => {
res.replace("{" + index + "}", value.toString)
}
}
}
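// Illustrative sketch (not in the original file): an in-memory bundle keeps
// the example self-contained; real code would use ResourceBundle.getBundle.
object ResourceBundleHolderSketch {
  val bundle: ResourceBundle = new java.util.ListResourceBundle {
    protected def getContents(): Array[Array[AnyRef]] =
      Array(Array[AnyRef]("greeting", "Hello {0}, you are {1}"))
  }
  def main(args: Array[String]): Unit = {
    val holder = ResourceBundleHolder(bundle)
    println(holder.message("greeting", "Ada", 36)) // Hello Ada, you are 36
    println(holder.message("missing"))             // !!--missing--!!
  }
}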
|
sfxcode/sapphire-core
|
src/main/scala/com/sfxcode/sapphire/core/ResourceBundleHolder.scala
|
Scala
|
apache-2.0
| 533
|
package com.arcusys.valamis.course.service
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.services._
import com.arcusys.valamis.certificate.storage.CertificateRepository
import com.arcusys.valamis.course.model.{CourseExtended, CourseInfo}
import com.arcusys.valamis.course.{CourseMemberService, api}
import com.arcusys.valamis.course.model.CourseMembershipType.CourseMembershipType
import com.arcusys.valamis.course.storage.{CourseCertificateRepository, CourseExtendedRepository}
import com.arcusys.valamis.model.{RangeResult, SkipTake}
import com.arcusys.valamis.ratings.RatingService
import com.arcusys.valamis.ratings.model.Rating
import com.arcusys.valamis.tag.TagService
import org.joda.time.DateTime
import scala.collection.JavaConverters._
abstract class CourseServiceImpl extends api.CourseServiceImpl with CourseService {
private lazy val categoryService = AssetCategoryLocalServiceHelper
private lazy val ratingService = new RatingService[LGroup]
def courseRepository: CourseExtendedRepository
def courseTagService: TagService[LGroup]
def courseMemberService: CourseMemberService
def courseCertificateRepository: CourseCertificateRepository
val isMember = (gr: LGroup, user: LUser) => user.getGroups.asScala.exists(_.getGroupId == gr.getGroupId)
private val notGuestSite = (gr: LGroup) => gr.getFriendlyURL != "/guest"
private val hasCorrectType = (gr: LGroup) =>
gr.getType == GroupLocalServiceHelper.TYPE_SITE_OPEN ||
gr.getType == GroupLocalServiceHelper.TYPE_SITE_RESTRICTED ||
gr.getType == GroupLocalServiceHelper.TYPE_SITE_PRIVATE
private val namePatternFits = (gr: LGroup, filter: String) =>
filter.isEmpty ||
gr.getDescriptiveName.toLowerCase.contains(filter)
override def getAll(companyId: Long, skipTake: Option[SkipTake], namePattern: String, sortAscDirection: Boolean): RangeResult[CourseInfo] = {
var courses = getByCompanyId(companyId)
if (!namePattern.isEmpty) {
courses = courses.filter(_.getDescriptiveName.toLowerCase.contains(namePattern.toLowerCase))
}
val total = courses.length
if (!sortAscDirection) courses = courses.reverse
for (SkipTake(skip, take) <- skipTake)
courses = courses.slice(skip, skip + take)
val coursesInfo = attachCourseInfo(courses)
RangeResult(total, coursesInfo)
}
override def getAllForUser(companyId: Long,
user: Option[LUser],
skipTake: Option[SkipTake],
namePattern: String,
sortAscDirection: Boolean,
isActive: Option[Boolean] = None,
withGuestSite: Boolean = false): RangeResult[CourseInfo] = {
val namePatternLC = namePattern.toLowerCase
val userGroupIds = user.map(_.getUserGroupIds.toSeq).getOrElse(Seq())
val allowedToSee = (gr: LGroup) =>
gr.getType != GroupLocalServiceHelper.TYPE_SITE_PRIVATE ||
userGroupIds.isEmpty ||
userGroupIds.contains(gr.getGroupId)
val isVisible = (gr: LGroup) => isActive.isEmpty || (gr.isActive == isActive.get)
val allFilters = (gr: LGroup) =>
hasCorrectType(gr) &&
notPersonalSite(gr) &&
allowedToSee(gr) &&
namePatternFits(gr, namePatternLC) &&
(withGuestSite || notGuestSite(gr)) &&
isVisible(gr)
var courses = getByCompanyId(companyId = companyId, skipCheckActive = true).filter(allFilters)
val total = courses.length
if (!sortAscDirection) courses = courses.reverse
for (SkipTake(skip, take) <- skipTake)
courses = courses.slice(skip, skip + take)
val coursesInfo = attachCourseInfo(courses)
RangeResult(total, coursesInfo)
}
override def getNotMemberVisible(companyId: Long,
user: LUser,
skipTake: Option[SkipTake],
namePattern: String,
sortAscDirection: Boolean,
withGuestSite: Boolean = false): RangeResult[CourseInfo] = {
val namePatternLC = namePattern.toLowerCase
val userOrganizations = user.getOrganizations.asScala
val allOrganizations = OrganizationLocalServiceHelper.getOrganizations()
val organizationsNotUser = allOrganizations.filterNot { o => userOrganizations.contains(o) }
val organizationsGroups = organizationsNotUser.toList.map(_.getGroup).map(new LGroup(_))
val allowedToSee = (gr: LGroup) => gr.getType != GroupLocalServiceHelper.TYPE_SITE_PRIVATE
val allFilters = (gr: LGroup) =>
hasCorrectType(gr) &&
notPersonalSite(gr) &&
!isMember(gr, user) &&
namePatternFits(gr, namePatternLC) &&
(withGuestSite || notGuestSite(gr)) &&
allowedToSee(gr) &&
isVisible(gr)
val courses = (getByCompanyId(companyId) ++ organizationsGroups).filter(allFilters)
var coursesInfo = attachCourseInfo(courses).filter(course =>
isAvailable(course.beginDate, course.endDate)
)
if (!sortAscDirection) coursesInfo = coursesInfo.reverse
val total = coursesInfo.length
for (SkipTake(skip, take) <- skipTake)
coursesInfo = coursesInfo.slice(skip, skip + take)
RangeResult(total, coursesInfo)
}
override def getByIdCourseInfo(courseId: Long): CourseInfo = {
CourseInfo(groupService.getGroup(courseId))
}
override def getSitesByUserId(userId: Long, skipTake: Option[SkipTake], sortAsc: Boolean = true): RangeResult[CourseInfo] = {
val groups = getSitesByUserId(userId)
val result = getSortedAndOrdered(groups, skipTake, sortAsc)
val coursesInfo = attachCourseInfo(result)
RangeResult(groups.size, coursesInfo)
}
override def getByUserAndName(user: LUser,
skipTake: Option[SkipTake],
namePattern: Option[String],
sortAsc: Boolean,
withGuestSite: Boolean = false): RangeResult[CourseInfo] = {
val namePatternLC = namePattern.getOrElse("").toLowerCase
val userOrganizations = user.getOrganizations
val organizationGroups = userOrganizations.asScala.map(o => new LGroup(o.getGroup))
val groups = (GroupLocalServiceHelper.getSiteGroupsByUser(user)
.filter(isMember(_, user)) ++ organizationGroups)
.filter(namePatternFits(_, namePatternLC))
.filter(hasCorrectType)
.filter(gr => (withGuestSite || notGuestSite(gr)))
.filter(notPersonalSite)
.filter(isVisible)
val result = getSortedAndOrdered(groups, skipTake, sortAsc)
val coursesInfo = attachCourseInfo(result)
RangeResult(groups.size, coursesInfo)
}
override def addCourse(companyId: Long,
userId: Long,
title: String,
description: Option[String],
friendlyUrl: String,
membershipType: CourseMembershipType,
isActive: Boolean,
tags: Seq[String],
longDescription: Option[String],
userLimit: Option[Int],
beginDate: Option[DateTime],
endDate: Option[DateTime],
themeId: Option[String],
templateId: Option[Long]): CourseInfo = {
val course = GroupLocalServiceHelper.addPublicSite(
userId,
title,
description,
      formatFriendlyUrl(friendlyUrl),
membershipType.id,
isActive,
tags,
companyId)
courseRepository.create(CourseExtended(course.getGroupId, longDescription, userLimit, beginDate, endDate))
updateTags(companyId, course, tags)
templateId match {
case Some(id) => createSiteBasedOnTemplate(course.getGroupId, id, themeId)
case _ => createSiteWithOnePage(course.getGroupId, userId, themeId)
}
CourseInfo(course)
}
override def delete(courseId: Long): Unit = {
groupService.deleteGroup(courseId)
courseRepository.delete(courseId)
}
override def rateCourse(courseId: Long, userId: Long, score: Double) = {
ratingService.updateRating(userId, score, courseId)
}
override def deleteCourseRating(courseId: Long, userId: Long) = {
ratingService.deleteRating(userId, courseId)
}
override def getRating(courseId: Long, userId: Long): Rating = {
ratingService.getRating(userId, courseId)
}
override def getLogoUrl(courseId: Long) = {
val layoutSet = LayoutSetLocalServiceHelper.getLayoutSet(courseId, true)
if (layoutSet.isLogo) "/image/layout_set_logo?img_id=" + layoutSet.getLogoId
else ""
}
override def setLogo(courseId: Long, content: Array[Byte]) = {
LayoutSetLocalServiceHelper.updateLogo(courseId = courseId, privateLayout = true, logo = true, content = content)
LayoutSetLocalServiceHelper.updateLogo(courseId = courseId, privateLayout = false, logo = true, content = content)
}
override def deleteLogo(courseId: Long) = {
LayoutSetLocalServiceHelper.updateLogo(courseId = courseId, privateLayout = true, logo = false, content = Array())
LayoutSetLocalServiceHelper.updateLogo(courseId = courseId, privateLayout = false, logo = false, content = Array())
}
override def hasLogo(courseId: Long): Boolean = LayoutSetLocalServiceHelper.getLayoutSet(courseId, true).isLogo
override def update(courseId: Long,
companyId: Long,
title: String,
description: Option[String],
friendlyUrl: String,
membershipType: Option[CourseMembershipType],
isActive: Option[Boolean],
tags: Seq[String],
longDescription: Option[String],
userLimit: Option[Int],
beginDate: Option[DateTime],
endDate: Option[DateTime],
themeId: Option[String]): CourseInfo = {
val originalGroup = groupService.getGroup(courseId)
originalGroup.setName(title)
originalGroup.setDescription(description.getOrElse(""))
    originalGroup.setFriendlyURL(formatFriendlyUrl(friendlyUrl))
if (membershipType.isDefined) originalGroup.setType(membershipType.get.id)
originalGroup.setActive(isActive.getOrElse(originalGroup.isActive))
updateTags(companyId, originalGroup, tags)
val course = groupService.updateGroup(originalGroup)
val courseExtended = if (courseRepository.isExist(courseId)) {
courseRepository.update(CourseExtended(courseId, longDescription, userLimit, beginDate, endDate))
} else {
courseRepository.create(CourseExtended(courseId, longDescription, userLimit, beginDate, endDate))
}
setTheme(courseId, themeId)
CourseInfo(course)
}
override def getTags(courseId: Long): Seq[LAssetCategory] = categoryService.getCourseCategories(courseId)
override def getTheme(courseId: Long): LTheme = {
LayoutSetLocalServiceHelper.getLayoutSet(courseId, false).getTheme
}
override def setTheme(courseId: Long, themeId: Option[String]): Unit = {
themeId match {
case Some(id) => GroupLocalServiceHelper.setThemeToLayout(courseId, id)
case _ => GroupLocalServiceHelper.setThemeToLayout(courseId, null)
}
}
override def isAvailableNow(beginDate: Option[DateTime], endDate: Option[DateTime]): Boolean = {
(beginDate, endDate) match {
case (Some(beginDate), Some(endDate)) =>
beginDate.isBeforeNow && endDate.isAfterNow
case _ => true
}
}
override def isExist(courseId: Long): Boolean = {
GroupLocalServiceHelper.fetchGroup(courseId).nonEmpty
}
private def isAvailable(beginDate: Option[DateTime], endDate: Option[DateTime]): Boolean = {
(beginDate, endDate) match {
case (Some(beginDate), Some(endDate)) => endDate.isAfterNow
case _ => true
}
}
private def updateTags(companyId: Long, course: LGroup, tags: Seq[String]): Unit = {
val tagIds = courseTagService.getOrCreateTagIds(tags, companyId)
categoryService.getCourseEntryIds(course.getGroupId)
.foreach(courseTagService.setTags(_, tagIds))
}
private def attachCourseInfo(courses: Seq[LGroup]) = {
courses.map((course) => {
val courseInfo = courseRepository.getById(course.getGroupId)
val userCount = courseMemberService.getCountUsers(course.getGroupId)
val courseWithInfo = courseInfo match {
case Some(x) => CourseInfo(course).copy(
longDescription = x.longDescription,
userLimit = x.userLimit,
beginDate = x.beginDate,
endDate = x.endDate)
case None => CourseInfo(course)
}
courseWithInfo.copy(userCount = Some(userCount))
})
}
private def getSortedAndOrdered(courses: Seq[LGroup], skipTake: Option[SkipTake], sortAsc: Boolean = true) = {
val ordered = if (sortAsc) {
courses.sortBy(_.getDescriptiveName)
}
else {
courses.sortBy(_.getDescriptiveName).reverse
}
skipTake match {
case Some(SkipTake(skip, take)) => ordered.slice(skip, skip + take)
case _ => ordered
}
}
private def createSiteBasedOnTemplate(courseId: Long, templateId: Long, themeId: Option[String]): Unit = {
createSiteBasedOnTemplate(courseId, templateId, themeId, false)
createSiteBasedOnTemplate(courseId, templateId, themeId, true)
}
private def createSiteBasedOnTemplate(courseId: Long, templateId: Long, themeId: Option[String], privateLayout: Boolean): Unit = {
val layoutSet = LayoutSetLocalServiceHelper.getLayoutSet(courseId, privateLayout)
val template = LayoutSetPrototypeServiceHelper.getLayoutSetPrototype(templateId)
layoutSet.setLayoutSetPrototypeUuid(template.getUuid)
layoutSet.setLayoutSetPrototypeLinkEnabled(true)
layoutSet.setPageCount(1)
themeId.map(layoutSet.setThemeId(_))
layoutSet.setColorSchemeId(null)
LayoutSetLocalServiceHelper.updateLayoutSet(layoutSet)
}
private def createSiteWithOnePage(courseId: Long, userId: Long, themeId: Option[String]): Unit = {
GroupLocalServiceHelper.addLayout(courseId, userId, "Home", "/home", true)
GroupLocalServiceHelper.addLayout(courseId, userId, "Home", "/home", false)
themeId.map(GroupLocalServiceHelper.setThemeToLayout(courseId, _))
}
  private def formatFriendlyUrl(url: String): String = {
if (url.startsWith("/")) url else "/" + url
}
}
|
arcusys/Valamis
|
valamis-course/src/main/scala/com/arcusys/valamis/course/service/CourseServiceImpl.scala
|
Scala
|
gpl-3.0
| 14,602
|
package org.bitcoins.core.script.interpreter
import org.bitcoins.core.consensus.Consensus
import org.bitcoins.core.crypto._
import org.bitcoins.core.currency.{ CurrencyUnit, CurrencyUnits }
import org.bitcoins.core.protocol.CompactSizeUInt
import org.bitcoins.core.protocol.script._
import org.bitcoins.core.protocol.transaction.{ BaseTransaction, EmptyTransactionOutPoint, Transaction, WitnessTransaction }
import org.bitcoins.core.script._
import org.bitcoins.core.script.arithmetic._
import org.bitcoins.core.script.bitwise._
import org.bitcoins.core.script.constant.{ ScriptToken, _ }
import org.bitcoins.core.script.control._
import org.bitcoins.core.script.crypto._
import org.bitcoins.core.script.flag._
import org.bitcoins.core.script.locktime.{ LockTimeInterpreter, OP_CHECKLOCKTIMEVERIFY, OP_CHECKSEQUENCEVERIFY }
import org.bitcoins.core.script.reserved._
import org.bitcoins.core.script.result._
import org.bitcoins.core.script.splice._
import org.bitcoins.core.script.stack._
import org.bitcoins.core.util.{ BitcoinSLogger, BitcoinSUtil, BitcoinScriptUtil }
import scala.annotation.tailrec
import scala.util.{ Failure, Success, Try }
/**
* Created by chris on 1/6/16.
*/
sealed abstract class ScriptInterpreter {
private def logger = BitcoinSLogger.logger
/**
* Currently bitcoin core limits the maximum number of non-push operations per script
* to 201
*/
private lazy val maxScriptOps = 201
/** We cannot push an element larger than 520 bytes onto the stack */
private lazy val maxPushSize = 520
/**
   * Runs an entire script through our script programming language and
* returns a [[ScriptResult]] indicating if the script was valid, or if not what error it encountered
*/
def run(program: PreExecutionScriptProgram): ScriptResult = {
val scriptSig = program.txSignatureComponent.scriptSignature
val scriptPubKey = program.txSignatureComponent.scriptPubKey
val flags = program.flags
val p2shEnabled = ScriptFlagUtil.p2shEnabled(flags)
val segwitEnabled = ScriptFlagUtil.segWitEnabled(flags)
val executedProgram: ExecutedScriptProgram = if (ScriptFlagUtil.requirePushOnly(flags)
&& !BitcoinScriptUtil.isPushOnly(program.script)) {
logger.error("We can only have push operations inside of the script sig when the SIGPUSHONLY flag is set")
ScriptProgram(program, ScriptErrorSigPushOnly)
} else if (scriptSig.isInstanceOf[P2SHScriptSignature] && p2shEnabled &&
!BitcoinScriptUtil.isPushOnly(scriptSig.asm)) {
logger.error("P2SH scriptSigs are required to be push only by definition - see BIP16, got: " + scriptSig.asm)
ScriptProgram(program, ScriptErrorSigPushOnly)
} else {
val scriptSigExecutedProgram = loop(program, 0)
val t = scriptSigExecutedProgram.txSignatureComponent
val scriptPubKeyProgram = ScriptProgram(t, scriptSigExecutedProgram.stack, t.scriptPubKey.asm,
t.scriptPubKey.asm)
val scriptPubKeyExecutedProgram: ExecutedScriptProgram = loop(scriptPubKeyProgram, 0)
if (scriptSigExecutedProgram.error.isDefined) {
scriptSigExecutedProgram
} else if (scriptPubKeyExecutedProgram.error.isDefined || scriptPubKeyExecutedProgram.stackTopIsFalse) {
scriptPubKeyExecutedProgram
} else {
scriptPubKey match {
case witness: WitnessScriptPubKey =>
//TODO: remove .get here
if (segwitEnabled) executeSegWitScript(scriptPubKeyExecutedProgram, witness).get
else scriptPubKeyExecutedProgram
case p2sh: P2SHScriptPubKey =>
if (p2shEnabled) executeP2shScript(scriptSigExecutedProgram, program, p2sh)
else scriptPubKeyExecutedProgram
case _: P2PKHScriptPubKey | _: P2PKScriptPubKey | _: MultiSignatureScriptPubKey | _: CSVScriptPubKey
| _: CLTVScriptPubKey | _: NonStandardScriptPubKey | _: WitnessCommitment
| _: EscrowTimeoutScriptPubKey | EmptyScriptPubKey =>
scriptPubKeyExecutedProgram
}
}
}
logger.debug("Executed Script Program: " + executedProgram)
if (executedProgram.error.isDefined) executedProgram.error.get
else if (hasUnexpectedWitness(program)) {
//note: the 'program' value we pass above is intentional, we need to check the original program
//as the 'executedProgram' may have had the scriptPubKey value changed to the rebuilt ScriptPubKey of the witness program
ScriptErrorWitnessUnexpected
} else if (executedProgram.stackTopIsTrue && flags.contains(ScriptVerifyCleanStack)) {
//require that the stack after execution has exactly one element on it
if (executedProgram.stack.size == 1) ScriptOk
else ScriptErrorCleanStack
} else if (executedProgram.stackTopIsTrue) ScriptOk
else ScriptErrorEvalFalse
}
/**
   * P2SH scripts are unique in their evaluation: first the scriptSignature must be added to the stack; next the
   * p2sh scriptPubKey must be run to make sure the serialized redeem script hashes to the value found in the p2sh
   * scriptPubKey; finally the serialized redeemScript is decoded and run with the arguments in the p2sh script signature.
   * A p2sh script returns true if both of those intermediate steps evaluate to true.
*
* @param scriptPubKeyExecutedProgram the program with the script signature pushed onto the stack
* @param originalProgram the original program, used for setting errors & checking that the original script signature contains push only tokens
* @param p2shScriptPubKey the p2sh scriptPubKey that contains the value the redeemScript must hash to
* @return the executed program
*/
private def executeP2shScript(scriptPubKeyExecutedProgram: ExecutedScriptProgram, originalProgram: ScriptProgram, p2shScriptPubKey: P2SHScriptPubKey): ExecutedScriptProgram = {
/** Helper function to actually run a p2sh script */
def run(p: ExecutedScriptProgram, stack: Seq[ScriptToken], s: ScriptPubKey): ExecutedScriptProgram = {
logger.debug("Running p2sh script: " + stack)
val p2shRedeemScriptProgram = ScriptProgram(p.txSignatureComponent, stack.tail,
s.asm)
if (ScriptFlagUtil.requirePushOnly(p2shRedeemScriptProgram.flags) && !BitcoinScriptUtil.isPushOnly(s.asm)) {
logger.error("p2sh redeem script must be push only operations whe SIGPUSHONLY flag is set")
ScriptProgram(p2shRedeemScriptProgram, ScriptErrorSigPushOnly)
} else loop(p2shRedeemScriptProgram, 0)
}
val scriptSig = scriptPubKeyExecutedProgram.txSignatureComponent.scriptSignature
val scriptSigAsm: Seq[ScriptToken] = scriptSig.asm
//need to check if the scriptSig is push only as required by bitcoin core
//https://github.com/bitcoin/bitcoin/blob/528472111b4965b1a99c4bcf08ac5ec93d87f10f/src/script/interpreter.cpp#L1419
if (!BitcoinScriptUtil.isPushOnly(scriptSigAsm)) {
ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorSigPushOnly)
} else if (scriptPubKeyExecutedProgram.error.isDefined) {
scriptPubKeyExecutedProgram
} else {
scriptPubKeyExecutedProgram.stackTopIsTrue match {
case true =>
logger.debug("Hashes matched between the p2shScriptSignature & the p2shScriptPubKey")
//we need to run the deserialized redeemScript & the scriptSignature without the serialized redeemScript
val stack = scriptPubKeyExecutedProgram.stack
val redeemScriptBytes = stack.head.bytes
val c = CompactSizeUInt.calculateCompactSizeUInt(redeemScriptBytes)
val redeemScript = ScriptPubKey(c.bytes ++ redeemScriptBytes)
redeemScript match {
case w: WitnessScriptPubKey =>
val pushOp = BitcoinScriptUtil.calculatePushOp(redeemScriptBytes)
val expectedScriptBytes = pushOp.flatMap(_.bytes) ++ redeemScriptBytes
val flags = scriptPubKeyExecutedProgram.flags
val segwitEnabled = ScriptFlagUtil.segWitEnabled(flags)
if (segwitEnabled && (scriptSig.asmBytes == expectedScriptBytes)) {
// The scriptSig must be _exactly_ a single push of the redeemScript. Otherwise we
// reintroduce malleability.
logger.debug("redeem script was witness script pubkey, segwit was enabled, scriptSig was single push of redeemScript")
//TODO: remove .get here
executeSegWitScript(scriptPubKeyExecutedProgram, w).get
} else if (segwitEnabled && (scriptSig.asmBytes != expectedScriptBytes)) {
logger.error("Segwit was enabled, but p2sh redeem script was malleated")
logger.error("ScriptSig bytes: " + scriptSig.hex)
logger.error("expected scriptsig bytes: " + BitcoinSUtil.encodeHex(expectedScriptBytes))
ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorWitnessMalleatedP2SH)
} else {
logger.warn("redeem script was witness script pubkey, segwit was NOT enabled")
//treat the segwit scriptpubkey as any other redeem script
run(scriptPubKeyExecutedProgram, stack, w)
}
case s @ (_: P2SHScriptPubKey | _: P2PKHScriptPubKey | _: P2PKScriptPubKey | _: MultiSignatureScriptPubKey
| _: CLTVScriptPubKey | _: CSVScriptPubKey | _: NonStandardScriptPubKey | _: WitnessCommitment
| _: EscrowTimeoutScriptPubKey | EmptyScriptPubKey) =>
logger.debug("redeemScript: " + s.asm)
run(scriptPubKeyExecutedProgram, stack, s)
}
case false =>
logger.warn("P2SH scriptPubKey hash did not match the hash for the serialized redeemScript")
scriptPubKeyExecutedProgram
}
}
}
/**
* Runs a segwit script through our interpreter, mimics this functionality in bitcoin core:
* [[https://github.com/bitcoin/bitcoin/blob/528472111b4965b1a99c4bcf08ac5ec93d87f10f/src/script/interpreter.cpp#L1441-L1452]]
* @param scriptPubKeyExecutedProgram the program with the [[ScriptPubKey]] executed
* @return
*/
private def executeSegWitScript(scriptPubKeyExecutedProgram: ExecutedScriptProgram, witnessScriptPubKey: WitnessScriptPubKey): Try[ExecutedScriptProgram] = {
scriptPubKeyExecutedProgram.txSignatureComponent match {
case b: BaseTxSigComponent =>
val scriptSig = scriptPubKeyExecutedProgram.txSignatureComponent.scriptSignature
if (scriptSig != EmptyScriptSignature && !b.scriptPubKey.isInstanceOf[P2SHScriptPubKey]) {
Success(ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorWitnessMalleated))
} else {
witnessScriptPubKey.witnessVersion match {
case WitnessVersion0 =>
logger.error("Cannot verify witness program with a BaseTxSigComponent")
Success(ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorWitnessProgramWitnessEmpty))
case UnassignedWitness(_) =>
evaluateUnassignedWitness(b)
}
}
case w: WitnessTxSigComponent =>
val scriptSig = scriptPubKeyExecutedProgram.txSignatureComponent.scriptSignature
val (witnessVersion, witnessProgram) = (witnessScriptPubKey.witnessVersion, witnessScriptPubKey.witnessProgram)
val witness = w.witness
//scriptsig must be empty if we have raw p2wsh
//if script pubkey is a P2SHScriptPubKey then we have P2SH(P2WSH)
if (scriptSig != EmptyScriptSignature && !w.scriptPubKey.isInstanceOf[P2SHScriptPubKey]) {
Success(ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorWitnessMalleated))
} else if (witness.stack.exists(_.size > maxPushSize)) {
Success(ScriptProgram(scriptPubKeyExecutedProgram, ScriptErrorPushSize))
} else {
verifyWitnessProgram(witnessVersion, witness, witnessProgram, w)
}
case _: WitnessTxSigComponentRebuilt =>
Failure(new IllegalArgumentException("Cannot have a rebuild witness tx sig component here, the witness tx sigcomponent is rebuilt in verifyWitnessProgram"))
}
}
/**
* Verifies a segregated witness program by running it through the interpreter
* [[https://github.com/bitcoin/bitcoin/blob/f8528134fc188abc5c7175a19680206964a8fade/src/script/interpreter.cpp#L1302]]
*/
private def verifyWitnessProgram(witnessVersion: WitnessVersion, scriptWitness: ScriptWitness, witnessProgram: Seq[ScriptToken],
wTxSigComponent: WitnessTxSigComponent): Try[ExecutedScriptProgram] = {
/** Helper function to run the post segwit execution checks */
def postSegWitProgramChecks(evaluated: ExecutedScriptProgram): ExecutedScriptProgram = {
logger.debug("Stack after evaluating witness: " + evaluated.stack)
if (evaluated.error.isDefined) evaluated
else if (evaluated.stack.size != 1 || evaluated.stackTopIsFalse) ScriptProgram(evaluated, ScriptErrorEvalFalse)
else evaluated
}
witnessVersion match {
case WitnessVersion0 =>
val either: Either[(Seq[ScriptToken], ScriptPubKey), ScriptError] = witnessVersion.rebuild(scriptWitness, witnessProgram)
either match {
case Left((stack, scriptPubKey)) =>
val newWTxSigComponent = rebuildWTxSigComponent(wTxSigComponent, scriptPubKey)
val newProgram = newWTxSigComponent.map(comp => ScriptProgram(comp, stack, scriptPubKey.asm, scriptPubKey.asm, Nil))
val evaluated = newProgram.map(p => loop(p, 0))
evaluated.map(e => postSegWitProgramChecks(e))
case Right(err) =>
val program = ScriptProgram(wTxSigComponent, Nil, Nil, Nil)
Success(ScriptProgram(program, err))
}
case UnassignedWitness(_) =>
evaluateUnassignedWitness(wTxSigComponent)
}
}
/**
* The execution loop for a script
*
* @param program the program whose script needs to be evaluated
* @return program the final state of the program after being evaluated by the interpreter
*/
@tailrec
private def loop(program: ScriptProgram, opCount: Int): ExecutedScriptProgram = {
logger.debug("Stack: " + program.stack)
logger.debug("Script: " + program.script)
if (opCount > maxScriptOps && !program.isInstanceOf[ExecutedScriptProgram]) {
logger.error("We have reached the maximum amount of script operations allowed")
logger.error("Here are the remaining operations in the script: " + program.script)
loop(ScriptProgram(program, ScriptErrorOpCount), opCount)
} else if (program.script.flatMap(_.bytes).size > 10000 && !program.isInstanceOf[ExecutedScriptProgram]) {
logger.error("We cannot run a script that is larger than 10,000 bytes")
program match {
case p: PreExecutionScriptProgram =>
loop(ScriptProgram(ScriptProgram.toExecutionInProgress(p), ScriptErrorScriptSize), opCount)
case _: ExecutionInProgressScriptProgram | _: ExecutedScriptProgram =>
loop(ScriptProgram(program, ScriptErrorScriptSize), opCount)
}
} else {
program match {
case p: PreExecutionScriptProgram => loop(ScriptProgram.toExecutionInProgress(p, Some(p.stack)), opCount)
case p: ExecutedScriptProgram =>
          val countedOps = program.originalScript.count(BitcoinScriptUtil.countsTowardsScriptOpLimit)
logger.debug("Counted ops: " + countedOps)
if (countedOps > maxScriptOps && p.error.isEmpty) {
loop(ScriptProgram(p, ScriptErrorOpCount), opCount)
} else p
case p: ExecutionInProgressScriptProgram =>
p.script match {
//if at any time we see that the program is not valid
//cease script execution
case _ if p.script.intersect(Seq(OP_VERIF, OP_VERNOTIF)).nonEmpty =>
logger.error("Script is invalid even when a OP_VERIF or OP_VERNOTIF occurs in an unexecuted OP_IF branch")
loop(ScriptProgram(p, ScriptErrorBadOpCode), opCount)
//disabled splice operation
case _ if p.script.intersect(Seq(OP_CAT, OP_SUBSTR, OP_LEFT, OP_RIGHT)).nonEmpty =>
logger.error("Script is invalid because it contains a disabled splice operation")
loop(ScriptProgram(p, ScriptErrorDisabledOpCode), opCount)
//disabled bitwise operations
case _ if p.script.intersect(Seq(OP_INVERT, OP_AND, OP_OR, OP_XOR)).nonEmpty =>
logger.error("Script is invalid because it contains a disabled bitwise operation")
loop(ScriptProgram(p, ScriptErrorDisabledOpCode), opCount)
//disabled arithmetic operations
case _ if p.script.intersect(Seq(OP_MUL, OP_2MUL, OP_DIV, OP_2DIV, OP_MOD, OP_LSHIFT, OP_RSHIFT)).nonEmpty =>
logger.error("Script is invalid because it contains a disabled arithmetic operation")
loop(ScriptProgram(p, ScriptErrorDisabledOpCode), opCount)
//program cannot contain a push operation > 520 bytes
case _ if (p.script.exists(token => token.bytes.size > maxPushSize)) =>
logger.error("We have a script constant that is larger than 520 bytes, this is illegal: " + p.script)
loop(ScriptProgram(p, ScriptErrorPushSize), opCount)
//program stack size cannot be greater than 1000 elements
case _ if ((p.stack.size + p.altStack.size) > 1000) =>
logger.error("We cannot have a stack + alt stack size larger than 1000 elements")
loop(ScriptProgram(p, ScriptErrorStackSize), opCount)
//stack operations
case OP_DUP :: t => loop(StackInterpreter.opDup(p), calcOpCount(opCount, OP_DUP))
case OP_DEPTH :: t => loop(StackInterpreter.opDepth(p), calcOpCount(opCount, OP_DEPTH))
case OP_TOALTSTACK :: t => loop(StackInterpreter.opToAltStack(p), calcOpCount(opCount, OP_TOALTSTACK))
case OP_FROMALTSTACK :: t => loop(StackInterpreter.opFromAltStack(p), calcOpCount(opCount, OP_FROMALTSTACK))
case OP_DROP :: t => loop(StackInterpreter.opDrop(p), calcOpCount(opCount, OP_DROP))
case OP_IFDUP :: t => loop(StackInterpreter.opIfDup(p), calcOpCount(opCount, OP_IFDUP))
case OP_NIP :: t => loop(StackInterpreter.opNip(p), calcOpCount(opCount, OP_NIP))
case OP_OVER :: t => loop(StackInterpreter.opOver(p), calcOpCount(opCount, OP_OVER))
case OP_PICK :: t => loop(StackInterpreter.opPick(p), calcOpCount(opCount, OP_PICK))
case OP_ROLL :: t => loop(StackInterpreter.opRoll(p), calcOpCount(opCount, OP_ROLL))
case OP_ROT :: t => loop(StackInterpreter.opRot(p), calcOpCount(opCount, OP_ROT))
case OP_2ROT :: t => loop(StackInterpreter.op2Rot(p), calcOpCount(opCount, OP_2ROT))
case OP_2DROP :: t => loop(StackInterpreter.op2Drop(p), calcOpCount(opCount, OP_2DROP))
case OP_SWAP :: t => loop(StackInterpreter.opSwap(p), calcOpCount(opCount, OP_SWAP))
case OP_TUCK :: t => loop(StackInterpreter.opTuck(p), calcOpCount(opCount, OP_TUCK))
case OP_2DUP :: t => loop(StackInterpreter.op2Dup(p), calcOpCount(opCount, OP_2DUP))
case OP_3DUP :: t => loop(StackInterpreter.op3Dup(p), calcOpCount(opCount, OP_3DUP))
case OP_2OVER :: t => loop(StackInterpreter.op2Over(p), calcOpCount(opCount, OP_2OVER))
case OP_2SWAP :: t => loop(StackInterpreter.op2Swap(p), calcOpCount(opCount, OP_2SWAP))
//arithmetic operations
case OP_ADD :: t => loop(ArithmeticInterpreter.opAdd(p), calcOpCount(opCount, OP_ADD))
case OP_1ADD :: t => loop(ArithmeticInterpreter.op1Add(p), calcOpCount(opCount, OP_1ADD))
case OP_1SUB :: t => loop(ArithmeticInterpreter.op1Sub(p), calcOpCount(opCount, OP_1SUB))
case OP_SUB :: t => loop(ArithmeticInterpreter.opSub(p), calcOpCount(opCount, OP_SUB))
case OP_ABS :: t => loop(ArithmeticInterpreter.opAbs(p), calcOpCount(opCount, OP_ABS))
case OP_NEGATE :: t => loop(ArithmeticInterpreter.opNegate(p), calcOpCount(opCount, OP_NEGATE))
case OP_NOT :: t => loop(ArithmeticInterpreter.opNot(p), calcOpCount(opCount, OP_NOT))
case OP_0NOTEQUAL :: t => loop(ArithmeticInterpreter.op0NotEqual(p), calcOpCount(opCount, OP_0NOTEQUAL))
case OP_BOOLAND :: t => loop(ArithmeticInterpreter.opBoolAnd(p), calcOpCount(opCount, OP_BOOLAND))
case OP_BOOLOR :: t => loop(ArithmeticInterpreter.opBoolOr(p), calcOpCount(opCount, OP_BOOLOR))
case OP_NUMEQUAL :: t => loop(ArithmeticInterpreter.opNumEqual(p), calcOpCount(opCount, OP_NUMEQUAL))
case OP_NUMEQUALVERIFY :: t => loop(ArithmeticInterpreter.opNumEqualVerify(p), calcOpCount(opCount, OP_NUMEQUALVERIFY))
case OP_NUMNOTEQUAL :: t => loop(ArithmeticInterpreter.opNumNotEqual(p), calcOpCount(opCount, OP_NUMNOTEQUAL))
case OP_LESSTHAN :: t => loop(ArithmeticInterpreter.opLessThan(p), calcOpCount(opCount, OP_LESSTHAN))
case OP_GREATERTHAN :: t => loop(ArithmeticInterpreter.opGreaterThan(p), calcOpCount(opCount, OP_GREATERTHAN))
case OP_LESSTHANOREQUAL :: t => loop(ArithmeticInterpreter.opLessThanOrEqual(p), calcOpCount(opCount, OP_LESSTHANOREQUAL))
case OP_GREATERTHANOREQUAL :: t => loop(ArithmeticInterpreter.opGreaterThanOrEqual(p), calcOpCount(opCount, OP_GREATERTHANOREQUAL))
case OP_MIN :: t => loop(ArithmeticInterpreter.opMin(p), calcOpCount(opCount, OP_MIN))
case OP_MAX :: t => loop(ArithmeticInterpreter.opMax(p), calcOpCount(opCount, OP_MAX))
case OP_WITHIN :: t => loop(ArithmeticInterpreter.opWithin(p), calcOpCount(opCount, OP_WITHIN))
//bitwise operations
case OP_EQUAL :: t => loop(BitwiseInterpreter.opEqual(p), calcOpCount(opCount, OP_EQUAL))
case OP_EQUALVERIFY :: t => loop(BitwiseInterpreter.opEqualVerify(p), calcOpCount(opCount, OP_EQUALVERIFY))
case OP_0 :: t => loop(ScriptProgram(p, ScriptNumber.zero :: p.stack, t), calcOpCount(opCount, OP_0))
case (scriptNumberOp: ScriptNumberOperation) :: t =>
loop(ScriptProgram(p, ScriptNumber(scriptNumberOp.toLong) :: p.stack, t), calcOpCount(opCount, scriptNumberOp))
case (bytesToPushOntoStack: BytesToPushOntoStack) :: t =>
loop(ConstantInterpreter.pushScriptNumberBytesToStack(p), calcOpCount(opCount, bytesToPushOntoStack))
case (scriptNumber: ScriptNumber) :: t =>
loop(ScriptProgram(p, scriptNumber :: p.stack, t), calcOpCount(opCount, scriptNumber))
case OP_PUSHDATA1 :: t => loop(ConstantInterpreter.opPushData1(p), calcOpCount(opCount, OP_PUSHDATA1))
case OP_PUSHDATA2 :: t => loop(ConstantInterpreter.opPushData2(p), calcOpCount(opCount, OP_PUSHDATA2))
case OP_PUSHDATA4 :: t => loop(ConstantInterpreter.opPushData4(p), calcOpCount(opCount, OP_PUSHDATA4))
case (x: ScriptConstant) :: t => loop(ScriptProgram(p, x :: p.stack, t), calcOpCount(opCount, x))
//control operations
case OP_IF :: t => loop(ControlOperationsInterpreter.opIf(p), calcOpCount(opCount, OP_IF))
case OP_NOTIF :: t => loop(ControlOperationsInterpreter.opNotIf(p), calcOpCount(opCount, OP_NOTIF))
case OP_ELSE :: t => loop(ControlOperationsInterpreter.opElse(p), calcOpCount(opCount, OP_ELSE))
case OP_ENDIF :: t => loop(ControlOperationsInterpreter.opEndIf(p), calcOpCount(opCount, OP_ENDIF))
case OP_RETURN :: t => loop(ControlOperationsInterpreter.opReturn(p), calcOpCount(opCount, OP_RETURN))
case OP_VERIFY :: t => loop(ControlOperationsInterpreter.opVerify(p), calcOpCount(opCount, OP_VERIFY))
//crypto operations
case OP_HASH160 :: t => loop(CryptoInterpreter.opHash160(p), calcOpCount(opCount, OP_HASH160))
case OP_CHECKSIG :: t => loop(CryptoInterpreter.opCheckSig(p), calcOpCount(opCount, OP_CHECKSIG))
case OP_CHECKSIGVERIFY :: t => loop(CryptoInterpreter.opCheckSigVerify(p), calcOpCount(opCount, OP_CHECKSIGVERIFY))
case OP_SHA1 :: t => loop(CryptoInterpreter.opSha1(p), calcOpCount(opCount, OP_SHA1))
case OP_RIPEMD160 :: t => loop(CryptoInterpreter.opRipeMd160(p), calcOpCount(opCount, OP_RIPEMD160))
case OP_SHA256 :: t => loop(CryptoInterpreter.opSha256(p), calcOpCount(opCount, OP_SHA256))
case OP_HASH256 :: t => loop(CryptoInterpreter.opHash256(p), calcOpCount(opCount, OP_HASH256))
case OP_CODESEPARATOR :: t => loop(CryptoInterpreter.opCodeSeparator(p), calcOpCount(opCount, OP_CODESEPARATOR))
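        //Besides the opcode itself, multisig ops add the key count taken from the
        //stack (via numPossibleSignaturesOnStack) toward the script op limit; e.g.,
        //assuming the top stack element holds a public key count of 3, a 2-of-3
        //multisig contributes 1 + 3 = 4 to opCount.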
case OP_CHECKMULTISIG :: t =>
CryptoInterpreter.opCheckMultiSig(p) match {
case newProgram: ExecutedScriptProgram =>
//script was marked invalid for other reasons, don't need to update the opcount
loop(newProgram, opCount)
case newProgram @ (_: ExecutionInProgressScriptProgram | _: PreExecutionScriptProgram) =>
val newOpCount = calcOpCount(opCount, OP_CHECKMULTISIG) + BitcoinScriptUtil.numPossibleSignaturesOnStack(program).toInt
loop(newProgram, newOpCount)
}
case OP_CHECKMULTISIGVERIFY :: t =>
CryptoInterpreter.opCheckMultiSigVerify(p) match {
case newProgram: ExecutedScriptProgram =>
//script was marked invalid for other reasons, don't need to update the opcount
loop(newProgram, opCount)
case newProgram @ (_: ExecutionInProgressScriptProgram | _: PreExecutionScriptProgram) =>
val newOpCount = calcOpCount(opCount, OP_CHECKMULTISIGVERIFY) + BitcoinScriptUtil.numPossibleSignaturesOnStack(program).toInt
loop(newProgram, newOpCount)
}
//reserved operations
case OP_NOP :: t =>
          //script discourage upgradeable flag does not apply to an OP_NOP
loop(ScriptProgram(p, p.stack, t), calcOpCount(opCount, OP_NOP))
//if we see an OP_NOP and the DISCOURAGE_UPGRADABLE_OP_NOPS flag is set we must fail our program
case (nop: NOP) :: t if ScriptFlagUtil.discourageUpgradableNOPs(p.flags) =>
logger.error("We cannot execute a NOP when the ScriptVerifyDiscourageUpgradableNOPs is set")
loop(ScriptProgram(p, ScriptErrorDiscourageUpgradableNOPs), calcOpCount(opCount, nop))
case (nop: NOP) :: t => loop(ScriptProgram(p, p.stack, t), calcOpCount(opCount, nop))
case OP_RESERVED :: t =>
logger.error("OP_RESERVED automatically marks transaction invalid")
loop(ScriptProgram(p, ScriptErrorBadOpCode), calcOpCount(opCount, OP_RESERVED))
case OP_VER :: t =>
logger.error("Transaction is invalid when executing OP_VER")
loop(ScriptProgram(p, ScriptErrorBadOpCode), calcOpCount(opCount, OP_VER))
case OP_RESERVED1 :: t =>
logger.error("Transaction is invalid when executing OP_RESERVED1")
loop(ScriptProgram(p, ScriptErrorBadOpCode), calcOpCount(opCount, OP_RESERVED1))
case OP_RESERVED2 :: t =>
logger.error("Transaction is invalid when executing OP_RESERVED2")
loop(ScriptProgram(p, ScriptErrorBadOpCode), calcOpCount(opCount, OP_RESERVED2))
case (reservedOperation: ReservedOperation) :: t =>
logger.error("Undefined operation found which automatically fails the script: " + reservedOperation)
loop(ScriptProgram(p, ScriptErrorBadOpCode), calcOpCount(opCount, reservedOperation))
//splice operations
case OP_SIZE :: t => loop(SpliceInterpreter.opSize(p), calcOpCount(opCount, OP_SIZE))
//locktime operations
case OP_CHECKLOCKTIMEVERIFY :: t =>
//check if CLTV is enforced yet
if (ScriptFlagUtil.checkLockTimeVerifyEnabled(p.flags)) {
loop(LockTimeInterpreter.opCheckLockTimeVerify(p), calcOpCount(opCount, OP_CHECKLOCKTIMEVERIFY))
          } //if not, check whether we should discourage upgradable NOPs
else if (ScriptFlagUtil.discourageUpgradableNOPs(p.flags)) {
logger.error("We cannot execute a NOP when the ScriptVerifyDiscourageUpgradableNOPs is set")
loop(ScriptProgram(p, ScriptErrorDiscourageUpgradableNOPs), calcOpCount(opCount, OP_CHECKLOCKTIMEVERIFY))
          } //otherwise, treat OP_CLTV like a NOP and remove it from the script
else loop(ScriptProgram(p, p.script.tail, ScriptProgram.Script), calcOpCount(opCount, OP_CHECKLOCKTIMEVERIFY))
case OP_CHECKSEQUENCEVERIFY :: t =>
          //check if CSV is enforced yet
if (ScriptFlagUtil.checkSequenceVerifyEnabled(p.flags)) {
loop(LockTimeInterpreter.opCheckSequenceVerify(p), calcOpCount(opCount, OP_CHECKSEQUENCEVERIFY))
          } //if not, check whether we should discourage upgradable NOPs
else if (ScriptFlagUtil.discourageUpgradableNOPs(p.flags)) {
logger.error("We cannot execute a NOP when the ScriptVerifyDiscourageUpgradableNOPs is set")
loop(ScriptProgram(p, ScriptErrorDiscourageUpgradableNOPs), calcOpCount(opCount, OP_CHECKSEQUENCEVERIFY))
          } //otherwise, treat OP_CSV like a NOP and remove it from the script
else loop(ScriptProgram(p, p.script.tail, ScriptProgram.Script), calcOpCount(opCount, OP_CHECKSEQUENCEVERIFY))
//no more script operations to run, return whether the program is valid and the final state of the program
case Nil => loop(ScriptProgram.toExecutedProgram(p), opCount)
case h :: t => throw new RuntimeException(h + " was unmatched")
}
}
}
}
/**
   * Checks the validity of a transaction in accordance with Bitcoin Core's CheckTransaction function
* https://github.com/bitcoin/bitcoin/blob/f7a21dae5dbf71d5bc00485215e84e6f2b309d0a/src/main.cpp#L939.
*/
def checkTransaction(transaction: Transaction): Boolean = {
val inputOutputsNotZero = !(transaction.inputs.isEmpty || transaction.outputs.isEmpty)
val txNotLargerThanBlock = transaction.bytes.size < Consensus.maxBlockSize
val outputsSpendValidAmountsOfMoney = !transaction.outputs.exists(o =>
o.value < CurrencyUnits.zero || o.value > Consensus.maxMoney)
val outputValues = transaction.outputs.map(_.value)
val totalSpentByOutputs: CurrencyUnit = outputValues.fold(CurrencyUnits.zero)(_ + _)
val allOutputsValidMoneyRange = validMoneyRange(totalSpentByOutputs)
    //inputs must not spend the same outpoint twice (Bitcoin Core checks full outpoints, not just txids)
    val prevOutputs = transaction.inputs.map(_.previousOutput)
    val noDuplicateInputs = prevOutputs.distinct.size == prevOutputs.size
    val isValidScriptSigForCoinbaseTx =
      if (transaction.isCoinbase) {
        transaction.inputs.head.scriptSignature.asmBytes.size >= 2 &&
          transaction.inputs.head.scriptSignature.asmBytes.size <= 100
      } else {
        //since this is not a coinbase tx we cannot have any empty previous outs inside of inputs
        !transaction.inputs.exists(_.previousOutput == EmptyTransactionOutPoint)
      }
    inputOutputsNotZero && txNotLargerThanBlock && outputsSpendValidAmountsOfMoney &&
      allOutputsValidMoneyRange && noDuplicateInputs && isValidScriptSigForCoinbaseTx
}
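  // A hedged usage sketch (`tx` is a hypothetical, already-parsed Transaction):
  //   val tx: Transaction = ...
  //   require(ScriptInterpreter.checkTransaction(tx), "tx failed CheckTransaction")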
/** Determines if the given currency unit is within the valid range for the system */
def validMoneyRange(currencyUnit: CurrencyUnit): Boolean = {
currencyUnit >= CurrencyUnits.zero && currencyUnit <= Consensus.maxMoney
}
/** Calculates the new op count after the execution of the given [[ScriptToken]] */
  private def calcOpCount(oldOpCount: Int, token: ScriptToken): Int =
    if (BitcoinScriptUtil.countsTowardsScriptOpLimit(token)) oldOpCount + 1
    else oldOpCount
/**
* Checks if the transaction contained a witness that we did not use
* [[https://github.com/bitcoin/bitcoin/blob/528472111b4965b1a99c4bcf08ac5ec93d87f10f/src/script/interpreter.cpp#L1515-L1523]]
* Return true if witness was NOT used, return false if witness was used.
*/
private def hasUnexpectedWitness(program: ScriptProgram): Boolean = {
val txSigComponent = program.txSignatureComponent
logger.debug("TxSigComponent: " + txSigComponent)
val unexpectedWitness = txSigComponent match {
case b: BaseTxSigComponent =>
b.transaction match {
case wtx: WitnessTransaction =>
wtx.witness.witnesses(txSigComponent.inputIndex.toInt).stack.nonEmpty
case _: BaseTransaction => false
}
case _: WitnessTxSigComponentRaw => false
case w: WitnessTxSigComponentP2SH =>
!w.scriptSignature.redeemScript.isInstanceOf[WitnessScriptPubKey]
case r: WitnessTxSigComponentRebuilt =>
r.transaction match {
case wtx: WitnessTransaction =>
wtx.witness.witnesses(txSigComponent.inputIndex.toInt).stack.nonEmpty
case _: BaseTransaction => false
}
}
if (unexpectedWitness) logger.error("Found unexpected witness that was not used by the ScriptProgram: " + program)
unexpectedWitness
}
/**
* Helper function used to rebuild a [[WitnessTxSigComponentRebuilt]]
   * this converts a [[WitnessScriptPubKey]] into its corresponding [[ScriptPubKey]]
*/
private def rebuildWTxSigComponent(old: WitnessTxSigComponent, rebuildScriptPubKey: ScriptPubKey): Try[WitnessTxSigComponentRebuilt] = old match {
case wTxSigComponentRaw: WitnessTxSigComponentRaw =>
Success(WitnessTxSigComponentRebuilt(old.transaction, old.inputIndex,
rebuildScriptPubKey, wTxSigComponentRaw.scriptPubKey, old.flags, old.amount))
case wTxSigComponentP2SH: WitnessTxSigComponentP2SH =>
wTxSigComponentP2SH.witnessScriptPubKey.map { wit: WitnessScriptPubKey =>
WitnessTxSigComponentRebuilt(old.transaction, old.inputIndex,
rebuildScriptPubKey, wit, old.flags, old.amount)
}
}
  /** Logic to evaluate a witness version that has not been assigned yet */
private def evaluateUnassignedWitness(txSigComponent: TxSigComponent): Try[ExecutedScriptProgram] = {
logger.warn("Unassigned witness inside of witness script pubkey")
val flags = txSigComponent.flags
val discourageUpgradableWitnessVersion = ScriptFlagUtil.discourageUpgradableWitnessProgram(flags)
val program = ScriptProgram(txSigComponent, Nil, Nil, txSigComponent.scriptPubKey.asm, Nil)
if (discourageUpgradableWitnessVersion) {
Success(ScriptProgram(program, ScriptErrorDiscourageUpgradeableWitnessProgram))
} else {
//if we are not discouraging upgradable ops, we just trivially return the program with an OP_TRUE on the stack
//see: https://github.com/bitcoin/bitcoin/blob/b83264d9c7a8ddb79f64bd9540caddc8632ef31f/src/script/interpreter.cpp#L1386-L1389
val evaluated = loop(ScriptProgram(program, Seq(OP_TRUE), ScriptProgram.Stack), 0)
Success(evaluated)
}
}
}
object ScriptInterpreter extends ScriptInterpreter
|
Christewart/bitcoin-s-core
|
src/main/scala/org/bitcoins/core/script/interpreter/ScriptInterpreter.scala
|
Scala
|
mit
| 35,373
|
package memnets.core
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
object Library {
implicit def libtoOpt(lib: Library): Option[Library] = Option(lib)
}
class Library {
private[memnets] val _builders = ArrayBuffer[BldType]()
def builders: Iterable[BldType] = _builders
// Java
def getBuilders: java.util.Collection[BldType] = _builders.asJava
  /** helper for using this Scala object from Java */
def toJava(): Library = this
}
|
MemoryNetworks/memnets
|
api/src/main/scala/memnets/core/Library.scala
|
Scala
|
apache-2.0
| 473
|
package com.wuyuntao.aeneas.migration.example.migrations
import com.wuyuntao.aeneas.migration.Migration
import com.wuyuntao.aeneas.migration.dsl.DbModifier
class CreatePasswordChangedEventTable extends Migration {
def version = 20151030184920483L
def up(db: DbModifier) = {
db.executeSql("""CREATE TABLE password_changed_events (
| event_id timeuuid PRIMARY KEY,
| event_version int,
| new_password text
|)
|""".stripMargin)
}
def down(db: DbModifier) = {
db.executeSql("DROP TABLE password_changed_events")
}
}
|
wuyuntao/Aeneas
|
aeneas-migration-example/src/main/scala/com/wuyuntao/aeneas/migration/example/migrations/V20151030184920483_CreatePasswordChangedEventTable.scala
|
Scala
|
apache-2.0
| 583
|
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.shell
import java.io.IOException
import scala.collection.JavaConversions._
import scala.collection.mutable.Map
import org.apache.avro.Schema
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.HTableDescriptor
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.kiji.annotations.ApiAudience
import org.kiji.schema.Kiji
import org.kiji.schema.KijiMetaTable
import org.kiji.schema.KijiSchemaTable
import org.kiji.schema.KijiURI
import org.kiji.schema.avro.AvroSchema
import org.kiji.schema.avro.TableLayoutDesc
import org.kiji.schema.layout.KijiTableLayout
import org.kiji.schema.security.KijiSecurityManager
import org.kiji.schema.util.ProtocolVersion
import org.kiji.schema.util.ResourceUtils
import org.kiji.schema.util.VersionInfo
/**
* Instances of this class provide the Kiji schema shell with access to KijiSchema.
* Clients of this class should use it to obtain handles to Kiji resources. Clients
* should avoid closing any Kiji resources they obtain through this object. Instead,
 * clients should use the {@link KijiSystem#shutdown} method to shut down all
* resources when done with interacting with Kiji.
*
* <p>Each thread should create its own KijiSystem instance.</p>
*/
@ApiAudience.Private
final class KijiSystem extends AbstractKijiSystem {
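  // Lifecycle sketch (the instance URI is illustrative, and KijiURI.newBuilder is
  // assumed from the kiji-schema API):
  //   val sys = new KijiSystem
  //   try sys.getTableNamesDescriptions(KijiURI.newBuilder("kiji://.env/default").build())
  //   finally sys.shutdown()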
// A map from Kiji instance names to internal implementations of Kiji instances.
private val kijis = Map[KijiURI, Kiji]()
// A lazily-initialized HBaseAdmin.
private var maybeHBaseAdmin: Option[HBaseAdmin] = None
/** Return an HBaseAdmin if we have one, initializing one if we don't. */
private def hBaseAdmin: HBaseAdmin = {
maybeHBaseAdmin match {
case Some(admin) => admin
case None => {
val admin = new HBaseAdmin(HBaseConfiguration.create())
maybeHBaseAdmin = Some(admin)
admin
}
}
}
/**
* Gets the meta table for the Kiji instance with the specified name.
*
* @param uri Name of the Kiji instance.
* @return A meta table for the specified Kiji instance, or None if the specified instance
* cannot be opened.
*/
private def kijiMetaTable(uri: KijiURI): Option[KijiMetaTable] = {
kijiCache(uri) match {
case Some(theKiji) => Some(theKiji.getMetaTable())
case None => None
}
}
override def getSystemVersion(uri: KijiURI): ProtocolVersion = {
return VersionInfo.getClusterDataVersion(
kijiCache(uri).getOrElse(throw new DDLException("Could not open " + uri)))
}
override def getOrCreateSchemaId(uri: KijiURI, schema: Schema): Long = {
val kiji: Kiji = kijiCache(uri).getOrElse(throw new DDLException("Could not open " + uri))
val schemaTable: KijiSchemaTable = kiji.getSchemaTable()
val id = schemaTable.getOrCreateSchemaId(schema)
schemaTable.flush()
return id
}
override def getSchemaId(uri: KijiURI, schema: Schema): Option[Long] = {
val kiji: Kiji = kijiCache(uri).getOrElse(throw new DDLException("Could not open " + uri))
val schemaTable: KijiSchemaTable = kiji.getSchemaTable()
// Look up the schema entry. If none exists, return None. Otherwise return Some(theId).
return Option(schemaTable.getSchemaEntry(schema)).map { _.getId}
}
override def getSchemaForId(uri: KijiURI, uid: Long): Option[Schema] = {
val kiji: Kiji = kijiCache(uri).getOrElse(throw new DDLException("Could not open " + uri))
val schemaTable: KijiSchemaTable = kiji.getSchemaTable()
return Option(schemaTable.getSchema(uid))
}
override def getSchemaFor(uri: KijiURI, avroSchema: AvroSchema): Option[Schema] = {
if (avroSchema.getJson != null) {
val schema: Schema = new Schema.Parser().parse(avroSchema.getJson)
return Some(schema)
}
return getSchemaForId(uri, avroSchema.getUid)
}
override def setMeta(uri: KijiURI, table: String, key: String, value: String): Unit = {
val metaTable: KijiMetaTable = kijiMetaTable(uri).getOrElse(
throw new IOException("Cannot get metatable for URI " + uri))
metaTable.putValue(table, key, value.getBytes())
}
override def getMeta(uri: KijiURI, table: String, key: String): Option[String] = {
val metaTable: KijiMetaTable = kijiMetaTable(uri).getOrElse(
throw new IOException("Cannot get metatable for URI " + uri))
try {
val bytes: Array[Byte] = metaTable.getValue(table, key)
return Some(new String(bytes, "UTF-8"))
} catch {
case ioe: IOException => return None // Key not found.
}
}
override def getSecurityManager(uri: KijiURI): KijiSecurityManager = {
kijiCache(uri) match {
case Some(kiji) =>
return kiji.getSecurityManager()
case None =>
throw new IOException("Cannot open kiji: %s".format(uri.toString))
}
}
/**
* Gets the Kiji instance implementation for the Kiji instance with the specified name.
*
* <p>This method caches the Kiji instances opened.</p>
*
* @param uri Name of the Kiji instance.
* @return An Kiji for the Kiji instance with the specified name, or none if the
* instance specified cannot be opened.
*/
private def kijiCache(uri: KijiURI): Option[Kiji] = {
if (!kijis.contains(uri)) {
try {
val theKiji = Kiji.Factory.open(uri)
kijis += (uri -> theKiji)
Some(theKiji)
} catch {
case exn: Exception => {
exn.printStackTrace()
None
}
}
} else {
Some(kijis(uri))
}
}
override def getTableNamesDescriptions(uri: KijiURI): Array[(String, String)] = {
// Get all table names.
val tableNames: List[String] = kijiCache(uri) match {
case Some(kiji) => kiji.getTableNames().toList
case None => List()
}
// join table names and descriptions.
tableNames.map { name =>
kijiMetaTable(uri) match {
case Some(metaTable) => {
val description = metaTable.getTableLayout(name).getDesc().getDescription()
(name, description)
}
case None => (name, "")
}
}.toArray
}
override def getTableLayout(uri: KijiURI, table: String): Option[KijiTableLayout] = {
kijiMetaTable(uri) match {
case Some(metaTable) => {
try {
val layout = metaTable.getTableLayout(table)
Some(layout)
} catch {
case _: IOException => None
}
}
case None => None
}
}
override def createTable(uri: KijiURI, layout: KijiTableLayout, numRegions: Int): Unit = {
kijiCache(uri) match {
case Some(kiji) => { kiji.createTable(layout.getDesc(), numRegions) }
case None => { throw new IOException("Cannot get kiji for \\"" + uri.toString() + "\\"") }
}
}
override def applyLayout(uri: KijiURI, table: String, layout: TableLayoutDesc): Unit = {
kijiCache(uri) match {
case Some(kiji) => { kiji.modifyTableLayout(layout, false, Console.out) }
case None => { throw new IOException("Cannot get kiji for \\"" + uri.toString() + "\\"") }
}
}
override def dropTable(uri: KijiURI, table: String): Unit = {
kijiCache(uri) match {
case Some(kiji) => { kiji.deleteTable(table) }
case None => { throw new IOException("Cannot get kiji for \\"" + uri.toString() + "\\"") }
}
}
override def listInstances(): Set[String] = {
def parseInstanceName(kijiTableName: String): Option[String] = {
val parts: Seq[String] = kijiTableName.split('.')
if (parts.length < 3 || !KijiURI.KIJI_SCHEME.equals(parts.head)) {
None
} else {
Some(parts(1))
}
}
    val hTableDescriptors: List[HTableDescriptor] = hBaseAdmin.listTables().toList
    val kijiInstanceNames: Set[String] = hTableDescriptors.foldLeft(Set.empty[String])({
(names: Set[String], htableDesc) =>
val instanceName: Option[String] = parseInstanceName(htableDesc.getNameAsString())
instanceName match {
case Some(instance) => { names + instance }
case None => { names }
}
})
return kijiInstanceNames
}
override def shutdown(): Unit = {
maybeHBaseAdmin match {
case None => { /* do nothing. */ }
      case Some(admin) => {
        // Close the cached HBaseAdmin.
        ResourceUtils.closeOrLog(admin)
        maybeHBaseAdmin = None
      }
}
}
kijis.foreach { case (key, refCountable) =>
ResourceUtils.releaseOrLog(refCountable) }
kijis.clear
}
}
|
kijiproject/kiji-schema-shell
|
src/main/scala/org/kiji/schema/shell/KijiSystem.scala
|
Scala
|
apache-2.0
| 9,159
|
package io.megam.gradle
import scalaz._
import Scalaz._
import scalaz.Validation.FlatMap._
import org.gradle.tooling.BuildLauncher
import org.gradle.tooling.GradleConnector
import org.gradle.tooling.ProjectConnection
import java.io.File
class GradleBuildManager(raw: YonpiRaw) extends BuildManager {
override def build(): ValidationNel[Throwable, YonpiRaw] = {
taskRun("assemble")
}
override def clean(): scalaz.ValidationNel[Throwable, io.megam.gradle.YonpiRaw] = {
taskRun("clean")
}
private def taskRun(task: String): ValidationNel[Throwable, YonpiRaw] = {
(Validation.fromTryCatchThrowable[io.megam.gradle.YonpiRaw, Throwable] {
val connection: ProjectConnection = GradleConnector.newConnector.forProjectDirectory(raw.root).connect
val launcher: BuildLauncher = connection.newBuild
launcher.forTasks(task)
launcher.setStandardOutput(System.out)
launcher.setStandardError(System.err)
launcher.run
connection.close
raw
} leftMap { t: Throwable => t }).toValidationNel
}
}
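// A hedged usage sketch (the construction of `raw` is hypothetical; YonpiRaw is
// assumed to carry the Gradle project root in `root`):
//   val mgr = new GradleBuildManager(raw)
//   mgr.clean().flatMap(_ => mgr.build()) // ValidationNel[Throwable, YonpiRaw]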
|
megamsys/sparkbuilder
|
src/main/scala/io/megam/gradle/GradleBuildManager.scala
|
Scala
|
apache-2.0
| 1,057
|
package com.giyeok.jparser.nparser
import com.giyeok.jparser.Inputs.Input
import com.giyeok.jparser.NGrammar._
import com.giyeok.jparser.nparser.AcceptCondition.AcceptCondition
import com.giyeok.jparser.nparser.ParseTreeConstructor2.{KernelCore, Kernels}
import com.giyeok.jparser.nparser.ParsingContext.{Graph, Kernel}
import com.giyeok.jparser._
// Only the kernels of nodes that passed all accept conditions; the relations between kernels are inferred from the grammar and omitted.
class ParseTreeConstructor2[R <: ParseResult](resultFunc: ParseResultFunc[R])(grammar: NGrammar)(input: Seq[Input], history: Seq[Kernels]) {
def reconstruct(): Option[R] =
reconstruct(Kernel(grammar.startSymbol, 1, 0, input.length))
def reconstruct(kernel: Kernel): Option[R] =
if (kernel.pointer > 0 && history(kernel.endGen).kernels.contains(kernel)) Some(reconstruct(kernel, Set())) else None
private def reconstruct(kernel: Kernel, traces: Set[KernelCore]): R = {
val gen = kernel.endGen
def reconstruct0(child: Kernel): R = {
val newTraces: Set[KernelCore] = if ((kernel.beginGen, gen) != (child.beginGen, child.endGen)) Set()
else traces + KernelCore(kernel.symbolId, kernel.pointer)
reconstruct(child, newTraces)
}
grammar.symbolOf(kernel.symbolId) match {
case symbol: NAtomicSymbol if traces contains KernelCore(kernel.symbolId, kernel.pointer) =>
resultFunc.cyclicBind(kernel.beginGen, gen, symbol)
case symbol: NSequence if traces contains KernelCore(kernel.symbolId, kernel.pointer) =>
resultFunc.sequence(kernel.beginGen, gen, symbol, kernel.pointer)
case symbol@NSequence(_, _, sequence) =>
if (sequence.isEmpty) {
assert(kernel.pointer == 0 && kernel.beginGen == kernel.endGen && kernel.beginGen == gen)
resultFunc.bind(kernel.beginGen, gen, symbol, resultFunc.sequence(kernel.beginGen, kernel.endGen, symbol, 0))
} else if (kernel.pointer == 0) {
assert(kernel.beginGen == kernel.endGen)
resultFunc.sequence(kernel.beginGen, kernel.endGen, symbol, 0)
} else {
val (symbolId, prevPointer) = (kernel.symbolId, kernel.pointer - 1)
val prevKernels = history(gen).kernels filter { kern =>
(kern.symbolId == symbolId) && (kern.pointer == prevPointer) && (kern.beginGen == kernel.beginGen)
}
val trees = prevKernels.toSeq.sortBy(_.tuple) flatMap { prevKernel =>
val childKernel = Kernel(sequence(prevPointer), 1, prevKernel.endGen, gen)
if (history(gen).kernels contains childKernel) {
val precedingTree = reconstruct0(Kernel(kernel.symbolId, prevPointer, kernel.beginGen, prevKernel.endGen))
val childTree = reconstruct0(childKernel)
// println(s"preceding: $precedingTree")
// println(s"child: $childTree")
Some(resultFunc.append(precedingTree, childTree))
} else None
}
val appendedSeq = resultFunc.merge(trees)
if (kernel.pointer == sequence.length) resultFunc.bind(kernel.beginGen, gen, symbol, appendedSeq) else appendedSeq
}
case symbol@NJoin(_, _, body, join) =>
assert(kernel.pointer == 1)
val bodyKernel = Kernel(body, 1, kernel.beginGen, kernel.endGen)
val joinKernel = Kernel(join, 1, kernel.beginGen, kernel.endGen)
val bodyTree = reconstruct0(bodyKernel)
val joinTree = reconstruct0(joinKernel)
resultFunc.join(kernel.beginGen, kernel.endGen, symbol, bodyTree, joinTree)
case symbol: NTerminal =>
resultFunc.bind(kernel.beginGen, kernel.endGen, symbol,
resultFunc.terminal(kernel.beginGen, input(kernel.beginGen)))
case symbol: NAtomicSymbol =>
assert(kernel.pointer == 1)
def lastKernel(symbolId: Int) =
Kernel(symbolId, Kernel.lastPointerOf(grammar.symbolOf(symbolId)), kernel.beginGen, gen)
// assert(finishes(gen).edgesByStart(prevKernel) forall { _.isInstanceOf[SimpleKernelEdge] })
          // TODO reconstruct the entries of history(gen).finished that were derived from this kernel and satisfy (beginGen == kernel.beginGen && endGen == gen)
val bodyKernels: Set[Kernel] = grammar.nsymbols(kernel.symbolId) match {
case deriver: NSimpleDerive => deriver.produces.map(lastKernel)
case NGrammar.NExcept(_, _, body, _) => Set(lastKernel(body))
case NGrammar.NLongest(_, _, body) => Set(lastKernel(body))
case symbol: NGrammar.NLookaheadSymbol => Set(lastKernel(symbol.emptySeqId))
case _: NTerminal | _: NJoin => assert(false); ???
}
val validKernels = history(gen).kernels intersect bodyKernels
assert(validKernels.nonEmpty)
val bodyTrees = validKernels.toSeq.sortBy(_.tuple) map {
bodyKernel => reconstruct0(bodyKernel)
}
assert(bodyTrees.nonEmpty)
resultFunc.bind(kernel.beginGen, kernel.endGen, symbol, resultFunc.merge(bodyTrees))
}
}
}
object ParseTreeConstructor2 {
case class Kernels(kernels: Set[Kernel])
case class KernelCore(symbolId: Int, pointer: Int)
def kernelsFrom(history: Seq[Graph], conditionFinal: Map[AcceptCondition, Boolean]): Seq[Kernels] =
history.map(_.filterNode(node => conditionFinal(node.condition)))
.map(graph => Kernels(graph.nodes.map(_.kernel)))
def constructor[R <: ParseResult](resultFunc: ParseResultFunc[R])(grammar: NGrammar)
(input: Seq[Input], history: Seq[Graph], conditionFinal: Map[AcceptCondition, Boolean]): ParseTreeConstructor2[R] =
new ParseTreeConstructor2[R](resultFunc)(grammar)(input, kernelsFrom(history, conditionFinal))
def forestConstructor(grammar: NGrammar)
(input: Seq[Input], history: Seq[Graph], conditionFinal: Map[AcceptCondition, Boolean]): ParseTreeConstructor2[ParseForest] =
constructor(ParseForestFunc)(grammar)(input, history, conditionFinal)
}
|
Joonsoo/moon-parser
|
naive/src/main/scala/com/giyeok/jparser/nparser/ParseTreeConstructor2.scala
|
Scala
|
mit
| 6,021
|
package io.circe
package object jawn extends JawnParser
|
travisbrown/circe
|
modules/jawn/src/main/scala/io/circe/jawn/package.scala
|
Scala
|
apache-2.0
| 57
|
package com.gilazaria.subsearch.output
import com.gilazaria.subsearch.model.{Record, RecordType}
import com.gilazaria.subsearch.utils.File
import scala.collection.SortedSet
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
class StandardOutput(private val file: Option[File], private val verbose: Boolean) extends Output {
  private var saveToFileFuture: Future[Unit] = Future.successful(())
override def print(string: String): Unit = {
if (file.isDefined) {
saveToFileFuture = saveToFileFuture.map {
_ => file.get.write(string)
}
}
}
override def writingToFileFuture: Future[Unit] = {
saveToFileFuture
}
override def printRecords(records: SortedSet[Record]) = {
if (verbose) printRecordsVerbose(records)
else printRecordsNormal(records)
}
protected def printRecordsVerbose(records: SortedSet[Record]) = {
val lines: List[String] =
records
.map(_.name)
.toList
.flatMap {
subdomain =>
val subdomainRecords: SortedSet[Record] = records.filter(_.name == subdomain)
val recordTypes: SortedSet[RecordType] = subdomainRecords.map(_.recordType)
recordTypes.flatMap {
recordType =>
subdomainRecords.filter(_.recordType == recordType).map {
case Record(_, _, data) =>
val msg = formatRecordTypeAndSubdomainForPrinting(recordType, subdomain)
if (recordType.isOneOf("A", "AAAA", "CNAME", "NS", "SRV"))
s"$msg -> $data"
else if (recordType.stringValue == "MX")
s"$msg @@ $data"
else
s"$msg -- $data"
}
}
}
if (lines.nonEmpty)
println(lines.mkString("\\n"))
}
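  // Sample verbose lines (illustrative values; prependTime adds a timestamp prefix):
  //   A      : sub.example.com -> 93.184.216.34
  //   MX     : sub.example.com @@ 10 mail.example.com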
protected def formatRecordTypeAndSubdomainForPrinting(recordType: RecordType, subdomain: String): String =
prependTime(f"${recordType.toString}%-7s: $subdomain")
protected def printRecordsNormal(records: SortedSet[Record]) = {
val lines: List[String] =
records
.map(_.name)
.toList
.map(subdomain => (subdomain, records.filter(_.name == subdomain).map(_.recordType)))
.map((data: (String, SortedSet[RecordType])) => s"${data._2.mkString(", ")}: ${data._1}")
if (lines.nonEmpty)
printSuccess(lines.mkString("\\n"))
}
}
object StandardOutput {
def create(fileOption: Option[File], verbose: Boolean): Option[StandardOutput] =
if (fileOption.isDefined) Some(new StandardOutput(fileOption, verbose))
else None
}
|
gavia/subsearch
|
src/main/scala/com/gilazaria/subsearch/output/StandardOutput.scala
|
Scala
|
gpl-2.0
| 2,654
|
package play.api.libs.json
import scala.language.reflectiveCalls
/**
* Helper functions to handle JsValues.
*/
object Json {
/**
* Parse a String representing a json, and return it as a JsValue.
*
* @param input a String to parse
* @return the JsValue representing the string
*/
def parse(input: String): JsValue = JacksonJson.parseJsValue(input)
/**
* Parse a byte array representing a json, and return it as a JsValue.
*
* The character encoding used will be automatically detected as UTF-8, UTF-16 or UTF-32, as per the heuristics in
* RFC-4627.
*
* @param input a byte array to parse
* @return the JsValue representing the byte array
*/
def parse(input: Array[Byte]): JsValue = JacksonJson.parseJsValue(input)
/**
* Convert a JsValue to its string representation.
*
* {{{
* scala> Json.stringify(Json.obj(
* "field1" -> Json.obj(
* "field11" -> "value11",
* "field12" -> Json.arr("alpha", 123L)
* )
* ))
* res0: String = {"field1":{"field11":"value11","field12":["alpha",123]}}
*
* scala> Json.stringify(res0)
* res1: String = {"field1":{"field11":"value11","field12":["alpha",123]}}
* }}}
*
* @param json the JsValue to convert
* @return a String with the json representation
*/
def stringify(json: JsValue): String = JacksonJson.generateFromJsValue(json)
/**
* Convert a JsValue to its pretty string representation using default Jackson
* pretty printer (line feeds after each fields and 2-spaces indentation).
*
* {{{
* scala> Json.stringify(Json.obj(
* "field1" -> Json.obj(
* "field11" -> "value11",
* "field12" -> Json.arr("alpha", 123L)
* )
* ))
* res0: String = {"field1":{"field11":"value11","field12":["alpha",123]}}
*
* scala> Json.prettyPrint(res0)
* res1: String =
* {
* "field1" : {
* "field11" : "value11",
* "field12" : [ "alpha", 123 ]
* }
* }
* }}}
*
* @param json the JsValue to convert
* @return a String with the json representation
*/
def prettyPrint(json: JsValue): String = JacksonJson.prettyPrint(json)
/**
* Provided a Reads implicit for its type is available, convert any object into a JsValue.
*
* @param o Value to convert in Json.
*/
def toJson[T](o: T)(implicit tjs: Writes[T]): JsValue = tjs.writes(o)
/**
* Provided a Writes implicit for that type is available, convert a JsValue to any type.
*
* @param json Json value to transform as an instance of T.
*/
def fromJson[T](json: JsValue)(implicit fjs: Reads[T]): JsResult[T] = fjs.reads(json)
/**
   * Next is the trait that allows simplified Json syntax:
*
* Example :
* {{{
* JsObject(Seq(
* "key1", JsString("value"),
* "key2" -> JsNumber(123),
* "key3" -> JsObject(Seq("key31" -> JsString("value31")))
* )) == Json.obj( "key1" -> "value", "key2" -> 123, "key3" -> obj("key31" -> "value31"))
*
* JsArray(JsString("value"), JsNumber(123), JsBoolean(true)) == Json.arr( "value", 123, true )
* }}}
*
* There is an implicit conversion from any Type with a Json Writes to JsValueWrapper
   * which is an empty trait that shouldn't result in unexpected implicit conversions.
*
* Something to note due to `JsValueWrapper` extending `NotNull` :
   * `null` or `None` will result in a compile error: use JsNull instead.
*/
sealed trait JsValueWrapper extends NotNull
private case class JsValueWrapperImpl(field: JsValue) extends JsValueWrapper
import scala.language.implicitConversions
implicit def toJsFieldJsValueWrapper[T](field: T)(implicit w: Writes[T]): JsValueWrapper = JsValueWrapperImpl(w.writes(field))
def obj(fields: (String, JsValueWrapper)*): JsObject = JsObject(fields.map(f => (f._1, f._2.asInstanceOf[JsValueWrapperImpl].field)))
def arr(fields: JsValueWrapper*): JsArray = JsArray(fields.map(_.asInstanceOf[JsValueWrapperImpl].field))
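  // e.g. Json.obj("name" -> "bob", "age" -> 25) and Json.arr("alpha", 123L, true)
  // both rely on the implicit JsValueWrapper conversion above.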
import play.api.libs.iteratee.Enumeratee
/**
* Transform a stream of A to a stream of JsValue
* {{{
* val fooStream: Enumerator[Foo] = ???
* val jsonStream: Enumerator[JsValue] = fooStream &> Json.toJson
* }}}
*/
def toJson[A : Writes]: Enumeratee[A, JsValue] = Enumeratee.map(Json.toJson(_))
/**
* Transform a stream of JsValue to a stream of A, keeping only successful results
* {{{
* val jsonStream: Enumerator[JsValue] = ???
* val fooStream: Enumerator[Foo] = jsonStream &> Json.fromJson
* }}}
*/
def fromJson[A : Reads]: Enumeratee[JsValue, A] =
Enumeratee.map((json: JsValue) => Json.fromJson(json)) ><> Enumeratee.collect { case JsSuccess(value, _) => value }
/**
* Experimental JSON extensions to replace asProductXXX by generating
* Reads[T]/Writes[T]/Format[T] from case class at COMPILE time using
* new Scala 2.10 macro & reflection features.
*/
import scala.reflect.macros.Context
import language.experimental.macros
/**
   * Creates a Reads[T] by resolving case class fields & required implicits at COMPILE-time.
*
* If any missing implicit is discovered, compiler will break with corresponding error.
* {{{
* import play.api.libs.json.Json
*
* case class User(name: String, age: Int)
*
* implicit val userReads = Json.reads[User]
* // macro-compiler replaces Json.reads[User] by injecting into compile chain
* // the exact code you would write yourself. This is strictly equivalent to:
* implicit val userReads = (
* (__ \\ 'name).read[String] and
* (__ \\ 'age).read[Int]
* )(User)
* }}}
*/
def reads[A] = macro JsMacroImpl.readsImpl[A]
/**
   * Creates a Writes[T] by resolving case class fields & required implicits at COMPILE-time
*
* If any missing implicit is discovered, compiler will break with corresponding error.
* {{{
* import play.api.libs.json.Json
*
* case class User(name: String, age: Int)
*
* implicit val userWrites = Json.writes[User]
* // macro-compiler replaces Json.writes[User] by injecting into compile chain
* // the exact code you would write yourself. This is strictly equivalent to:
* implicit val userWrites = (
* (__ \\ 'name).write[String] and
* (__ \\ 'age).write[Int]
* )(unlift(User.unapply))
* }}}
*/
def writes[A] = macro JsMacroImpl.writesImpl[A]
/**
* Creates a Format[T] by resolving case class fields & required implicits at COMPILE-time
*
* If any missing implicit is discovered, compiler will break with corresponding error.
* {{{
* import play.api.libs.json.Json
*
* case class User(name: String, age: Int)
*
* implicit val userWrites = Json.format[User]
* // macro-compiler replaces Json.format[User] by injecting into compile chain
* // the exact code you would write yourself. This is strictly equivalent to:
* implicit val userWrites = (
* (__ \\ 'name).format[String] and
* (__ \\ 'age).format[Int]
* )(User.apply, unlift(User.unapply))
* }}}
*/
def format[A] = macro JsMacroImpl.formatImpl[A]
}
|
noel-yap/setter-for-catan
|
play-2.1.1/framework/src/play/src/main/scala/play/api/libs/json/Json.scala
|
Scala
|
apache-2.0
| 7,159
|
/*
* @author Genc Mazlami
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.dcop.exceptions
class TableEntryNotFoundException(msg:String) extends RuntimeException(msg)
|
gmazlami/dcop-maxsum
|
src/main/scala/com/signalcollect/dcop/exceptions/TableEntryNotFoundException.scala
|
Scala
|
apache-2.0
| 771
|
package org.jetbrains.plugins.scala.findUsages.compilerReferences.bytecode
import org.jetbrains.jps.backwardRefs.CompilerRef
private trait CompilerRefProvider[From] extends (From => CompilerRef) {
def toCompilerRef(from: From): CompilerRef
override def apply(from: From): CompilerRef = toCompilerRef(from)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/findUsages/compilerReferences/bytecode/CompilerRefProvider.scala
|
Scala
|
apache-2.0
| 314
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ing.wbaa.druid
import java.util.concurrent.TimeoutException
import scala.concurrent.duration._
import scala.language.postfixOps
import akka.http.scaladsl.model.{ HttpProtocols, StatusCodes }
import akka.http.scaladsl.model.headers.RawHeader
import com.ing.wbaa.druid.client.{ DruidHttpClient, HttpStatusException }
import com.ing.wbaa.druid.definitions.{ CountAggregation, GranularityType }
import com.ing.wbaa.druid.util._
import org.scalatest.concurrent._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
class DruidClientSpec extends AnyWordSpec with Matchers with ScalaFutures {
override implicit def patienceConfig: PatienceConfig = PatienceConfig(10 seconds, 100 millis)
"DruidClient" should {
"indicate when Druid is healthy" in {
val config = DruidConfig(clientBackend = classOf[DruidHttpClient])
val client = config.client
whenReady(client.isHealthy()) { result =>
result shouldBe true
}
}
"indicate when Druid is not healthy" in {
val config = DruidConfig(clientBackend = classOf[DruidHttpClient],
hosts = Seq(QueryHost("localhost", 8187)))
val client = config.client
whenReady(client.isHealthy()) { result =>
result shouldBe false
}
}
"fail to load when having multiple query nodes" in {
val config = DruidConfig(clientBackend = classOf[DruidHttpClient],
hosts =
Seq(QueryHost("localhost", 8082), QueryHost("localhost", 8183)))
assertThrows[IllegalStateException] {
config.client
}
}
"throw HttpStatusException for non-200 status codes" in {
val config = DruidConfig(clientBackend = classOf[DruidHttpClient],
hosts = Seq(QueryHost("localhost", 8186))) // yields HTTP 500
val client = config.client
val responseFuture = client.doQuery(
TimeSeriesQuery(
aggregations = List(
CountAggregation(name = "count")
),
granularity = GranularityType.Hour,
intervals = List("2011-06-01/2017-06-01")
)
)
whenReady(responseFuture.failed) {
case exception: HttpStatusException =>
exception.status shouldBe StatusCodes.InternalServerError
exception.protocol shouldBe HttpProtocols.`HTTP/1.1`
exception.headers should contain(new RawHeader("x-clusterfk-status-code", "500"))
exception.entity.get.isKnownEmpty() shouldBe true
case response => fail(s"expected HttpStatusException, got $response")
}
client.shutdown().futureValue
}
"throw HttpStatusException for non-200 status codes where body fails to materialize" in {
      // the endpoint on 8187 returns HTTP 502 and takes 5 seconds to send the response body
implicit val config =
DruidConfig(
clientBackend = classOf[DruidHttpClient],
responseParsingTimeout = 1.seconds,
hosts = Seq(QueryHost("localhost", 8187))
)
val client = config.client
val responseFuture = client.doQuery(
TimeSeriesQuery(
aggregations = List(
CountAggregation(name = "count")
),
granularity = GranularityType.Hour,
intervals = List("2011-06-01/2017-06-01")
)
)
whenReady(responseFuture.failed) {
case exception: HttpStatusException =>
exception.status shouldBe StatusCodes.BadGateway
exception.entity.isFailure shouldBe true
exception.entity.failed.get shouldBe a[TimeoutException]
case response => fail(s"expected HttpStatusException, got $response")
}
config.client.shutdown().futureValue
}
"throw HttpStatusException when pushing an invalid query" in {
implicit val config: DruidConfig =
DruidConfig(
clientBackend = classOf[DruidHttpClient],
hosts = Seq(QueryHost("localhost", 8082))
)
val client = config.client
val responseFuture = client.doQuery(
TimeSeriesQuery(
aggregations = List(
CountAggregation(name = "count")
),
intervals = List("invalid interval")
)
)
whenReady(responseFuture.failed) {
case exception: HttpStatusException =>
exception.status shouldBe StatusCodes.InternalServerError
exception.entity.isFailure shouldBe false
exception.entity.get.data.utf8String shouldBe
"""{
|"error":"Unknown exception",
|"errorMessage":"Cannot construct instance of `org.apache.druid.query.spec.LegacySegmentSpec`,
| problem: Format requires a '/' separator: invalid interval\\n
| at [Source: (org.eclipse.jetty.server.HttpInputOverHTTP); line: 1, column: 186]
| (through reference chain: org.apache.druid.query.timeseries.TimeseriesQuery[\\"intervals\\"])",
|"errorClass":"com.fasterxml.jackson.databind.exc.ValueInstantiationException",
|"host":null
|}""".toOneLine
case response => fail(s"expected HttpStatusException, got $response")
}
config.client.shutdown().futureValue
}
}
}
|
ing-bank/scruid
|
src/test/scala/com/ing/wbaa/druid/DruidClientSpec.scala
|
Scala
|
apache-2.0
| 6,125
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.optim
import breeze.linalg.*
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.math._
import scala.reflect.ClassTag
/**
* An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
* @param learningRate learning rate
* @param learningRateDecay learning rate decay
* @param beta1 first moment coefficient
* @param beta2 second moment coefficient
 * @param Epsilon small constant added for numerical stability
 * @tparam T numeric type of the parameters (Float or Double)
*/
class Adam[@specialized(Float, Double) T: ClassTag](
var learningRate: Double = 1e-3,
var learningRateDecay: Double = 0.0,
var beta1: Double = 0.9,
var beta2: Double = 0.999,
var Epsilon: Double = 1e-8)(implicit ev: TensorNumeric[T]) extends OptimMethod[T] {
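  // The update implemented in optimize() follows the paper (element-wise), with
  // state keys "s" (1st moment m) and "r" (2nd moment v):
  //   m_t = beta1 * m_{t-1} + (1 - beta1) * g_t
  //   v_t = beta2 * v_{t-1} + (1 - beta2) * g_t^2
  //   stepSize = lr * sqrt(1 - beta2^t) / (1 - beta1^t)
  //   theta_t = theta_{t-1} - stepSize * m_t / (sqrt(v_t) + epsilon)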
/**
* An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
*
   * @param feval a function that takes a single input (X), the point of evaluation, and
* returns f(X) and df/dX
* @param parameter the initial point
* @return the new x vector and the function list {fx}, evaluated before the update
*/
override def optimize(feval: (Tensor[T]) => (T, Tensor[T]),
parameter: Tensor[T]): (Tensor[T], Array[T]) = {
val lr = this.learningRate
val lrd = this.learningRateDecay
val beta1 = this.beta1
val beta2 = this.beta2
val eps = this.Epsilon
val (fx, dfdx) = feval(parameter)
var timestep = state.getOrElse[Int]("evalCounter", 0)
val (_s, _r, _denom) =
if (state.get[Tensor[T]]("s").isDefined) {
(state.get[Tensor[T]]("s").get, state.get[Tensor[T]]("r").get,
Tensor[T]().resizeAs(dfdx).zero())
} else {
(Tensor[T]().resizeAs(dfdx).zero(), Tensor[T]().resizeAs(dfdx).zero(),
Tensor[T]().resizeAs(dfdx).zero())
}
val clr = lr / (1 + timestep*lrd)
timestep = timestep + 1
_s.mul(ev.fromType[Double](beta1)).add(ev.fromType[Double](1-beta1), dfdx)
_r.mul(ev.fromType[Double](beta2)).addcmul(ev.fromType[Double](1-beta2), dfdx, dfdx)
    _denom.resizeAs(_r).copy(_r).sqrt().add(ev.fromType[Double](eps)) // sqrt(v) + epsilon
// efficiency improved upon by changing the order of computation, at expense of clarity
val biasCorrection1 = 1 - pow(beta1, timestep)
val biasCorrection2 = 1 - pow(beta2, timestep)
val stepSize = clr * sqrt(biasCorrection2) / biasCorrection1
parameter.addcdiv(ev.fromType[Double](-stepSize), _s, _denom)
state("evalCounter") = timestep // A tmp tensor to hold the sqrt(v) + epsilon
state("s") = _s // 1st moment variables
state("r") = _r // 2nd moment variables
(parameter, Array(fx))
}
override def loadFromTable(config: Table): this.type = {
this.learningRate = config.get[Double]("learningRate").getOrElse(this.learningRate)
this.learningRateDecay = config.get[Double]("learningRateDecay")
.getOrElse(this.learningRateDecay)
this.beta1 = config.get[Double]("beta1").getOrElse(this.beta1)
this.beta2 = config.get[Double]("beta2").getOrElse(this.beta2)
this.Epsilon = config.get[Double]("Epsilon").getOrElse(this.Epsilon)
this
}
override def clearHistory(): Unit = {
state.delete("s")
state.delete("r")
}
override def getLearningRate(): Double = this.learningRate
}
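// A hedged usage sketch (`feval` and `weights` are hypothetical; feval returns
// the loss and the gradient at the given point):
//   val optim = new Adam[Float](learningRate = 1e-3)
//   val (updatedWeights, Array(loss)) = optim.optimize(feval, weights)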
|
psyyz10/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/optim/Adam.scala
|
Scala
|
apache-2.0
| 3,976
|
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.testsuite.jsinterop
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
import org.scalajs.jasminetest.JasmineTest
import scala.concurrent.{Future, ExecutionContext}
import scala.scalajs.concurrent.JSExecutionContext
import scala.collection.mutable.ArrayBuffer
import org.scalajs.jasmine.JasmineExpectation
object AsyncTest extends JasmineTest {
def asyncTest(implicit executor: ExecutionContext) = {
val steps = new ArrayBuffer[String]
steps += "prep-future"
val f1 = Future {
steps += "future"
1 + 2 + 3
}
steps += "prep-map"
val f2 = f1 map { x =>
steps += "map"
x * 2
}
steps += "prep-foreach"
f2 foreach { _ => steps += "foreach" }
steps += "done"
steps
}
def expect(abuf: ArrayBuffer[String]): JasmineExpectation =
expect(abuf.toJSArray)
def queueExecOrderTests(implicit executor: ExecutionContext) = {
it("should correctly order future calls") {
val res = asyncTest
expect(res).toEqual(js.Array(
"prep-future",
"prep-map",
"prep-foreach",
"done"))
jasmine.Clock.tick(1)
expect(res).toEqual(js.Array(
"prep-future",
"prep-map",
"prep-foreach",
"done",
"future",
"map",
"foreach"))
}
}
describe("scala.scalajs.concurrent.JSExecutionContext.queue") {
beforeEach {
jasmine.Clock.useMock()
}
queueExecOrderTests(JSExecutionContext.queue)
}
describe("scala.scalajs.concurrent.JSExecutionContext.runNow") {
it("should correctly order future calls") {
val res = asyncTest(JSExecutionContext.runNow)
expect(res).toEqual(js.Array(
"prep-future",
"future",
"prep-map",
"map",
"prep-foreach",
"foreach",
"done"))
}
}
describe("scala.scala.concurrent.ExecutionContext.global") {
beforeEach {
jasmine.Clock.useMock()
}
it("should be a queue execution context") {
expect(ExecutionContext.global eq JSExecutionContext.queue).toBeTruthy
}
queueExecOrderTests(ExecutionContext.global)
}
describe("scala.concurrent.Future") {
it("should support map") {
implicit val ec = JSExecutionContext.runNow
val f = Future(3).map(x => x*2)
expect(f.value.get.get).toEqual(6)
}
it("should support flatMap") {
implicit val ec = JSExecutionContext.runNow
val f = Future(Future(3)).flatMap(x => x)
expect(f.value.get.get).toEqual(3)
}
it("should support sequence") {
implicit val ec = JSExecutionContext.runNow
val f = Future.sequence(Seq(Future(3), Future(5)))
expect(f.value.get.get.toJSArray).toEqual(js.Array(3, 5))
}
}
}
|
colinrgodsey/scala-js
|
test-suite/src/test/scala/org/scalajs/testsuite/jsinterop/AsyncTest.scala
|
Scala
|
bsd-3-clause
| 3,330
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.utils
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.rel.RelNode
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.{LocalEnvironment, DataSet => JDataSet, ExecutionEnvironment => JExecutionEnvironment}
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.{DataStream => JDataStream}
import org.apache.flink.streaming.api.environment.{LocalStreamEnvironment, StreamExecutionEnvironment => JStreamExecutionEnvironment}
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{Table, TableEnvironment, TableSchema}
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableFunction}
import org.junit.Assert.assertEquals
import org.junit.{ComparisonFailure, Rule}
import org.junit.rules.ExpectedException
import org.mockito.Mockito.{mock, when}
import util.control.Breaks._
/**
* Test base for testing Table API / SQL plans.
*/
class TableTestBase {
// used for accurate exception information checking.
val expectedException = ExpectedException.none()
@Rule
def thrown = expectedException
def batchTestUtil(): BatchTableTestUtil = {
BatchTableTestUtil()
}
def streamTestUtil(): StreamTableTestUtil = {
StreamTableTestUtil()
}
def verifyTableEquals(expected: Table, actual: Table): Unit = {
assertEquals(
"Logical plans do not match",
LogicalPlanFormatUtils.formatTempTableId(RelOptUtil.toString(expected.getRelNode)),
LogicalPlanFormatUtils.formatTempTableId(RelOptUtil.toString(actual.getRelNode)))
}
}
abstract class TableTestUtil {
private var counter = 0
def addTable[T: TypeInformation](fields: Expression*): Table = {
counter += 1
addTable[T](s"Table$counter", fields: _*)
}
def addTable[T: TypeInformation](name: String, fields: Expression*): Table
def addFunction[T: TypeInformation](name: String, function: TableFunction[T]): TableFunction[T]
def addFunction(name: String, function: ScalarFunction): Unit
def verifySql(query: String, expected: String): Unit
def verifyTable(resultTable: Table, expected: String): Unit
def verifySchema(resultTable: Table, fields: Seq[(String, TypeInformation[_])]): Unit = {
val actual = resultTable.getSchema
val expected = new TableSchema(fields.map(_._1).toArray, fields.map(_._2).toArray)
assertEquals(expected, actual)
}
// the print methods are for debugging purposes only
def printTable(resultTable: Table): Unit
def printSql(query: String): Unit
  protected def verifyString(expected: String, optimized: RelNode): Unit = {
val actual = RelOptUtil.toString(optimized)
// we remove the charset for testing because it
// depends on the native machine (Little/Big Endian)
val actualNoCharset = actual.replace("_UTF-16LE'", "'").replace("_UTF-16BE'", "'")
val expectedLines = expected.split("\\n").map(_.trim)
val actualLines = actualNoCharset.split("\\n").map(_.trim)
val expectedMessage = expectedLines.mkString("\\n")
val actualMessage = actualLines.mkString("\\n")
breakable {
for ((expectedLine, actualLine) <- expectedLines.zip(actualLines)) {
        if (expectedLine == TableTestUtil.ANY_NODE) {
          // %ANY_NODE% matches any single line; nothing to compare
        }
else if (expectedLine == TableTestUtil.ANY_SUBTREE) {
break
}
else if (expectedLine != actualLine) {
throw new ComparisonFailure(null, expectedMessage, actualMessage)
}
}
}
}
def explain(resultTable: Table): String
}
object TableTestUtil {
val ANY_NODE = "%ANY_NODE%"
val ANY_SUBTREE = "%ANY_SUBTREE%"
  // these methods currently just simplify string construction;
  // we could replace them with real logic later
def unaryAnyNode(input: String): String = {
s"""$ANY_NODE
|$input
|""".stripMargin.stripLineEnd
}
def anySubtree(): String = {
ANY_SUBTREE
}
def unaryNode(node: String, input: String, term: String*): String = {
s"""$node(${term.mkString(", ")})
|$input
|""".stripMargin.stripLineEnd
}
def binaryNode(node: String, left: String, right: String, term: String*): String = {
s"""$node(${term.mkString(", ")})
|$left
|$right
|""".stripMargin.stripLineEnd
}
def naryNode(node: String, inputs: List[AnyRef], term: String*): String = {
val strInputs = inputs.mkString("\\n")
s"""$node(${term.mkString(", ")})
|$strInputs
|""".stripMargin.stripLineEnd
}
def values(node: String, term: String*): String = {
s"$node(${term.mkString(", ")})"
}
def term(term: AnyRef, value: AnyRef*): String = {
s"$term=[${value.mkString(", ")}]"
}
def tuples(value: List[AnyRef]*): String = {
val listValues = value.map(listValue => s"{ ${listValue.mkString(", ")} }")
term("tuples", "[" + listValues.mkString(", ") + "]")
}
def batchTableNode(idx: Int): String = {
s"DataSetScan(table=[[_DataSetTable_$idx]])"
}
def streamTableNode(idx: Int): String = {
s"DataStreamScan(table=[[_DataStreamTable_$idx]])"
}
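// Illustrative usage of the helpers above (an example, not part of the tests):
// composing an expected plan for a projection over the first stream table,
//   unaryNode("DataStreamCalc", streamTableNode(0), term("select", "a", "b"))
// produces the two-line plan string
//   DataStreamCalc(select=[a, b])
//   DataStreamScan(table=[[_DataStreamTable_0]])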
}
case class BatchTableTestUtil() extends TableTestUtil {
val javaEnv = new LocalEnvironment()
val javaTableEnv = TableEnvironment.getTableEnvironment(javaEnv)
val env = new ExecutionEnvironment(javaEnv)
val tableEnv = TableEnvironment.getTableEnvironment(env)
def addTable[T: TypeInformation](
name: String,
fields: Expression*)
: Table = {
val ds = mock(classOf[DataSet[T]])
val jDs = mock(classOf[JDataSet[T]])
when(ds.javaSet).thenReturn(jDs)
val typeInfo: TypeInformation[T] = implicitly[TypeInformation[T]]
when(jDs.getType).thenReturn(typeInfo)
val t = ds.toTable(tableEnv, fields: _*)
tableEnv.registerTable(name, t)
t
}
def addJavaTable[T](typeInfo: TypeInformation[T], name: String, fields: String): Table = {
val jDs = mock(classOf[JDataSet[T]])
when(jDs.getType).thenReturn(typeInfo)
val t = javaTableEnv.fromDataSet(jDs, fields)
javaTableEnv.registerTable(name, t)
t
}
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T])
: TableFunction[T] = {
tableEnv.registerFunction(name, function)
function
}
def addFunction(name: String, function: ScalarFunction): Unit = {
tableEnv.registerFunction(name, function)
}
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = {
tableEnv.registerFunction(name, function)
}
def verifySql(query: String, expected: String): Unit = {
verifyTable(tableEnv.sqlQuery(query), expected)
}
def verifyTable(resultTable: Table, expected: String): Unit = {
val relNode = resultTable.getRelNode
val optimized = tableEnv.optimize(relNode)
verifyString(expected, optimized)
}
def verifyJavaSql(query: String, expected: String): Unit = {
verifyJavaTable(javaTableEnv.sqlQuery(query), expected)
}
def verifyJavaTable(resultTable: Table, expected: String): Unit = {
val relNode = resultTable.getRelNode
val optimized = javaTableEnv.optimize(relNode)
verifyString(expected, optimized)
}
def printTable(resultTable: Table): Unit = {
val relNode = resultTable.getRelNode
val optimized = tableEnv.optimize(relNode)
println(RelOptUtil.toString(optimized))
}
def printSql(query: String): Unit = {
printTable(tableEnv.sqlQuery(query))
}
def explain(resultTable: Table): String = {
tableEnv.explain(resultTable)
}
}
case class StreamTableTestUtil() extends TableTestUtil {
val javaEnv = new LocalStreamEnvironment()
javaEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val javaTableEnv = TableEnvironment.getTableEnvironment(javaEnv)
val env = new StreamExecutionEnvironment(javaEnv)
val tableEnv = TableEnvironment.getTableEnvironment(env)
def addTable[T: TypeInformation](
name: String,
fields: Expression*)
: Table = {
val table = env.fromElements().toTable(tableEnv, fields: _*)
tableEnv.registerTable(name, table)
table
}
def addJavaTable[T](typeInfo: TypeInformation[T], name: String, fields: String): Table = {
val stream = javaEnv.addSource(new EmptySource[T], typeInfo)
val table = javaTableEnv.fromDataStream(stream, fields)
javaTableEnv.registerTable(name, table)
table
}
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T])
: TableFunction[T] = {
tableEnv.registerFunction(name, function)
function
}
def addFunction(name: String, function: ScalarFunction): Unit = {
tableEnv.registerFunction(name, function)
}
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = {
tableEnv.registerFunction(name, function)
}
def verifySql(query: String, expected: String): Unit = {
verifyTable(tableEnv.sqlQuery(query), expected)
}
def verifySqlPlansIdentical(query1: String, queries: String*): Unit = {
val resultTable1 = tableEnv.sqlQuery(query1)
queries.foreach(s => verify2Tables(resultTable1, tableEnv.sqlQuery(s)))
}
def verifyTable(resultTable: Table, expected: String): Unit = {
val relNode = resultTable.getRelNode
val optimized = tableEnv.optimize(relNode, updatesAsRetraction = false)
verifyString(expected, optimized)
}
def verify2Tables(resultTable1: Table, resultTable2: Table): Unit = {
val relNode1 = resultTable1.getRelNode
val optimized1 = tableEnv.optimize(relNode1, updatesAsRetraction = false)
val relNode2 = resultTable2.getRelNode
val optimized2 = tableEnv.optimize(relNode2, updatesAsRetraction = false)
assertEquals(RelOptUtil.toString(optimized1), RelOptUtil.toString(optimized2))
}
def verifyJavaSql(query: String, expected: String): Unit = {
verifyJavaTable(javaTableEnv.sqlQuery(query), expected)
}
def verifyJavaTable(resultTable: Table, expected: String): Unit = {
val relNode = resultTable.getRelNode
val optimized = javaTableEnv.optimize(relNode, updatesAsRetraction = false)
verifyString(expected, optimized)
}
// the print methods are for debugging purposes only
def printTable(resultTable: Table): Unit = {
val relNode = resultTable.getRelNode
val optimized = tableEnv.optimize(relNode, updatesAsRetraction = false)
println(RelOptUtil.toString(optimized))
}
def printSql(query: String): Unit = {
printTable(tableEnv.sqlQuery(query))
}
def explain(resultTable: Table): String = {
tableEnv.explain(resultTable)
}
}
class EmptySource[T]() extends SourceFunction[T] {
override def run(ctx: SourceFunction.SourceContext[T]): Unit = {
}
override def cancel(): Unit = {
}
}
|
mylog00/flink
|
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/utils/TableTestBase.scala
|
Scala
|
apache-2.0
| 11,910
|
package slick.codegen
import slick.SlickException
import slick.ast.ColumnOption
import slick.{model => m}
import slick.model.ForeignKeyAction
import slick.relational.RelationalProfile
import slick.sql.SqlProfile
/** Base implementation for a Source code String generator */
abstract class AbstractSourceCodeGenerator(model: m.Model)
extends AbstractGenerator[String,String,String](model)
with StringGeneratorHelpers{
/** Generates code for the complete model (not wrapped in a package yet)
@group Basic customization overrides */
def code = {
"import slick.model.ForeignKeyAction\\n" +
( if(tables.exists(_.hlistEnabled)){
"import slick.collection.heterogeneous._\\n"+
"import slick.collection.heterogeneous.syntax._\\n"
} else ""
) +
( if(tables.exists(_.PlainSqlMapper.enabled)){
"// NOTE: GetResult mappers for plain SQL are only generated for tables where Slick knows how to map the types of all columns.\\n"+
"import slick.jdbc.{GetResult => GR}\\n"
} else ""
) +
(if(ddlEnabled){
"\\n/** DDL for all tables. Call .create to execute. */" +
(
if(tables.length > 5)
"\\nlazy val schema: profile.SchemaDescription = Array(" + tables.map(_.TableValue.name + ".schema").mkString(", ") + ").reduceLeft(_ ++ _)"
else if(tables.nonEmpty)
"\\nlazy val schema: profile.SchemaDescription = " + tables.map(_.TableValue.name + ".schema").mkString(" ++ ")
else
"\\nlazy val schema: profile.SchemaDescription = profile.DDL(Nil, Nil)"
) +
"\\n@deprecated(\\"Use .schema instead of .ddl\\", \\"3.0\\")"+
"\\ndef ddl = schema" +
"\\n\\n"
} else "") +
tables.map(_.code.mkString("\\n")).mkString("\\n\\n")
}
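// For example, a model with more than 5 tables generates
//   lazy val schema: profile.SchemaDescription = Array(T1.schema, T2.schema, ...).reduceLeft(_ ++ _)
// presumably to keep the generated expression flat for large models.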
protected def tuple(i: Int) = termName(s"_${i+1}")
abstract class TableDef(model: m.Table) extends super.TableDef(model){
def compoundType(types: Seq[String]): String = {
if(hlistEnabled){
def mkHList(types: List[String]): String = types match {
case Nil => "HNil"
case e :: tail => s"HCons[$e," + mkHList(tail) + "]"
}
mkHList(types.toList)
}
else compoundValue(types)
}
def compoundValue(values: Seq[String]): String = {
if(hlistEnabled) values.mkString(" :: ") + " :: HNil"
else if (values.size == 1) values.head
else if(values.size <= 22) s"""(${values.mkString(", ")})"""
else throw new Exception("Cannot generate tuple for > 22 columns, please set hlistEnabled=true or override compound.")
}
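// For example, compoundValue(Seq("a", "b", "c")) renders "(a, b, c)" with
// tuples, or "a :: b :: c :: HNil" when hlistEnabled is set; compoundType
// renders the corresponding HCons[A,HCons[B,HCons[C,HNil]]] type instead.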
def factory = if(columns.size == 1) TableClass.elementType else s"${TableClass.elementType}.tupled"
def extractor = s"${TableClass.elementType}.unapply"
trait EntityTypeDef extends super.EntityTypeDef{
def code = {
val args = columns.map(c=>
c.default.map( v =>
s"${c.name}: ${c.exposedType} = $v"
).getOrElse(
s"${c.name}: ${c.exposedType}"
)
).mkString(", ")
if(classEnabled){
val prns = (parents.take(1).map(" extends "+_) ++ parents.drop(1).map(" with "+_)).mkString("")
(if(caseClassFinal) "final " else "") +
s"""case class $name($args)$prns"""
} else {
if(columns.size > 254)
s"type $name = $types" // constructor method would exceed JVM parameter limit
else s"""
type $name = $types
/** Constructor for $name providing default values if available in the database schema. */
def $name($args): $name = {
${compoundValue(columns.map(_.name))}
}
""".trim
}
}
}
trait PlainSqlMapperDef extends super.PlainSqlMapperDef{
def code = {
val positional = compoundValue(columnsPositional.map(c => (if(c.asOption || c.model.nullable)s"<<?[${c.rawType}]"else s"<<[${c.rawType}]")))
val dependencies = columns.map(_.exposedType).distinct.zipWithIndex.map{ case (t,i) => s"""e$i: GR[$t]"""}.mkString(", ")
val rearranged = compoundValue(desiredColumnOrder.map(i => if(hlistEnabled) s"r($i)" else tuple(i)))
def result(args: String) = if(mappingEnabled) s"$factory($args)" else args
val body =
if(autoIncLast && columns.size > 1){
s"""
val r = $positional
import r._
${result(rearranged)} // putting AutoInc last
""".trim
} else
result(positional)
s"""
implicit def ${name}(implicit $dependencies): GR[${TableClass.elementType}] = GR{
prs => import prs._
${indent(body)}
}
""".trim
}
}
trait TableClassDef extends super.TableClassDef{
def star = {
val struct = compoundValue(columns.map(c=>if(c.asOption)s"Rep.Some(${c.name})" else s"${c.name}"))
val rhs = if(mappingEnabled) s"$struct <> ($factory, $extractor)" else struct
s"def * = $rhs"
}
def option = {
val struct = compoundValue(columns.map(c=>if(c.model.nullable)s"${c.name}" else s"Rep.Some(${c.name})"))
val rhs = if(mappingEnabled) s"""$struct.shaped.<>($optionFactory, (_:Any) => throw new Exception("Inserting into ? projection not supported."))""" else struct
s"def ? = $rhs"
}
def optionFactory = {
val accessors = columns.zipWithIndex.map{ case(c,i) =>
val accessor = if(columns.size > 1) tuple(i) else "r"
if(c.asOption || c.model.nullable) accessor else s"$accessor.get"
}
val fac = s"$factory(${compoundValue(accessors)})"
val discriminator = columns.zipWithIndex.collect{ case (c,i) if !c.model.nullable => if(columns.size > 1) tuple(i) else "r" }.headOption
val expr = discriminator.map(d => s"$d.map(_=> $fac)").getOrElse(s"None")
if(columns.size > 1)
s"{r=>import r._; $expr}"
else
s"r => $expr"
}
def code = {
val prns = parents.map(" with " + _).mkString("")
val args = model.name.schema.map(n => s"""Some("$n")""") ++ Seq("\\""+model.name.table+"\\"")
s"""
class $name(_tableTag: Tag) extends profile.api.Table[$elementType](_tableTag, ${args.mkString(", ")})$prns {
${indent(body.map(_.mkString("\\n")).mkString("\\n\\n"))}
}
""".trim()
}
}
trait TableValueDef extends super.TableValueDef{
def code = s"lazy val $name = new TableQuery(tag => new ${TableClass.name}(tag))"
}
class ColumnDef(model: m.Column) extends super.ColumnDef(model){
import ColumnOption._
import RelationalProfile.ColumnOption._
import SqlProfile.ColumnOption._
def columnOptionCode = {
case ColumnOption.PrimaryKey => Some(s"O.PrimaryKey")
case Default(value) => Some(s"O.Default(${default.get})") // .get is safe here
case SqlType(dbType) => Some(s"""O.SqlType("$dbType")""")
case Length(length,varying) => Some(s"O.Length($length,varying=$varying)")
case AutoInc => Some(s"O.AutoInc")
case Unique => Some(s"O.Unique")
case NotNull|Nullable => throw new SlickException( s"Please don't use Nullable or NotNull column options. Use an Option type, respectively the nullable flag in Slick's model class Column." )
case o => None // throw new SlickException( s"Don't know how to generate code for unexpected ColumnOption $o." )
}
def defaultCode = {
case Some(v) => s"Some(${defaultCode(v)})"
case s: String if rawType == "java.sql.Timestamp" => s
case s:String => "\\""+s.replaceAll("\\"", """\\\\"""")+"\\""
case None => s"None"
case v:Byte => s"$v"
case v:Int => s"$v"
case v:Long => s"${v}L"
case v:Float => s"${v}F"
case v:Double => s"$v"
case v:Boolean => s"$v"
case v:Short => s"$v"
case v:Char => s"'$v'"
case v:BigDecimal => s"""scala.math.BigDecimal(\\"$v\\")"""
case v: java.sql.Timestamp => s"""java.sql.Timestamp.valueOf("${v}")"""
case v => throw new SlickException( s"Don't know how to generate code for default value $v of ${v.getClass}. Override def defaultCode to render the value." )
}
// Explicit type to allow overloading existing Slick method names.
// Explicit type argument for better error message when implicit type mapper not found.
def code = s"""val $name: Rep[$actualType] = column[$actualType]("${model.name}"${options.map(", "+_).mkString("")})"""
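// Illustrative example: a non-nullable VARCHAR(255) column named "name" with
// SqlType and Length options in the model would render roughly as
//   val name: Rep[String] = column[String]("name", O.SqlType("VARCHAR"), O.Length(255,varying=true))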
}
class PrimaryKeyDef(model: m.PrimaryKey) extends super.PrimaryKeyDef(model){
def code = s"""val $name = primaryKey("$dbName", ${compoundValue(columns.map(_.name))})"""
}
class ForeignKeyDef(model: m.ForeignKey) extends super.ForeignKeyDef(model){
def actionCode(action: ForeignKeyAction) = action match{
case ForeignKeyAction.Cascade => "ForeignKeyAction.Cascade"
case ForeignKeyAction.Restrict => "ForeignKeyAction.Restrict"
case ForeignKeyAction.NoAction => "ForeignKeyAction.NoAction"
case ForeignKeyAction.SetNull => "ForeignKeyAction.SetNull"
case ForeignKeyAction.SetDefault => "ForeignKeyAction.SetDefault"
}
def code = {
val pkTable = referencedTable.TableValue.name
val (pkColumns, fkColumns) = (referencedColumns, referencingColumns).zipped.map { (p, f) =>
val pk = s"r.${p.name}"
val fk = f.name
if(p.model.nullable && !f.model.nullable) (pk, s"Rep.Some($fk)")
else if(!p.model.nullable && f.model.nullable) (s"Rep.Some($pk)", fk)
else (pk, fk)
}.unzip
s"""lazy val $name = foreignKey("$dbName", ${compoundValue(fkColumns)}, $pkTable)(r => ${compoundValue(pkColumns)}, onUpdate=${onUpdate}, onDelete=${onDelete})"""
}
}
class IndexDef(model: m.Index) extends super.IndexDef(model){
def code = {
val unique = if(model.unique) s", unique=true" else ""
s"""val $name = index("$dbName", ${compoundValue(columns.map(_.name))}$unique)"""
}
}
}
}
trait StringGeneratorHelpers extends slick.codegen.GeneratorHelpers[String,String,String]{
def docWithCode(doc: String, code:String): String = (if(doc != "") "/** "+doc.split("\\n").mkString("\\n * ")+" */\\n" else "") + code
final def optionType(t: String) = s"Option[$t]"
def parseType(tpe: String): String = tpe
def shouldQuoteIdentifier(s: String) = {
def isIdent =
if(s.isEmpty) false
else Character.isJavaIdentifierStart(s.head) && s.tail.forall(Character.isJavaIdentifierPart)
scalaKeywords.contains(s) || !isIdent
}
def termName( name: String ) = if(shouldQuoteIdentifier(name)) "`"+name+"`" else name
def typeName( name: String ) = if(shouldQuoteIdentifier(name)) "`"+name+"`" else name
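// For example, termName("type") yields `type` (backquoted, since "type" is a
// Scala keyword), while termName("name") returns the identifier unchanged.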
}
|
marko-asplund/slick
|
slick-codegen/src/main/scala/slick/codegen/AbstractSourceCodeGenerator.scala
|
Scala
|
bsd-2-clause
| 10,794
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io._
import java.util.Properties
import org.apache.kafka.common.errors.{CorruptRecordException, OffsetOutOfRangeException, RecordBatchTooLargeException, RecordTooLargeException}
import kafka.api.ApiVersion
import kafka.common.LongRef
import org.junit.Assert._
import org.scalatest.junit.JUnitSuite
import org.junit.{After, Before, Test}
import kafka.message._
import kafka.utils._
import kafka.server.KafkaConfig
import org.apache.kafka.common.utils.Utils
class LogTest extends JUnitSuite {
val tmpDir = TestUtils.tempDir()
val logDir = TestUtils.randomPartitionLogDir(tmpDir)
val time = new MockTime()
var config: KafkaConfig = null
val logConfig = LogConfig()
@Before
def setUp() {
val props = TestUtils.createBrokerConfig(0, "127.0.0.1:1", port = -1)
config = KafkaConfig.fromProps(props)
}
@After
def tearDown() {
Utils.delete(tmpDir)
}
def createEmptyLogs(dir: File, offsets: Int*) {
for(offset <- offsets) {
Log.logFilename(dir, offset).createNewFile()
Log.indexFilename(dir, offset).createNewFile()
}
}
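// Note: Log.logFilename/indexFilename zero-pad the offset, so
// createEmptyLogs(dir, 0) creates files named like
// "00000000000000000000.log" and "00000000000000000000.index".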
/**
* Tests for time based log roll. This test appends messages then changes the time
* using the mock clock to force the log to roll and checks the number of segments.
*/
@Test
def testTimeBasedLogRoll() {
val set = TestUtils.singleMessageSet("test".getBytes)
val logProps = new Properties()
logProps.put(LogConfig.SegmentMsProp, (1 * 60 * 60L): java.lang.Long)
// create a log
val log = new Log(logDir,
LogConfig(logProps),
recoveryPoint = 0L,
scheduler = time.scheduler,
time = time)
assertEquals("Log begins with a single empty segment.", 1, log.numberOfSegments)
// Test the segment rolling behavior when messages do not have a timestamp.
time.sleep(log.config.segmentMs + 1)
log.append(set)
assertEquals("Log doesn't roll if doing so creates an empty segment.", 1, log.numberOfSegments)
log.append(set)
assertEquals("Log rolls on this append since time has expired.", 2, log.numberOfSegments)
for(numSegments <- 3 until 5) {
time.sleep(log.config.segmentMs + 1)
log.append(set)
assertEquals("Changing time beyond rollMs and appending should create a new segment.", numSegments, log.numberOfSegments)
}
// Append a message with a timestamp to a segment whose first message does not have a timestamp.
val setWithTimestamp =
TestUtils.singleMessageSet(payload = "test".getBytes, timestamp = time.milliseconds + log.config.segmentMs + 1)
log.append(setWithTimestamp)
assertEquals("Segment should not have been rolled out because the log rolling should be based on wall clock.", 4, log.numberOfSegments)
// Test the segment rolling behavior when messages have timestamps.
time.sleep(log.config.segmentMs + 1)
log.append(setWithTimestamp)
assertEquals("A new segment should have been rolled out", 5, log.numberOfSegments)
// move the wall clock beyond log rolling time
time.sleep(log.config.segmentMs + 1)
log.append(setWithTimestamp)
assertEquals("Log should not roll because the roll should depend on timestamp of the first message.", 5, log.numberOfSegments)
val setWithExpiredTimestamp = TestUtils.singleMessageSet(payload = "test".getBytes, timestamp = time.milliseconds)
log.append(setWithExpiredTimestamp)
assertEquals("Log should roll because the timestamp in the message should make the log segment expire.", 6, log.numberOfSegments)
val numSegments = log.numberOfSegments
time.sleep(log.config.segmentMs + 1)
log.append(new ByteBufferMessageSet())
assertEquals("Appending an empty message set should not roll log even if succient time has passed.", numSegments, log.numberOfSegments)
}
/**
* Test of jitter for time-based log roll. This test appends messages then changes the time
* using the mock clock to force the log to roll and checks the number of segments.
*/
@Test
def testTimeBasedLogRollJitter() {
val set = TestUtils.singleMessageSet("test".getBytes)
val maxJitter = 20 * 60L
val logProps = new Properties()
logProps.put(LogConfig.SegmentMsProp, 1 * 60 * 60L: java.lang.Long)
logProps.put(LogConfig.SegmentJitterMsProp, maxJitter: java.lang.Long)
// create a log
val log = new Log(logDir,
LogConfig(logProps),
recoveryPoint = 0L,
scheduler = time.scheduler,
time = time)
assertEquals("Log begins with a single empty segment.", 1, log.numberOfSegments)
log.append(set)
time.sleep(log.config.segmentMs - maxJitter)
log.append(set)
assertEquals("Log does not roll on this append because it occurs earlier than max jitter", 1, log.numberOfSegments)
time.sleep(maxJitter - log.activeSegment.rollJitterMs + 1)
log.append(set)
assertEquals("Log should roll after segmentMs adjusted by random jitter", 2, log.numberOfSegments)
}
/**
* Test that appending more than the maximum segment size rolls the log
*/
@Test
def testSizeBasedLogRoll() {
val set = TestUtils.singleMessageSet("test".getBytes)
val setSize = set.sizeInBytes
val msgPerSeg = 10
val segmentSize = msgPerSeg * (setSize - 1) // each segment will be 10 messages
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer)
// We need to use magic value 1 here because the test is message size sensitive.
logProps.put(LogConfig.MessageFormatVersionProp, ApiVersion.latestVersion.toString)
// create a log
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
// appending should roll a new segment once the size limit is reached
for (_ <- 1 to (msgPerSeg + 1)) {
log.append(set)
}
assertEquals("There should be exactly 2 segments.", 2, log.numberOfSegments)
}
/**
* Test that we can open and append to an empty log
*/
@Test
def testLoadEmptyLog() {
createEmptyLogs(logDir, 0)
val log = new Log(logDir, logConfig, recoveryPoint = 0L, time.scheduler, time = time)
log.append(TestUtils.singleMessageSet("test".getBytes))
}
/**
* This test case appends a bunch of messages and checks that we can read them all back using sequential offsets.
*/
@Test
def testAppendAndReadWithSequentialOffsets() {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 71: java.lang.Integer)
// We need to use magic value 1 here because the test is message size sensitive.
logProps.put(LogConfig.MessageFormatVersionProp, ApiVersion.latestVersion.toString)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
val messages = (0 until 100 by 2).map(id => new Message(id.toString.getBytes)).toArray
for(i <- 0 until messages.length)
log.append(new ByteBufferMessageSet(NoCompressionCodec, messages = messages(i)))
for(i <- 0 until messages.length) {
val read = log.read(i, 100, Some(i+1)).messageSet.head
assertEquals("Offset read should match order appended.", i, read.offset)
assertEquals("Message should match appended.", messages(i), read.message)
}
assertEquals("Reading beyond the last message returns nothing.", 0, log.read(messages.length, 100, None).messageSet.size)
}
/**
* This test appends a bunch of messages with non-sequential offsets and checks that we can read the correct message
* from any offset less than the logEndOffset including offsets not appended.
*/
@Test
def testAppendAndReadWithNonSequentialOffsets() {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 71: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
val messageIds = ((0 until 50) ++ (50 until 200 by 7)).toArray
val messages = messageIds.map(id => new Message(id.toString.getBytes))
// now test the case where we assign offsets ourselves and they are non-sequential
for(i <- 0 until messages.length)
log.append(new ByteBufferMessageSet(NoCompressionCodec, new LongRef(messageIds(i)), messages = messages(i)), assignOffsets = false)
for(i <- 50 until messageIds.max) {
val idx = messageIds.indexWhere(_ >= i)
val read = log.read(i, 100, None).messageSet.head
assertEquals("Offset read should match message id.", messageIds(idx), read.offset)
assertEquals("Message should match appended.", messages(idx), read.message)
}
}
/**
* This test covers an odd case where we have a gap in the offsets that falls at the end of a log segment.
* Specifically we create a log where the last message in the first segment has offset 0. If we
* then read offset 1, we should expect this read to come from the second segment, even though the
* first segment has the greatest lower bound on the offset.
*/
@Test
def testReadAtLogGap() {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 300: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
// keep appending until we have two segments with only a single message in the second segment
while(log.numberOfSegments == 1)
log.append(new ByteBufferMessageSet(NoCompressionCodec, messages = new Message("42".getBytes)))
// now manually truncate off all but one message from the first segment to create a gap in the messages
log.logSegments.head.truncateTo(1)
assertEquals("A read should now return the last message in the log", log.logEndOffset - 1, log.read(1, 200, None).messageSet.head.offset)
}
@Test
def testReadWithMinMessage() {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 71: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
val messageIds = ((0 until 50) ++ (50 until 200 by 7)).toArray
val messages = messageIds.map(id => new Message(id.toString.getBytes))
// now test the case where we assign offsets ourselves and they are non-sequential
for (i <- 0 until messages.length)
log.append(new ByteBufferMessageSet(NoCompressionCodec, new LongRef(messageIds(i)), messages = messages(i)),
assignOffsets = false)
for (i <- 50 until messageIds.max) {
val idx = messageIds.indexWhere(_ >= i)
val reads = Seq(
log.read(i, 1, minOneMessage = true),
log.read(i, 100, minOneMessage = true),
log.read(i, 100, Some(10000), minOneMessage = true)
).map(_.messageSet.head)
reads.foreach { read =>
assertEquals("Offset read should match message id.", messageIds(idx), read.offset)
assertEquals("Message should match appended.", messages(idx), read.message)
}
assertEquals(Seq.empty, log.read(i, 1, Some(1), minOneMessage = true).messageSet.toIndexedSeq)
}
}
@Test
def testReadWithTooSmallMaxLength() {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 71: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
val messageIds = ((0 until 50) ++ (50 until 200 by 7)).toArray
val messages = messageIds.map(id => new Message(id.toString.getBytes))
// now test the case where we assign offsets ourselves and they are non-sequential
for (i <- 0 until messages.length)
log.append(new ByteBufferMessageSet(NoCompressionCodec, new LongRef(messageIds(i)), messages = messages(i)),
assignOffsets = false)
for (i <- 50 until messageIds.max) {
assertEquals(MessageSet.Empty, log.read(i, 0).messageSet)
// we return an incomplete message instead of an empty one for the case below
// we use this mechanism to tell consumers of the fetch request version 2 and below that the message size is
// larger than the fetch size
// in fetch request version 3, we no longer need this as we return oversized messages from the first non-empty
// partition
val fetchInfo = log.read(i, 1)
assertTrue(fetchInfo.firstMessageSetIncomplete)
assertTrue(fetchInfo.messageSet.isInstanceOf[FileMessageSet])
assertEquals(1, fetchInfo.messageSet.sizeInBytes)
}
}
/**
* Test reading at the boundary of the log, specifically
* - reading from the logEndOffset should give an empty message set
* - reading from the maxOffset should give an empty message set
* - reading beyond the log end offset should throw an OffsetOutOfRangeException
*/
@Test
def testReadOutOfRange() {
createEmptyLogs(logDir, 1024)
val logProps = new Properties()
// set up replica log starting with offset 1024 and with one message (at offset 1024)
logProps.put(LogConfig.SegmentBytesProp, 1024: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
log.append(new ByteBufferMessageSet(NoCompressionCodec, messages = new Message("42".getBytes)))
assertEquals("Reading at the log end offset should produce 0 byte read.", 0, log.read(1025, 1000).messageSet.sizeInBytes)
try {
log.read(0, 1000)
fail("Reading below the log start offset should throw OffsetOutOfRangeException")
} catch {
case _: OffsetOutOfRangeException => // This is good.
}
try {
log.read(1026, 1000)
fail("Reading at beyond the log end offset should throw OffsetOutOfRangeException")
} catch {
case _: OffsetOutOfRangeException => // This is good.
}
assertEquals("Reading from below the specified maxOffset should produce 0 byte read.", 0, log.read(1025, 1000, Some(1024)).messageSet.sizeInBytes)
}
/**
* Test that covers reads and writes on a multisegment log. This test appends a bunch of messages
* and then reads them all back and checks that the message read and offset matches what was appended.
*/
@Test
def testLogRolls() {
/* create a multipart log with 100 messages */
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 100: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
val numMessages = 100
val messageSets = (0 until numMessages).map(i => TestUtils.singleMessageSet(i.toString.getBytes))
messageSets.foreach(log.append(_))
log.flush()
/* do successive reads to ensure all our messages are there */
var offset = 0L
for(i <- 0 until numMessages) {
val messages = log.read(offset, 1024*1024).messageSet
assertEquals("Offsets not equal", offset, messages.head.offset)
assertEquals("Messages not equal at offset " + offset, messageSets(i).head.message,
messages.head.message.toFormatVersion(messageSets(i).head.message.magic))
offset = messages.head.offset + 1
}
val lastRead = log.read(startOffset = numMessages, maxLength = 1024*1024, maxOffset = Some(numMessages + 1)).messageSet
assertEquals("Should be no more messages", 0, lastRead.size)
// check that rolling the log forced a flush of the log -- the flush is async so retry in case of failure
TestUtils.retry(1000L){
assertTrue("Log roll should have forced flush", log.recoveryPoint >= log.activeSegment.baseOffset)
}
}
/**
* Test reads at offsets that fall within compressed message set boundaries.
*/
@Test
def testCompressedMessages() {
/* this log should roll after every messageset */
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 100: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
/* append 2 compressed message sets, each with two messages giving offsets 0, 1, 2, 3 */
log.append(new ByteBufferMessageSet(DefaultCompressionCodec, new Message("hello".getBytes), new Message("there".getBytes)))
log.append(new ByteBufferMessageSet(DefaultCompressionCodec, new Message("alpha".getBytes), new Message("beta".getBytes)))
def read(offset: Int) = ByteBufferMessageSet.deepIterator(log.read(offset, 4096).messageSet.head)
/* we should always get the first message in the compressed set when reading any offset in the set */
assertEquals("Read at offset 0 should produce 0", 0, read(0).next().offset)
assertEquals("Read at offset 1 should produce 0", 0, read(1).next().offset)
assertEquals("Read at offset 2 should produce 2", 2, read(2).next().offset)
assertEquals("Read at offset 3 should produce 2", 2, read(3).next().offset)
}
/**
* Test garbage collecting old segments
*/
@Test
def testThatGarbageCollectingSegmentsDoesntChangeOffset() {
for(messagesToAppend <- List(0, 1, 25)) {
logDir.mkdirs()
// first test a log segment starting at 0
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 100: java.lang.Integer)
logProps.put(LogConfig.RetentionMsProp, 0: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
for(i <- 0 until messagesToAppend)
log.append(TestUtils.singleMessageSet(payload = i.toString.getBytes, timestamp = time.milliseconds - 10))
val currOffset = log.logEndOffset
assertEquals(messagesToAppend.toLong, currOffset)
// time goes by; the log file is deleted
log.deleteOldSegments()
assertEquals("Deleting segments shouldn't have changed the logEndOffset", currOffset, log.logEndOffset)
assertEquals("We should still have one segment left", 1, log.numberOfSegments)
assertEquals("Further collection shouldn't delete anything", 0, log.deleteOldSegments())
assertEquals("Still no change in the logEndOffset", currOffset, log.logEndOffset)
assertEquals("Should still be able to append and should get the logEndOffset assigned to the new append",
currOffset,
log.append(TestUtils.singleMessageSet("hello".getBytes)).firstOffset)
// cleanup the log
log.delete()
}
}
/**
* MessageSet size shouldn't exceed config.segmentSize; check that this is properly enforced by
* appending a message set larger than the config.segmentSize setting and checking that an exception is thrown.
*/
@Test
def testMessageSetSizeCheck() {
val messageSet = new ByteBufferMessageSet(NoCompressionCodec, new Message ("You".getBytes), new Message("bethe".getBytes))
// append messages to log
val configSegmentSize = messageSet.sizeInBytes - 1
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, configSegmentSize: java.lang.Integer)
// We need to use magic value 1 here because the test is message size sensitive.
logProps.put(LogConfig.MessageFormatVersionProp, ApiVersion.latestVersion.toString)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
try {
log.append(messageSet)
fail("message set should throw RecordBatchTooLargeException.")
} catch {
case _: RecordBatchTooLargeException => // this is good
}
}
@Test
def testCompactedTopicConstraints() {
val keyedMessage = new Message(bytes = "this message has a key".getBytes, key = "and here it is".getBytes, Message.NoTimestamp, Message.CurrentMagicValue)
val anotherKeyedMessage = new Message(bytes = "this message also has a key".getBytes, key ="another key".getBytes, Message.NoTimestamp, Message.CurrentMagicValue)
val unkeyedMessage = new Message(bytes = "this message does not have a key".getBytes)
val messageSetWithUnkeyedMessage = new ByteBufferMessageSet(NoCompressionCodec, unkeyedMessage, keyedMessage)
val messageSetWithOneUnkeyedMessage = new ByteBufferMessageSet(NoCompressionCodec, unkeyedMessage)
val messageSetWithCompressedKeyedMessage = new ByteBufferMessageSet(GZIPCompressionCodec, keyedMessage)
val messageSetWithCompressedUnkeyedMessage = new ByteBufferMessageSet(GZIPCompressionCodec, keyedMessage, unkeyedMessage)
val messageSetWithKeyedMessage = new ByteBufferMessageSet(NoCompressionCodec, keyedMessage)
val messageSetWithKeyedMessages = new ByteBufferMessageSet(NoCompressionCodec, keyedMessage, anotherKeyedMessage)
val logProps = new Properties()
logProps.put(LogConfig.CleanupPolicyProp, LogConfig.Compact)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time)
try {
log.append(messageSetWithUnkeyedMessage)
fail("Compacted topics cannot accept a message without a key.")
} catch {
case _: CorruptRecordException => // this is good
}
try {
log.append(messageSetWithOneUnkeyedMessage)
fail("Compacted topics cannot accept a message without a key.")
} catch {
case _: CorruptRecordException => // this is good
}
try {
log.append(messageSetWithCompressedUnkeyedMessage)
fail("Compacted topics cannot accept a message without a key.")
} catch {
case _: CorruptRecordException => // this is good
}
// the following should succeed without any InvalidMessageException
log.append(messageSetWithKeyedMessage)
log.append(messageSetWithKeyedMessages)
log.append(messageSetWithCompressedKeyedMessage)
}
/**
* We have a max size limit on message appends; check that it is properly enforced by appending a message larger than the
* setting and checking that an exception is thrown.
*/
@Test
def testMessageSizeCheck() {
val first = new ByteBufferMessageSet(NoCompressionCodec, new Message ("You".getBytes), new Message("bethe".getBytes))
val second = new ByteBufferMessageSet(NoCompressionCodec, new Message("change (I need more bytes)".getBytes))
// append messages to log
val maxMessageSize = second.sizeInBytes - 1
val logProps = new Properties()
logProps.put(LogConfig.MaxMessageBytesProp, maxMessageSize: java.lang.Integer)
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, time.scheduler, time = time)
// should be able to append the small message
log.append(first)
try {
log.append(second)
fail("Second message set should throw MessageSizeTooLargeException.")
} catch {
case _: RecordTooLargeException => // this is good
}
}
/**
* Append a bunch of messages to a log and then re-open it both with and without recovery and check that the log re-initializes correctly.
*/
@Test
def testLogRecoversToCorrectOffset() {
val numMessages = 100
val messageSize = 100
val segmentSize = 7 * messageSize
val indexInterval = 3 * messageSize
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, indexInterval: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 4096: java.lang.Integer)
val config = LogConfig(logProps)
var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
for(i <- 0 until numMessages)
log.append(TestUtils.singleMessageSet(payload = TestUtils.randomBytes(messageSize),
timestamp = time.milliseconds + i * 10))
assertEquals("After appending %d messages to an empty log, the log end offset should be %d".format(numMessages, numMessages), numMessages, log.logEndOffset)
val lastIndexOffset = log.activeSegment.index.lastOffset
val numIndexEntries = log.activeSegment.index.entries
val lastOffset = log.logEndOffset
// After segment is closed, the last entry in the time index should be (largest timestamp -> last offset).
val lastTimeIndexOffset = log.logEndOffset - 1
val lastTimeIndexTimestamp = log.activeSegment.largestTimestamp
// Depending on when the last time index entry is inserted, an entry may or may not be inserted into the time index.
val numTimeIndexEntries = log.activeSegment.timeIndex.entries + {
if (log.activeSegment.timeIndex.lastEntry.offset == log.logEndOffset - 1) 0 else 1
}
log.close()
def verifyRecoveredLog(log: Log) {
assertEquals(s"Should have $numMessages messages when log is reopened w/o recovery", numMessages, log.logEndOffset)
assertEquals("Should have same last index offset as before.", lastIndexOffset, log.activeSegment.index.lastOffset)
assertEquals("Should have same number of index entries as before.", numIndexEntries, log.activeSegment.index.entries)
assertEquals("Should have same last time index timestamp", lastTimeIndexTimestamp, log.activeSegment.timeIndex.lastEntry.timestamp)
assertEquals("Should have same last time index offset", lastTimeIndexOffset, log.activeSegment.timeIndex.lastEntry.offset)
assertEquals("Should have same number of time index entries as before.", numTimeIndexEntries, log.activeSegment.timeIndex.entries)
}
log = new Log(logDir, config, recoveryPoint = lastOffset, time.scheduler, time)
verifyRecoveredLog(log)
log.close()
// test recovery case
log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
verifyRecoveredLog(log)
log.close()
}
/**
* Test building the time index on the follower by setting assignOffsets to false.
*/
@Test
def testBuildTimeIndexWhenNotAssigningOffsets() {
val numMessages = 100
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 10000: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
val config = LogConfig(logProps)
val log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
val messages = (0 until numMessages).map { i =>
new ByteBufferMessageSet(NoCompressionCodec, new LongRef(100 + i), new Message(i.toString.getBytes(), time.milliseconds + i, Message.MagicValue_V1))
}
messages.foreach(log.append(_, assignOffsets = false))
val timeIndexEntries = log.logSegments.foldLeft(0) { (entries, segment) => entries + segment.timeIndex.entries }
assertEquals(s"There should be ${numMessages - 1} time index entries", numMessages - 1, timeIndexEntries)
assertEquals(s"The last time index entry should have timestamp ${time.milliseconds + numMessages - 1}",
time.milliseconds + numMessages - 1, log.activeSegment.timeIndex.lastEntry.timestamp)
}
/**
* Test that if we manually delete an index segment it is rebuilt when the log is re-opened
*/
@Test
def testIndexRebuild() {
// publish the messages and close the log
val numMessages = 200
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 200: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
val config = LogConfig(logProps)
var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
for(i <- 0 until numMessages)
log.append(TestUtils.singleMessageSet(payload = TestUtils.randomBytes(10), timestamp = time.milliseconds + i * 10))
val indexFiles = log.logSegments.map(_.index.file)
val timeIndexFiles = log.logSegments.map(_.timeIndex.file)
log.close()
// delete all the index files
indexFiles.foreach(_.delete())
timeIndexFiles.foreach(_.delete())
// reopen the log
log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
assertEquals("Should have %d messages when log is reopened".format(numMessages), numMessages, log.logEndOffset)
assertTrue("The index should have been rebuilt", log.logSegments.head.index.entries > 0)
assertTrue("The time index should have been rebuilt", log.logSegments.head.timeIndex.entries > 0)
for(i <- 0 until numMessages) {
assertEquals(i, log.read(i, 100, None).messageSet.head.offset)
if (i == 0)
assertEquals(log.logSegments.head.baseOffset, log.fetchOffsetsByTimestamp(time.milliseconds + i * 10).get.offset)
else
assertEquals(i, log.fetchOffsetsByTimestamp(time.milliseconds + i * 10).get.offset)
}
log.close()
}
/**
* Test that if messages format version of the messages in a segment is before 0.10.0, the time index should be empty.
*/
@Test
def testRebuildTimeIndexForOldMessages() {
val numMessages = 200
val segmentSize = 200
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
logProps.put(LogConfig.MessageFormatVersionProp, "0.9.0")
val config = LogConfig(logProps)
var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
for(i <- 0 until numMessages)
log.append(TestUtils.singleMessageSet(payload = TestUtils.randomBytes(10), timestamp = time.milliseconds + i * 10))
val timeIndexFiles = log.logSegments.map(_.timeIndex.file)
log.close()
// Delete the time index.
timeIndexFiles.foreach(_.delete())
// The rebuilt time index should be empty
log = new Log(logDir, config, recoveryPoint = numMessages + 1, time.scheduler, time)
val segArray = log.logSegments.toArray
for (i <- 0 until segArray.size - 1)
assertEquals("The time index should be empty", 0, segArray(i).timeIndex.entries)
}
/**
* Test that if we have corrupted an index segment it is rebuilt when the log is re-opened
*/
@Test
def testCorruptIndexRebuild() {
// publish the messages and close the log
val numMessages = 200
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 200: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
val config = LogConfig(logProps)
var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time)
for(i <- 0 until numMessages)
log.append(TestUtils.singleMessageSet(payload = TestUtils.randomBytes(10), timestamp = time.milliseconds + i * 10))
val indexFiles = log.logSegments.map(_.index.file)
val timeIndexFiles = log.logSegments.map(_.timeIndex.file)
log.close()
// corrupt all the index files
for( file <- indexFiles) {
val bw = new BufferedWriter(new FileWriter(file))
bw.write(" ")
bw.close()
}
// corrupt all the time index files
for( file <- timeIndexFiles) {
val bw = new BufferedWriter(new FileWriter(file))
bw.write(" ")
bw.close()
}
// reopen the log
log = new Log(logDir, config, recoveryPoint = 200L, time.scheduler, time)
assertEquals("Should have %d messages when log is reopened".format(numMessages), numMessages, log.logEndOffset)
for(i <- 0 until numMessages) {
assertEquals(i, log.read(i, 100, None).messageSet.head.offset)
if (i == 0)
assertEquals(log.logSegments.head.baseOffset, log.fetchOffsetsByTimestamp(time.milliseconds + i * 10).get.offset)
else
assertEquals(i, log.fetchOffsetsByTimestamp(time.milliseconds + i * 10).get.offset)
}
log.close()
}
/**
* Test the Log truncate operations
*/
@Test
def testTruncateTo() {
val set = TestUtils.singleMessageSet("test".getBytes)
val setSize = set.sizeInBytes
val msgPerSeg = 10
val segmentSize = msgPerSeg * setSize // each segment will be 10 messages
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer)
// create a log
val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, scheduler = time.scheduler, time = time)
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
for (_ <- 1 to msgPerSeg)
log.append(set)
assertEquals("There should be exactly 1 segments.", 1, log.numberOfSegments)
assertEquals("Log end offset should be equal to number of messages", msgPerSeg, log.logEndOffset)
val lastOffset = log.logEndOffset
val size = log.size
log.truncateTo(log.logEndOffset) // keep the entire log
assertEquals("Should not change offset", lastOffset, log.logEndOffset)
assertEquals("Should not change log size", size, log.size)
log.truncateTo(log.logEndOffset + 1) // try to truncate beyond lastOffset
assertEquals("Should not change offset but should log error", lastOffset, log.logEndOffset)
assertEquals("Should not change log size", size, log.size)
log.truncateTo(msgPerSeg/2) // truncate somewhere in between
assertEquals("Should change offset", log.logEndOffset, msgPerSeg/2)
assertTrue("Should change log size", log.size < size)
log.truncateTo(0) // truncate the entire log
assertEquals("Should change offset", 0, log.logEndOffset)
assertEquals("Should change log size", 0, log.size)
for (_ <- 1 to msgPerSeg)
log.append(set)
assertEquals("Should be back to original offset", log.logEndOffset, lastOffset)
assertEquals("Should be back to original size", log.size, size)
log.truncateFullyAndStartAt(log.logEndOffset - (msgPerSeg - 1))
assertEquals("Should change offset", log.logEndOffset, lastOffset - (msgPerSeg - 1))
assertEquals("Should change log size", log.size, 0)
for (_ <- 1 to msgPerSeg)
log.append(set)
assertTrue("Should be ahead of to original offset", log.logEndOffset > msgPerSeg)
assertEquals("log size should be same as before", size, log.size)
log.truncateTo(0) // truncate before first start offset in the log
assertEquals("Should change offset", 0, log.logEndOffset)
assertEquals("Should change log size", log.size, 0)
}
/**
* Verify that when we truncate a log the index of the last segment is resized to the max index size to allow more appends
*/
@Test
def testIndexResizingAtTruncation() {
val setSize = TestUtils.singleMessageSet(payload = "test".getBytes).sizeInBytes
val msgPerSeg = 10
val segmentSize = msgPerSeg * setSize // each segment will be 10 messages
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, (setSize - 1): java.lang.Integer)
val config = LogConfig(logProps)
val log = new Log(logDir, config, recoveryPoint = 0L, scheduler = time.scheduler, time = time)
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
for (i<- 1 to msgPerSeg)
log.append(TestUtils.singleMessageSet(payload = "test".getBytes, timestamp = time.milliseconds + i))
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
time.sleep(msgPerSeg)
for (i<- 1 to msgPerSeg)
log.append(TestUtils.singleMessageSet(payload = "test".getBytes, timestamp = time.milliseconds + i))
assertEquals("There should be exactly 2 segment.", 2, log.numberOfSegments)
val expectedEntries = msgPerSeg - 1
assertEquals(s"The index of the first segment should have $expectedEntries entries", expectedEntries, log.logSegments.toList.head.index.maxEntries)
assertEquals(s"The time index of the first segment should have $expectedEntries entries", expectedEntries, log.logSegments.toList.head.timeIndex.maxEntries)
log.truncateTo(0)
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
assertEquals("The index of segment 1 should be resized to maxIndexSize", log.config.maxIndexSize/8, log.logSegments.toList.head.index.maxEntries)
assertEquals("The time index of segment 1 should be resized to maxIndexSize", log.config.maxIndexSize/12, log.logSegments.toList.head.timeIndex.maxEntries)
time.sleep(msgPerSeg)
for (i<- 1 to msgPerSeg)
log.append(TestUtils.singleMessageSet(payload = "test".getBytes, timestamp = time.milliseconds + i))
assertEquals("There should be exactly 1 segment.", 1, log.numberOfSegments)
}
/**
* When we open a log any index segments without an associated log segment should be deleted.
*/
@Test
def testBogusIndexSegmentsAreRemoved() {
val bogusIndex1 = Log.indexFilename(logDir, 0)
val bogusTimeIndex1 = Log.timeIndexFilename(logDir, 0)
val bogusIndex2 = Log.indexFilename(logDir, 5)
val bogusTimeIndex2 = Log.timeIndexFilename(logDir, 5)
val set = TestUtils.singleMessageSet("test".getBytes)
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
val log = new Log(logDir,
LogConfig(logProps),
recoveryPoint = 0L,
time.scheduler,
time)
assertTrue("The first index file should have been replaced with a larger file", bogusIndex1.length > 0)
assertTrue("The first time index file should have been replaced with a larger file", bogusTimeIndex1.length > 0)
assertFalse("The second index file should have been deleted.", bogusIndex2.exists)
assertFalse("The second time index file should have been deleted.", bogusTimeIndex2.exists)
// check that we can append to the log
for (_ <- 0 until 10)
log.append(set)
log.delete()
}
/**
* Verify that truncation works correctly after re-opening the log
*/
@Test
def testReopenThenTruncate() {
val set = TestUtils.singleMessageSet("test".getBytes)
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 10000: java.lang.Integer)
val config = LogConfig(logProps)
// create a log
var log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
// add enough messages to roll over several segments then close and re-open and attempt to truncate
for (_ <- 0 until 100)
log.append(set)
log.close()
log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
log.truncateTo(3)
assertEquals("All but one segment should be deleted.", 1, log.numberOfSegments)
assertEquals("Log end offset should be 3.", 3, log.logEndOffset)
}
/**
* Test that deleted files are deleted after the appropriate time.
*/
@Test
def testAsyncDelete() {
val set = TestUtils.singleMessageSet("test".getBytes)
val asyncDeleteMs = 1000
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 10000: java.lang.Integer)
logProps.put(LogConfig.FileDeleteDelayMsProp, asyncDeleteMs: java.lang.Integer)
logProps.put(LogConfig.RetentionMsProp, 0: java.lang.Integer)
val config = LogConfig(logProps)
val log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
// append some messages to create some segments
for (_ <- 0 until 100)
log.append(set)
// files should be renamed
val segments = log.logSegments.toArray
val oldFiles = segments.map(_.log.file) ++ segments.map(_.index.file)
// expire all segments
log.logSegments.foreach(_.lastModified = time.milliseconds - 1000L)
log.deleteOldSegments()
assertEquals("Only one segment should remain.", 1, log.numberOfSegments)
assertTrue("All log and index files should end in .deleted", segments.forall(_.log.file.getName.endsWith(Log.DeletedFileSuffix)) &&
segments.forall(_.index.file.getName.endsWith(Log.DeletedFileSuffix)))
assertTrue("The .deleted files should still be there.", segments.forall(_.log.file.exists) &&
segments.forall(_.index.file.exists))
assertTrue("The original file should be gone.", oldFiles.forall(!_.exists))
// when enough time passes the files should be deleted
val deletedFiles = segments.map(_.log.file) ++ segments.map(_.index.file)
time.sleep(asyncDeleteMs + 1)
assertTrue("Files should all be gone.", deletedFiles.forall(!_.exists))
}
/**
* Any files ending in .deleted should be removed when the log is re-opened.
*/
@Test
def testOpenDeletesObsoleteFiles() {
val set = TestUtils.singleMessageSet("test".getBytes)
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.RetentionMsProp, 0: java.lang.Integer)
val config = LogConfig(logProps)
var log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
// append some messages to create some segments
for (_ <- 0 until 100)
log.append(set)
// expire all segments
log.logSegments.foreach(_.lastModified = time.milliseconds - 1000)
log.deleteOldSegments()
log.close()
log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
assertEquals("The deleted segments should be gone.", 1, log.numberOfSegments)
}
@Test
def testAppendMessageWithNullPayload() {
val log = new Log(logDir,
LogConfig(),
recoveryPoint = 0L,
time.scheduler,
time)
log.append(new ByteBufferMessageSet(new Message(bytes = null)))
val messageSet = log.read(0, 4096, None).messageSet
assertEquals(0, messageSet.head.offset)
assertTrue("Message payload should be null.", messageSet.head.message.isNull)
}
@Test(expected = classOf[IllegalArgumentException])
def testAppendWithOutOfOrderOffsetsThrowsException() {
val log = new Log(logDir,
LogConfig(),
recoveryPoint = 0L,
time.scheduler,
time)
val messages = (0 until 2).map(id => new Message(id.toString.getBytes)).toArray
messages.foreach(message => log.append(new ByteBufferMessageSet(message)))
val invalidMessage = new ByteBufferMessageSet(new Message(1.toString.getBytes))
log.append(invalidMessage, assignOffsets = false)
}
@Test
def testCorruptLog() {
// append some messages to create some segments
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
logProps.put(LogConfig.MaxMessageBytesProp, 64*1024: java.lang.Integer)
val config = LogConfig(logProps)
val set = TestUtils.singleMessageSet("test".getBytes)
val recoveryPoint = 50L
for (_ <- 0 until 50) {
// create a log and write some messages to it
logDir.mkdirs()
var log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
val numMessages = 50 + TestUtils.random.nextInt(50)
for (_ <- 0 until numMessages)
log.append(set)
val messages = log.logSegments.flatMap(_.log.iterator.toList)
log.close()
// corrupt index and log by appending random bytes
TestUtils.appendNonsenseToFile(log.activeSegment.index.file, TestUtils.random.nextInt(1024) + 1)
TestUtils.appendNonsenseToFile(log.activeSegment.log.file, TestUtils.random.nextInt(1024) + 1)
// attempt recovery
log = new Log(logDir, config, recoveryPoint, time.scheduler, time)
assertEquals(numMessages, log.logEndOffset)
assertEquals("Messages in the log after recovery should be the same.", messages, log.logSegments.flatMap(_.log.iterator.toList))
Utils.delete(logDir)
}
}
@Test
def testCleanShutdownFile() {
// append some messages to create some segments
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.MaxMessageBytesProp, 64*1024: java.lang.Integer)
logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer)
val config = LogConfig(logProps)
val set = TestUtils.singleMessageSet("test".getBytes)
val parentLogDir = logDir.getParentFile
assertTrue("Data directory %s must exist", parentLogDir.isDirectory)
val cleanShutdownFile = new File(parentLogDir, Log.CleanShutdownFile)
cleanShutdownFile.createNewFile()
assertTrue(".kafka_cleanshutdown must exist", cleanShutdownFile.exists())
var recoveryPoint = 0L
// create a log and write some messages to it
var log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
for (_ <- 0 until 100)
log.append(set)
log.close()
// check if recovery was attempted. Even if the recovery point is 0L, recovery should not be attempted as the
// clean shutdown file exists.
recoveryPoint = log.logEndOffset
log = new Log(logDir, config, 0L, time.scheduler, time)
assertEquals(recoveryPoint, log.logEndOffset)
cleanShutdownFile.delete()
}
@Test
def testParseTopicPartitionName() {
val topic = "test_topic"
val partition = "143"
val dir = new File(logDir + topicPartitionName(topic, partition))
val topicAndPartition = Log.parseTopicPartitionName(dir)
assertEquals(topic, topicAndPartition.asTuple._1)
assertEquals(partition.toInt, topicAndPartition.asTuple._2)
}
@Test
def testParseTopicPartitionNameForEmptyName() {
try {
val dir = new File("")
Log.parseTopicPartitionName(dir)
fail("KafkaException should have been thrown for dir: " + dir.getCanonicalPath)
} catch {
      case _: Exception => // it's GOOD!
}
}
@Test
def testParseTopicPartitionNameForNull() {
try {
val dir: File = null
Log.parseTopicPartitionName(dir)
fail("KafkaException should have been thrown for dir: " + dir)
} catch {
      case _: Exception => // it's GOOD!
}
}
@Test
def testParseTopicPartitionNameForMissingSeparator() {
val topic = "test_topic"
val partition = "1999"
val dir = new File(logDir + File.separator + topic + partition)
try {
Log.parseTopicPartitionName(dir)
fail("KafkaException should have been thrown for dir: " + dir.getCanonicalPath)
} catch {
      case _: Exception => // it's GOOD!
}
}
@Test
def testParseTopicPartitionNameForMissingTopic() {
val topic = ""
val partition = "1999"
val dir = new File(logDir + topicPartitionName(topic, partition))
try {
Log.parseTopicPartitionName(dir)
fail("KafkaException should have been thrown for dir: " + dir.getCanonicalPath)
} catch {
      case _: Exception => // it's GOOD!
}
}
@Test
def testParseTopicPartitionNameForMissingPartition() {
val topic = "test_topic"
val partition = ""
val dir = new File(logDir + topicPartitionName(topic, partition))
try {
Log.parseTopicPartitionName(dir)
fail("KafkaException should have been thrown for dir: " + dir.getCanonicalPath)
} catch {
      case _: Exception => // it's GOOD!
}
}
def topicPartitionName(topic: String, partition: String): String =
File.separator + topic + "-" + partition
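  // e.g. topicPartitionName("test_topic", "143") yields "/test_topic-143" on
  // Unix-like systems: the "<topic>-<partition>" directory naming convention
  // that parseTopicPartitionName reverses in the tests above.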
@Test
def testDeleteOldSegmentsMethod() {
val set = TestUtils.singleMessageSet("test".getBytes)
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer)
logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer)
logProps.put(LogConfig.RetentionMsProp, 0: java.lang.Integer)
val config = LogConfig(logProps)
val log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
// append some messages to create some segments
for (_ <- 0 until 100)
log.append(set)
// expire all segments
log.logSegments.foreach(_.lastModified = time.milliseconds - 1000)
log.deleteOldSegments()
assertEquals("The deleted segments should be gone.", 1, log.numberOfSegments)
// append some messages to create some segments
for (_ <- 0 until 100)
log.append(set)
log.delete()
assertEquals("The number of segments should be 0", 0, log.numberOfSegments)
assertEquals("The number of deleted segments should be zero.", 0, log.deleteOldSegments())
}
@Test
def shouldDeleteSizeBasedSegments() {
val set = TestUtils.singleMessageSet("test".getBytes)
val log = createLog(set.sizeInBytes, retentionBytes = set.sizeInBytes * 10)
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
    log.deleteOldSegments()
    assertEquals("should have 2 segments", 2, log.numberOfSegments)
}
@Test
def shouldNotDeleteSizeBasedSegmentsWhenUnderRetentionSize() {
val set = TestUtils.singleMessageSet("test".getBytes)
val log = createLog(set.sizeInBytes, retentionBytes = set.sizeInBytes * 15)
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
    log.deleteOldSegments()
    assertEquals("should have 3 segments", 3, log.numberOfSegments)
}
@Test
def shouldDeleteTimeBasedSegmentsReadyToBeDeleted() {
val set = TestUtils.singleMessageSet("test".getBytes, timestamp = 10)
val log = createLog(set.sizeInBytes, retentionMs = 10000)
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
log.deleteOldSegments()
assertEquals("There should be 1 segment remaining", 1, log.numberOfSegments)
}
@Test
def shouldNotDeleteTimeBasedSegmentsWhenNoneReadyToBeDeleted() {
val set = TestUtils.singleMessageSet("test".getBytes, timestamp = time.milliseconds)
val log = createLog(set.sizeInBytes, retentionMs = 10000000)
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
log.deleteOldSegments()
assertEquals("There should be 3 segments remaining", 3, log.numberOfSegments)
}
@Test
def shouldNotDeleteSegmentsWhenPolicyDoesNotIncludeDelete() {
val set = TestUtils.singleMessageSet("test".getBytes, key = "test".getBytes(), timestamp = 10L)
val log = createLog(set.sizeInBytes,
retentionMs = 10000,
cleanupPolicy = "compact")
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
    // mark the oldest segment as older than retention.ms
log.logSegments.head.lastModified = time.milliseconds - 20000
val segments = log.numberOfSegments
log.deleteOldSegments()
assertEquals("There should be 3 segments remaining", segments, log.numberOfSegments)
}
@Test
def shouldDeleteSegmentsReadyToBeDeletedWhenCleanupPolicyIsCompactAndDelete() {
val set = TestUtils.singleMessageSet("test".getBytes, key = "test".getBytes,timestamp = 10L)
val log = createLog(set.sizeInBytes,
retentionMs = 10000,
cleanupPolicy = "compact,delete")
// append some messages to create some segments
for (_ <- 0 until 15)
log.append(set)
log.deleteOldSegments()
assertEquals("There should be 1 segment remaining", 1, log.numberOfSegments)
}
def createLog(messageSizeInBytes: Int, retentionMs: Int = -1,
retentionBytes: Int = -1, cleanupPolicy: String = "delete"): Log = {
val logProps = new Properties()
logProps.put(LogConfig.SegmentBytesProp, messageSizeInBytes * 5: Integer)
logProps.put(LogConfig.RetentionMsProp, retentionMs: Integer)
logProps.put(LogConfig.RetentionBytesProp, retentionBytes: Integer)
logProps.put(LogConfig.CleanupPolicyProp, cleanupPolicy)
val config = LogConfig(logProps)
val log = new Log(logDir,
config,
recoveryPoint = 0L,
time.scheduler,
time)
log
}
}
|
geeag/kafka
|
core/src/test/scala/unit/kafka/log/LogTest.scala
|
Scala
|
apache-2.0
| 53,285
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.datasources.OutputWriterFactory
import org.apache.spark.sql.types.StructType
/**
 * `ReadOnlyOrcFileFormat` only supports reading ORC files; it does not support writes.
 * OAP uses it to create and refresh indexes, since its isSplitable method always returns false.
*/
class ReadOnlyOrcFileFormat extends OrcFileFormat {
override def isSplitable(
sparkSession: SparkSession,
options: Map[String, String],
path: Path): Boolean = false
override def prepareWrite(
sparkSession: SparkSession,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory =
throw new UnsupportedOperationException("ReadOnlyOrcFileFormat not support write operation")
}
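// A minimal usage sketch (illustrative only, not part of the original file;
// `spark` stands for any live SparkSession, `job` and `schema` for a Hadoop
// Job and a StructType):
//
//   val format = new ReadOnlyOrcFileFormat
//   format.isSplitable(spark, Map.empty, new Path("/data/part-0.orc"))  // always false
//   // format.prepareWrite(spark, job, Map.empty, schema)  // throws UnsupportedOperationException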
|
Intel-bigdata/OAP
|
oap-cache/oap/src/main/scala/org/apache/spark/sql/hive/orc/ReadOnlyOrcFileFormat.scala
|
Scala
|
apache-2.0
| 1,712
|
/*
* Created on 2010/04/09
* Copyright (c) 2010-2014, Wei-ju Wu.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Wei-ju Wu nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.zmpp.glulx.swing
import java.util.logging._
import java.io.File
import javax.swing._
import java.awt.BorderLayout
import java.awt.Dimension
import java.awt.Font
import java.awt.event._
import java.awt.image.BufferedImage
import javax.imageio.ImageIO
import org.zmpp.base._
import org.zmpp.glk._
import org.zmpp.glulx._
import scala.collection.mutable.HashMap
/**
 * This trait ensures that we always run the UI-relevant methods in the
* event dispatch thread.
*/
trait SwingGlkWindowUI extends GlkWindowUI
with MouseListener with MouseMotionListener {
def container: JComponent
// functions that run in the UI thread
def _moveCursor(xpos: Int, ypos: Int)
def _setStyle(value: Int) {}
def _flush {}
def _clear
def eventManager: EventManager
def style = 0
def style_=(value: Int) {
if (SwingUtilities.isEventDispatchThread) _setStyle(value)
else {
SwingUtilities.invokeLater(new Runnable {
def run = _setStyle(value)
})
}
}
def flush {
if (SwingUtilities.isEventDispatchThread) _flush
else {
SwingUtilities.invokeLater(new Runnable {
def run = _flush
})
}
}
def clear {
if (SwingUtilities.isEventDispatchThread) _clear
else {
SwingUtilities.invokeLater(new Runnable {
def run = _clear
})
}
}
def moveCursor(x: Int, y: Int) {
if (SwingUtilities.isEventDispatchThread) _moveCursor(x, y)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _moveCursor(x, y)
})
}
}
def eraseRect(left: Int, top: Int, width: Int, height: Int) {
throw new UnsupportedOperationException(
"fillRect() not supported for this window type")
}
def fillRect(color: Int, left: Int, top: Int, width: Int, height: Int) {
throw new UnsupportedOperationException(
"fillRect() not supported for this window type")
}
def drawScaledImage(resnum: Int, posx: Int, posy: Int, width: Int,
height: Int) {
throw new UnsupportedOperationException(
"This window does not support drawing images")
}
def drawImage(resnum: Int, posx: Int, posy: Int) {
throw new UnsupportedOperationException(
"This window does not support drawing images")
}
def requestLineInput
def requestPreviousLineInput
def requestCharInput
def requestHyperlinkEvent
def requestMouseInput
def cancelLineInput: String
}
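/**
 * A minimal sketch (not part of the original file) of the dispatch pattern the
 * trait above repeats for each method: run directly when already on the event
 * dispatch thread, otherwise hand the task to the EDT. invokeLater is
 * fire-and-forget, while invokeAndWait blocks the caller until the EDT has run
 * the task, which is why the cursor and input-request methods use it.
 */
private[swing] object EdtDispatchExample {
  def onEdt(task: => Unit) {
    if (SwingUtilities.isEventDispatchThread) task
    else SwingUtilities.invokeLater(new Runnable { def run = task })
  }
}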
class Hyperlink(val id: Int) {
var startPos: Int = 0
var endPos : Int = 0
def contains(pos: Int): Boolean = pos >= startPos && pos <= endPos
}
trait SwingGlkScreenUI extends GlkScreenUI {
val logger = Logger.getLogger("glk.ui")
private[this] val _windowUIs = new HashMap[Int, SwingGlkWindowUI]
private[this] val _imageCache = new HashMap[Int, BufferedImage]
private[this] val TextGridExtraMargin = 3
private[this] val TextBufferExtraMargin = 3
var fixedFont = getDefaultFixedFont
var standardFont = getDefaultNormalFont
var lineHeightTextGrid = 0
var charWidthTextGrid = 0
var lineHeightStdFont = 0
var charWidthStdFont = 0
var vm: GlulxVM = null
var currentView : JComponent = null
// Currently, this "status bar" is just a focus catcher, i.e. it captures
// the focus when the view layout hierarchy is reorganized
var statusBar : JLabel = null
def eventManager = vm.eventManager
def blorbData = vm.blorbData
private def getDefaultFixedFont = {
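    // Font.decode falls back to the logical "Dialog" family when the requested
    // font is not installed, so a "Dialog" result means: try the next candidate.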
var prefFont = Font.decode("Inconsolata-PLAIN-14")
if (prefFont.getFamily == "Dialog")
prefFont = Font.decode("Courier New-PLAIN-14")
if (prefFont.getFamily == "Dialog")
prefFont = new Font("Monospaced", Font.PLAIN, 14)
prefFont
}
private def getDefaultNormalFont = {
var prefFont = Font.decode("American Typewriter-PLAIN-14")
if (prefFont.getFamily == "Dialog")
prefFont = Font.decode("Times New Roman-PLAIN-14")
if (prefFont.getFamily == "Dialog")
prefFont = new Font("Serif", Font.PLAIN, 14)
prefFont
}
private def proportionalSize(fullSize: Int, relSize: Int): Int = {
(fullSize.toDouble * relSize.toDouble / 100.0).toInt
}
private def fixedHeight(pair: GlkPairWindow) = {
if (pair.keyWindow.isGraphics) pair.keyWindow.size
else if (pair.keyWindow.isTextBuffer) {
lineHeightStdFont * pair.keyWindow.size +
SwingTextBufferUI.MarginTop + SwingTextBufferUI.MarginBottom
}
else {
lineHeightTextGrid * pair.keyWindow.size +
SwingTextGridUI.MarginTop + SwingTextGridUI.MarginBottom
}
}
private def fixedWidth(pair: GlkPairWindow) = {
if (pair.keyWindow.isGraphics) pair.keyWindow.size
else if (pair.keyWindow.isTextBuffer) {
charWidthStdFont * pair.keyWindow.size + SwingTextBufferUI.MarginLeft +
SwingTextBufferUI.MarginRight
}
else {
charWidthTextGrid * pair.keyWindow.size +
SwingTextGridUI.MarginLeft + SwingTextGridUI.MarginRight
}
}
private def calculateHeight(pair: GlkPairWindow, fullSize: Int): Int = {
if (pair.isProportional) proportionalSize(fullSize, pair.keyWindow.size)
else fixedHeight(pair)
}
private def calculateWidth(pair: GlkPairWindow, fullSize: Int): Int = {
if (pair.isProportional) proportionalSize(fullSize, pair.keyWindow.size)
else fixedWidth(pair)
}
private def distributeRemainder(window: GlkWindow, remainSize: Dimension) = {
if (window.isLeaf) {
// Leafs can always get the remainder size
_windowUIs(window.id).asInstanceOf[JComponent]
.setPreferredSize(remainSize)
(_windowUIs(window.id).container, remainSize)
} else {
makeLayout(window, remainSize)
}
}
private def makeLayout(window: GlkWindow,
currentSize: Dimension): JComponent = {
if (window == null) {
val emptyPanel = new JPanel
emptyPanel.setPreferredSize(currentSize)
emptyPanel
} else if (window.isLeaf) {
val component = _windowUIs(window.id).container
component.setPreferredSize(currentSize)
component
} else {
val pair = window.asInstanceOf[GlkPairWindow]
var keyWidth = 0
var keyHeight = 0
var leftSize: Dimension = null
if (pair.isVertical) {
keyWidth = currentSize.width
keyHeight = calculateHeight(pair, currentSize.height)
leftSize = new Dimension(currentSize.width,
currentSize.height - keyHeight)
} else {
keyWidth = calculateWidth(pair, currentSize.width)
keyHeight = currentSize.height
leftSize = new Dimension(currentSize.width - keyWidth,
currentSize.height)
}
val rightSize = new Dimension(keyWidth, keyHeight)
val leftComponent = makeLayout(pair.child0, leftSize)
val rightComponent = makeLayout(pair.child1, rightSize)
val pairPanel = if (pair.isVertical) new Box(BoxLayout.Y_AXIS)
else new Box(BoxLayout.X_AXIS)
if (pair.isLeft || pair.isAbove) {
pairPanel.add(rightComponent)
pairPanel.add(leftComponent)
} else {
pairPanel.add(leftComponent)
pairPanel.add(rightComponent)
}
pairPanel
}
}
def updateLayout(root: GlkWindow) {
val runnable = new Runnable {
def run {
if (statusBar == null) {
statusBar = new JLabel("")
getContentPane.add(statusBar, BorderLayout.SOUTH)
}
val view = makeLayout(root, getClientSize)
// ensure that we do not lose the input focus when we close the
// current view. We do this by setting the input focus to the status
// bar
statusBar.requestFocusInWindow
if (currentView != null) getContentPane.remove(currentView)
getContentPane.invalidate
getContentPane.add(view, BorderLayout.CENTER)
currentView = view
getContentPane.validate
// we also need to reset the text grids in order
// to pre-fill them with spaces
_windowUIs.foreach(elem => if (elem._2.isInstanceOf[SwingTextGridUI]) {
elem._2.asInstanceOf[SwingTextGridUI].reset
})
}
}
// resize synchronously to make sure the VM can retrieve the right size
// (we might do synchronous getsize instead)
if (SwingUtilities.isEventDispatchThread) runnable.run
else SwingUtilities.invokeAndWait(runnable)
}
def createTextBufferUI(id: Int, glkWindow: GlkUIWindow) = {
val winui = new SwingTextBufferUI(this, glkWindow)
winui.setPreferredSize(new Dimension(640, 480))
_windowUIs += id -> winui
winui
}
def createTextGridUI(id: Int, glkWindow: GlkUIWindow) = {
val winui = new SwingTextGridUI(this, glkWindow)
winui.setPreferredSize(new Dimension(640, lineHeightTextGrid))
_windowUIs += id -> winui
winui
}
def createGraphicsUI(id: Int, glkWindow: GlkWindow) = {
val winui = new SwingGraphicsUI(this, glkWindow)
winui.setPreferredSize(new Dimension(200, 10))
_windowUIs += id -> winui
winui
}
def initMetrics {
val g = getGraphics
val stdMetrics = g.getFontMetrics(standardFont)
val fixedMetrics = g.getFontMetrics(fixedFont)
lineHeightTextGrid = fixedMetrics.getMaxAscent + fixedMetrics.getMaxDescent
charWidthTextGrid = g.getFontMetrics(fixedFont).charWidth('0')
lineHeightStdFont = stdMetrics.getMaxAscent + stdMetrics.getMaxDescent
charWidthStdFont = g.getFontMetrics(standardFont).charWidth('0')
}
// Line input
def _requestLineInput(windowId: Int) {
//logger.info("REQUEST_LINE_INPUT(%d)".format(windowId))
val windowUI = _windowUIs(windowId)
windowUI.flush
_windowUIs(windowId).requestLineInput
}
def requestLineInput(windowId: Int) {
if (SwingUtilities.isEventDispatchThread) _requestLineInput(windowId)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _requestLineInput(windowId)
})
}
}
def _requestPreviousLineInput(windowId: Int) {
val windowUI = _windowUIs(windowId)
windowUI.flush
_windowUIs(windowId).requestPreviousLineInput
}
def requestPreviousLineInput(windowId: Int) {
if (SwingUtilities.isEventDispatchThread) _requestPreviousLineInput(windowId)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _requestPreviousLineInput(windowId)
})
}
}
// Character input
def _requestCharInput(windowId: Int) {
//logger.info("LISTEN TO CHAR_INPUT(%d)".format(windowId))
_windowUIs(windowId).flush
_windowUIs(windowId).requestCharInput
}
def requestCharInput(windowId: Int) {
if (SwingUtilities.isEventDispatchThread) _requestCharInput(windowId)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _requestCharInput(windowId)
})
}
}
def _requestHyperlinkEvent(windowId: Int) {
_windowUIs(windowId).flush
_windowUIs(windowId).requestHyperlinkEvent
}
def requestHyperlinkEvent(windowId: Int) {
if (SwingUtilities.isEventDispatchThread) _requestHyperlinkEvent(windowId)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _requestHyperlinkEvent(windowId)
})
}
}
// Mouse input
def _requestMouseInput(windowId: Int) {
//logger.info("LISTEN TO MOUSE_INPUT(%d)".format(windowId))
_windowUIs(windowId).flush
_windowUIs(windowId).requestMouseInput
}
def requestMouseInput(windowId: Int) {
if (SwingUtilities.isEventDispatchThread) _requestMouseInput(windowId)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _requestMouseInput(windowId)
})
}
}
def cancelLineInput(windowId: Int): String = {
_windowUIs(windowId).cancelLineInput
}
var _timer: javax.swing.Timer = null
def requestTimerInput(millis: Int) {
//logger.info("REQUESTING TIMER INPUT FOR %d MILLIS".format(millis))
if (_timer != null) _timer.stop
if (millis != 0) {
_timer = new javax.swing.Timer(millis, new ActionListener {
def actionPerformed(event: ActionEvent) {
eventManager.addTimerEvent
resumeWithNextEvent
}
})
_timer.start
}
}
private def resumeWithNextEvent {
if (vm.state.runState == VMRunStates.WaitForEvent &&
eventManager.processNextEvent) {
ExecutionControl.executeTurn(vm)
}
}
def getContentPane: java.awt.Container
def getGraphics: java.awt.Graphics
def getClientSize: java.awt.Dimension
def getImage(resnum: Int): BufferedImage = {
//logger.info("getImage(%d)".format(resnum))
val resourceInfo = blorbData.pictureResource(resnum)
if (resourceInfo != null) {
// many games use the same images over and over, cache them
if (!_imageCache.contains(resnum)) {
val inputStream = blorbData.pictureInputStream(resnum)
_imageCache += resnum -> ImageIO.read(inputStream)
}
_imageCache(resnum)
} else {
logger.warning("IMAGE NUM NOT FOUND: %d".format(resnum))
null
}
}
def imageSize(resnum: Int): GlkDimension = {
val image = getImage(resnum)
if (image == null) null
else new GlkDimension(image.getWidth, image.getHeight)
}
def selectFileByDialog(usage: Int, fmode: Int): File = {
val usageType = usage & FileUsageTypes.TypeMask
val fileTypeName = if (usageType == FileUsageTypes.SavedGame) "Game Save"
else if (usageType == FileUsageTypes.Transcript) "Transcript"
else if (usageType == FileUsageTypes.InputRecord) "Input Record"
else "Data"
val fileChooser = new JFileChooser
val result = if (fmode == FileModes.Read) {
fileChooser.setDialogTitle("Open %s File...".format(fileTypeName))
fileChooser.showDialog(getContentPane, "Open")
} else {
fileChooser.setDialogTitle("Save %s File...".format(fileTypeName))
fileChooser.showDialog(getContentPane, "Save")
}
if (result == JFileChooser.APPROVE_OPTION) fileChooser.getSelectedFile
else null
}
}
class GlkFrameUI extends JFrame with SwingGlkScreenUI {
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
def getClientSize = getContentPane.getSize
}
|
weiju/zmpp2
|
zmpp-swing/src/main/scala/org/zmpp/glulx/swing/NativeUI.scala
|
Scala
|
bsd-3-clause
| 15,790
|
import org.eclipse.jetty.server.nio.SelectChannelConnector
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.webapp.WebAppContext
object JettyEmbedded extends App {
val server = new Server()
val connector = new SelectChannelConnector()
connector.setPort(args(0).toInt)
server.addConnector(connector)
val context: WebAppContext = new WebAppContext(getClass.getClassLoader.getResource("webapp").toExternalForm, "/")
context.setServer(server)
server.setHandler(context)
try {
server.start()
server.join()
} catch {
case e: Exception => {
e.printStackTrace()
System.exit(1)
}
}
}
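// Usage sketch (assumed invocation, not from the original repo): the single
// program argument is the HTTP port, e.g.
//   sbt "runMain JettyEmbedded 8080"
// which serves the bundled "webapp" classpath resources at http://localhost:8080/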
|
joroKr21/spatio-temporal-dynamics
|
impro3-ws14-frontend/src/main/scala/JettyEmbedded.scala
|
Scala
|
apache-2.0
| 670
|
/***
* Copyright 2015 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.step
import javax.servlet.FilterChain
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.base.{ConnectedStep, Step, StepContext}
import com.rackspace.com.papi.components.checker.util.HeaderUtil._
class SetHeader(id : String, label : String, val name : String, val value : String,
next : Array[Step]) extends ConnectedStep(id, label, next) {
override def checkStep(req : CheckerServletRequest, resp : CheckerServletResponse, chain : FilterChain, context : StepContext) : Option[StepContext] = {
//
// If a header by the name exists then continue, if not, then set
// it to the default value.
//
// Note that the context itself is searched for the header so a
// value set in the context is not overwritten.
//
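    //
    // For example (hypothetical values): with name = "X-ROLES" and value =
    // "DEFAULT", a request already carrying X-ROLES continues unchanged,
    // while one without it continues with X-ROLES: DEFAULT added to the
    // request headers in the context.
    //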
if (hasHeader(context, req, name)) {
Some(context)
} else {
Some(context.copy(requestHeaders = context.requestHeaders.addHeader(name, value)))
}
}
}
|
wdschei/api-checker
|
core/src/main/scala/com/rackspace/com/papi/components/checker/step/SetHeader.scala
|
Scala
|
apache-2.0
| 1,675
|
import scala.io.Source
/**
* Created by sayon on 02.04.2015.
*/
object Solver extends App {
val input = Source.fromFile("input.txt").getLines().map(_.split(" ").map(_.toInt)).toArray
println(input.length)
println(input(0).length)
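  // Project Euler 11: find the greatest product of four adjacent numbers in a
  // 20x20 grid, scanning four directions (vertical, horizontal, both diagonals).
  // Ranges run 0 until 17 wherever a window of four needs room, i.e. indices i..i+3.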
val verts = for(i <- 0 until 17; j <- 0 until 20) yield {for (k <- 0 to 3) yield input(i+k)(j) }
println(verts(0))
val vertsmax = verts.map(_.reduce(_ * _)).max
  val hors = for(i <- 0 until 17; j <- 0 until 20) yield {for (k <- 0 to 3) yield input(j)(i+k) }
val horsmax = hors.map(_.reduce(_ * _)).max
println(hors(0))
val diags1 = for(i <- 0 until 17; j <- 0 until 17) yield {for (k <- 0 to 3) yield input(j+k)(i+k) }
val diagsmax1 = diags1.map(_.reduce(_ * _)).max
val diags2 = for(i <- 0 until 17; j <- 0 until 17) yield {for (k <- 0 to 3) yield input(j+3-k)(i+k) }
val diagsmax2 = diags2.map(_.reduce(_ * _)).max
println(diags1(0))
println(diags2(0))
println(horsmax max vertsmax max diagsmax1 max diagsmax2)
}
|
sayon/euler
|
11/src/solver.scala
|
Scala
|
mit
| 1,021
|
package com.github.al.roulette.winnings.impl
import akka.Done
import com.github.al.roulette
import com.github.al.roulette.bet.api.{Bet, PlayerBets}
import com.github.al.roulette.winnings.api.PlayerWinning
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity
class WinningsEntity extends PersistentEntity {
override type Command = WinningsCommand
override type Event = WinningsEvent
override type State = WinningsState
override def initialState: WinningsState = WinningsState()
override def behavior: Behavior =
state => Actions()
.onCommand[SavePlayersBets, Done] {
case (SavePlayersBets(playersBets), ctx, _) =>
val playersBetsSavedEvent = PlayersBetsSaved(playersBets)
val events = appendWinningsCalculatedEventIfRequired(state.winningNumber, Some(playersBets), playersBetsSavedEvent)
ctx.thenPersistAll(events: _*)(() => ctx.reply(Done))
}.onCommand[SaveGameResult, Done] {
case (SaveGameResult(winningNumber), ctx, _) =>
val gameResultSavedEvent = GameResultSaved(winningNumber)
val events = appendWinningsCalculatedEventIfRequired(Some(winningNumber), state.playersBets, gameResultSavedEvent)
ctx.thenPersistAll(events: _*)(() => ctx.reply(Done))
}.onEvent {
case (PlayersBetsSaved(playersBets), _) =>
state.copy(playersBets = Some(playersBets))
}.onEvent {
case (GameResultSaved(winningNumber), _) =>
state.copy(winningNumber = Some(winningNumber))
}.onEvent {
case (WinningsCalculated(winnings), _) =>
state.copy(playersWinnings = Some(winnings))
}
private def appendWinningsCalculatedEventIfRequired(winningNumberOption: Option[Int],
playersBetsOption: Option[List[PlayerBets]],
winningsEvent: WinningsEvent): List[WinningsEvent] = {
val winningsCalculatedEventOption = for {
winningNumber <- winningNumberOption
playersBets <- playersBetsOption
winningsCalculatedEvent = WinningsCalculated(calculateWinnings(winningNumber, playersBets))
} yield winningsCalculatedEvent
winningsCalculatedEventOption.foldRight[List[WinningsEvent]](Nil)(_ :: _) ::: List(winningsEvent)
}
private def calculateWinnings(winningNumber: Int, playersBets: List[PlayerBets]): List[PlayerWinning] =
playersBets.map { case PlayerBets(playerId, bets) =>
PlayerWinning(playerId, bets.map(getWinnings(winningNumber, _)).sum)
}
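  // Payout rules below: a straight-up bet on the winning number returns 36x the
  // stake, a correct Even/Odd bet returns 2x, and every other bet returns 0.
  // Worked example: bets of 10 on number 4, on Even and on Odd, with winning
  // number 4, pay 360 + 20 + 0 = 380.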
private def getWinnings(winningNumber: Int, bet: Bet): BigDecimal = {
bet match {
case Bet(Some(betNumber), roulette.bet.api.Number, betAmount) if betNumber == winningNumber => betAmount * 36
case Bet(None, roulette.bet.api.Even, betAmount) if winningNumber % 2 == 0 => betAmount * 2
case Bet(None, roulette.bet.api.Odd, betAmount) if winningNumber % 2 != 0 => betAmount * 2
case _ => 0
}
}
}
|
andrei-l/reactive-roulette
|
player-winnings-impl/src/main/scala/com/github/al/roulette/winnings/impl/WinningsEntity.scala
|
Scala
|
mit
| 2,955
|
package org.jmespike.appearance
import org.scalaprops.Bean
import simplex3d.math.float.functions._
import simplex3d.math.float._
import java.awt.Color
import com.jme3.material.Material
import org.jmespike.conf.Conf
import org.jmespike.Context
import org.jmespike.utils.VectorConversions._
import org.jmespike.conf.{RandomColorConf, ColorConf}
import java.util.Random
import org.jmespike.utils.XorShiftRandom
import com.jme3.asset.AssetManager
/**
* The surface appearance of a material.
*/
class MaterialConf() extends Conf {
// Diffuse color
val color = p('color, new RandomColorConf())
// Ambient color = self illuminating color
val ambientColor = p('ambientColor, new RandomColorConf())
// Color for calculating specular highlight (colored for metals, otherwise white)
val specularColor = p('specularColor, new RandomColorConf())
// Size of specular highlight
val shininess = p('shininess, 5f).editor(makeSlider(0, 20))
val texture = p('texture, "placeholder.png")
// Init colors
specularColor().sat := 0f
specularColor().lum := 1f
ambientColor().sat := 0f
ambientColor().lum := 0f
color().sat := 0f
color().lum := 0.5f
def createMaterial(seed: Int, assetManager: AssetManager): Material = {
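    // Seeding XorShiftRandom makes the randomized colors deterministic: the
    // same seed always produces the same material appearance.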
val random: Random = new XorShiftRandom(seed)
val shader = "Common/MatDefs/Light/Lighting.j3md"
val mat = new Material(assetManager, shader)
val loadedTexture = assetManager.loadTexture("textures/" + texture)
    if (loadedTexture != null) mat.setTexture("DiffuseMap", loadedTexture)
mat.setBoolean("UseMaterialColors", true)
mat.setColor("Ambient", ambientColor().createColor(random))
mat.setColor("Diffuse", color().createColor(random))
mat.setColor("Specular", specularColor().createColor(random))
mat.setFloat("Shininess", shininess())
mat
}
}
|
zzorn/skycastle
|
src/main/scala/org/jmespike/appearance/MaterialConf.scala
|
Scala
|
gpl-2.0
| 1,837
|
package nest.sparkle.util
object StableGroupBy {
implicit class Implicit[T](collection:Traversable[T]) {
    /** groupBy that preserves the first-appearance order of keys in the underlying collection (and so returns a sequence of pairs rather than an unordered Map) */
def stableGroupBy[U](fn: T => U): Traversable[(U, Seq[T])] = {
val groups = collection.groupBy(fn)
val keysInOrder = collection.map(fn(_)).toSeq.distinct
keysInOrder.map { key => (key, groups(key).toSeq) }
}
}
}
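// A minimal usage sketch (hypothetical demo object, not part of the original
// file), showing that groups come back in first-appearance order of their keys:
object StableGroupByExample extends App {
  import StableGroupBy._
  val words = Seq("banana", "apple", "blueberry", "cherry", "avocado")
  // first-appearance key order is 'b', 'a', 'c'; a plain groupBy gives no such guarantee
  words.stableGroupBy(_.head).foreach(println)
  // prints: (b,List(banana, blueberry)) (a,List(apple, avocado)) (c,List(cherry))
}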
|
mighdoll/sparkle
|
util/src/main/scala/nest/sparkle/util/StableGroupBy.scala
|
Scala
|
apache-2.0
| 463
|
package at.logic.gapt.proofs.resolution
import at.logic.gapt.expr._
import at.logic.gapt.expr.hol.{ CNFn, CNFp, univclosure }
import at.logic.gapt.proofs._
import at.logic.gapt.proofs.expansionTrees.{ ExpansionSequent, formulaToExpansionTree }
import scala.collection.mutable
object RobinsonToExpansionProof {
def apply( p: ResolutionProof, es: HOLSequent ): ExpansionSequent =
if ( !es.forall( isInVNF( _ ) ) ) {
val vnfES = es map { toVNF( _ ) }
apply( fixDerivation( p, vnfES ), vnfES )
} else {
val cnfMap: Map[HOLClause, Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )]] =
es.map(
ant => CNFp.toClauseList( ant ).map { ( _, false, ant ) },
suc => CNFn.toFClauseList( suc ).map { ( _, true, suc ) }
).elements.flatten.groupBy( _._1 ).mapValues {
_ map {
case ( cnfClause, pol, formula ) =>
( pol, formula,
variables( formula ).map( v => v -> Const( "arbitrary", v.exptype ) ).toMap
++ variables( cnfClause ).map( v => v -> v ) )
} toSet
}
apply_( p, cnfMap )
}
def apply( p: ResolutionProof ): ExpansionSequent =
apply_( p, clause => Set(
( false,
univclosure( clause.toFormula ),
freeVariables( clause.toFormula ).map { v => v -> v }.toMap )
) )
private def apply_( p: ResolutionProof, instForIC: HOLClause => Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )] ): ExpansionSequent = {
val inst = getInstances( p, instForIC )
// Expansion trees require instance terms not to contain the quantified variables.
// Hence we ground the instance substitutions here.
// FIXME: maybe just rename the variables?
val instSubsts = inst.map {
case ( pol, formula, subst ) =>
val ground = Substitution( freeVariables( subst.values ).map( v => v -> Const( "arbitrary", v.exptype ) ) )
( pol, formula, Substitution( subst mapValues { ground( _ ) } ) )
}
Sequent(
instSubsts.filter( _._1 == false ).groupBy( _._2 ).map {
case ( formula, substs ) =>
formulaToExpansionTree( formula, substs.map( _._3 ).toList, false )
}.toSeq,
instSubsts.filter( _._1 == true ).groupBy( _._2 ).map {
case ( formula, substs ) =>
formulaToExpansionTree( formula, substs.map( _._3 ).toList, true )
}.toSeq
)
}
private def getInstances( p: ResolutionProof, instForIC: HOLClause => Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )] ): Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )] = {
val substMap = mutable.Map[ResolutionProof, Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )]]()
def getInst( node: ResolutionProof ): Set[( Boolean, HOLFormula, Map[Var, LambdaExpression] )] =
substMap.getOrElseUpdate( node, node match {
case InputClause( clause ) =>
instForIC( clause )
case Instance( subProof, subst ) =>
getInst( subProof ) map {
case ( pol, formula, instSubst ) =>
( pol, formula, instSubst mapValues { subst( _ ) } )
}
case _ => node.immediateSubProofs flatMap getInst toSet
} )
getInst( p )
}
}
|
loewenheim/gapt
|
src/main/scala/at/logic/gapt/proofs/resolution/RobinsonToExpansionProof.scala
|
Scala
|
gpl-3.0
| 3,239
|
package scala.pickling
package runtime
import scala.reflect.{runtime => reflectRuntime}
import internal._
trait RuntimePicklersUnpicklers {
GlobalRegistry.picklerMap += ("scala.Tuple2" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2" -> (new Tuple2RTPickler(null)))
/* Register all specialized variants of Tuple2.
*/
GlobalRegistry.picklerMap += ("scala.Tuple2$mcII$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcIJ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcID$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcIC$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcIZ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcJI$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcJJ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcJD$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcJC$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcJZ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcDI$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcDJ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcDD$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcDC$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcDZ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcCI$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcCJ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcCD$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcCC$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcCZ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcZI$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcZJ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcZD$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcZC$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.picklerMap += ("scala.Tuple2$mcZZ$sp" -> (tag => new Tuple2RTPickler(tag)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcII$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcIJ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcID$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcIC$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcIZ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcJI$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcJJ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcJD$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcJC$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcJZ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcDI$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcDJ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcDD$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcDC$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcDZ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcCI$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcCJ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcCD$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcCC$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcCZ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcZI$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcZJ$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcZD$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcZC$sp" -> (new Tuple2RTPickler(null)))
GlobalRegistry.unpicklerMap += ("scala.Tuple2$mcZZ$sp" -> (new Tuple2RTPickler(null)))
def mkRuntimeTravPickler[C <% Traversable[_]](elemClass: Class[_], elemTag: FastTypeTag[_], collTag: FastTypeTag[_],
elemPickler0: Pickler[_], elemUnpickler0: Unpickler[_]):
Pickler[C] with Unpickler[C] = new Pickler[C] with Unpickler[C] {
val elemPickler = elemPickler0.asInstanceOf[Pickler[AnyRef]]
val elemUnpickler = elemUnpickler0.asInstanceOf[Unpickler[AnyRef]]
val isPrimitive = elemTag.tpe.isEffectivelyPrimitive
def tag: FastTypeTag[C] = collTag.asInstanceOf[FastTypeTag[C]]
def pickle(coll: C, builder: PBuilder): Unit = {
builder.beginEntry(coll)
builder.beginCollection(coll.size)
builder.pushHints()
if (isPrimitive) {
builder.hintStaticallyElidedType()
builder.hintTag(elemTag)
builder.pinHints()
}
(coll: Traversable[_]).asInstanceOf[Traversable[AnyRef]].foreach { (elem: AnyRef) =>
builder putElement { b =>
if (!isPrimitive) b.hintTag(elemTag)
elemPickler.pickle(elem, b)
}
}
builder.popHints()
builder.endCollection()
builder.endEntry()
}
def unpickle(tag: String, preader: PReader): Any = {
val reader = preader.beginCollection()
preader.pushHints()
if (isPrimitive) {
reader.hintStaticallyElidedType()
reader.hintTag(elemTag)
reader.pinHints()
}
val length = reader.readLength()
val newArray = java.lang.reflect.Array.newInstance(elemClass, length).asInstanceOf[Array[AnyRef]]
var i = 0
while (i < length) {
try {
val r = reader.readElement()
val elem = elemUnpickler.unpickleEntry(r)
newArray(i) = elem.asInstanceOf[AnyRef]
i = i + 1
} catch {
case PicklingException(msg, cause) =>
throw PicklingException(s"""error in unpickle of 'mkRuntimeTravPickler':
|collTag: '${collTag.key}'
|elemTag: '${elemTag.key}'
|message:
|$msg""".stripMargin, cause)
case e: Exception =>
e.printStackTrace()
throw PicklingException(s"""exception in unpickle of 'mkRuntimeTravPickler':
|collTag: '${collTag.key}'
|elemTag: '${elemTag.key}'""".stripMargin, Some(e))
}
}
preader.popHints()
preader.endCollection()
newArray
}
}
}
class Tuple2RTPickler(tag: FastTypeTag[_]) extends Pickler[(Any, Any)] with Unpickler[(Any, Any)] {
def tag = FastTypeTag[(Any, Any)]
def pickleField(name: String, value: Any, builder: PBuilder): Unit = {
val (tag1, pickler1) = if (value == null) {
(FastTypeTag.Null.asInstanceOf[FastTypeTag[Any]], Defaults.nullPickler.asInstanceOf[Pickler[Any]])
} else {
val clazz = value.getClass
val tag = FastTypeTag.mkRaw(clazz, reflectRuntime.currentMirror).asInstanceOf[FastTypeTag[Any]]
val pickler = RuntimePicklerLookup.genPickler(clazz.getClassLoader, clazz, tag).asInstanceOf[Pickler[Any]]
(tag, pickler)
}
builder.putField(name, b => {
b.hintTag(tag1)
pickler1.pickle(value, b)
})
}
def pickle(picklee: (Any, Any), builder: PBuilder): Unit = {
// println(s"@@@ using runtime ${this.getClass.getName}")
builder.beginEntry(picklee)
val fld1 = picklee._1
pickleField("_1", fld1, builder)
val fld2 = picklee._2
pickleField("_2", fld2, builder)
builder.endEntry()
// val specialPickler = new SpecialTuple2Pickler(tag1, pickler1, tag2, pickler2)
// SpecialTuple2Pickler.classSelection += ((class1 -> class2) -> Selection(specialPickler, tag))
// println(s"@@@ registered dynamic specialized pickler ${specialPickler.getClass.getName}")
}
def unpickleField(name: String, reader: PReader): Any = {
val reader1 = reader.readField(name)
val tag1 = reader1.beginEntry()
val value = {
if (reader1.atPrimitive) {
reader1.readPrimitive()
} else {
val unpickler1 = RuntimeUnpicklerLookup.genUnpickler(reflectRuntime.currentMirror, tag1)
try {
unpickler1.unpickle(tag1, reader1)
} catch {
case PicklingException(msg, cause) =>
throw PicklingException(s"""error in unpickle of '${this.getClass.getName}':
|field name: '$name'
|field tag: '${tag1}'
|message:
|$msg""".stripMargin, cause)
}
}
}
reader1.endEntry()
value
}
def unpickle(tag: String, reader: PReader): Any = {
val fld1 = unpickleField("_1", reader)
val fld2 = unpickleField("_2", reader)
(fld1, fld2)
}
}
|
phaller/pickling
|
core/src/main/scala/scala/pickling/runtime/CustomRuntime.scala
|
Scala
|
bsd-3-clause
| 9,892
|
package com.github.shadowsocks.plugin
import android.util.Log
import com.github.shadowsocks.utils.Commandline
import scala.collection.mutable
/**
* @author Mygod
*/
class PluginConfiguration(val pluginsOptions: Map[String, PluginOptions], val selected: String) {
private def this(plugins: Array[PluginOptions]) =
this(plugins.filter(_.id.nonEmpty).map(opt => opt.id -> opt).toMap, if (plugins.isEmpty) "" else plugins(0).id)
def this(plugin: String) = this(if (plugin == null) Array[PluginOptions]() else plugin.split("\\n").map {
case line if line.startsWith("kcptun ") =>
val opt = new PluginOptions()
opt.id = "kcptun"
try {
val args = mutable.Queue(Commandline.translateCommandline(line): _*)
args.dequeue()
while (args.nonEmpty) args.dequeue() match {
case "--nocomp" => opt.put("nocomp", null)
case option if option.startsWith("--") => opt.put(option.substring(2), args.dequeue())
case option => throw new IllegalArgumentException("Unknown kcptun parameter: " + option)
}
} catch {
case exc: Exception => Log.w("PluginConfiguration", exc.getMessage)
}
opt
case line => new PluginOptions(line)
})
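  // Example (hypothetical input): the legacy line
  //   kcptun --key secret --nocomp
  // is translated into PluginOptions with id "kcptun" and the option map
  // {key -> secret, nocomp -> null}; any other line is parsed as-is.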
def getOptions(id: String): PluginOptions = if (id.isEmpty) new PluginOptions() else pluginsOptions.get(id) match {
case Some(options) => options
case None => new PluginOptions(id, PluginManager.fetchPlugins()(id).defaultConfig)
}
def selectedOptions: PluginOptions = getOptions(selected)
override def toString: String = {
val result = new mutable.ListBuffer[PluginOptions]()
for ((id, opt) <- pluginsOptions) id match {
case this.selected => result.prepend(opt)
case _ => result.append(opt)
}
if (!pluginsOptions.contains(selected)) result.prepend(selectedOptions)
result.map(_.toString(false)).mkString("\\n")
}
}
|
hangox/shadowsocks-android
|
mobile/src/main/scala/com/github/shadowsocks/plugin/PluginConfiguration.scala
|
Scala
|
gpl-3.0
| 1,899
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.text.SimpleDateFormat
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.spark.SparkException
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
class CsvFunctionsSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("from_csv with empty options") {
val df = Seq("1").toDS()
val schema = "a int"
checkAnswer(
df.select(from_csv($"value", lit(schema), Map[String, String]().asJava)),
Row(Row(1)) :: Nil)
}
test("from_csv with option") {
val df = Seq("26/08/2015 18:00").toDS()
val schema = new StructType().add("time", TimestampType)
val options = Map("timestampFormat" -> "dd/MM/yyyy HH:mm")
checkAnswer(
df.select(from_csv($"value", schema, options)),
Row(Row(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0"))))
}
test("checking the columnNameOfCorruptRecord option") {
val columnNameOfCorruptRecord = "_unparsed"
val df = Seq("0,2013-111-11 12:13:14", "1,1983-08-04").toDS()
val schema = new StructType().add("a", IntegerType).add("b", DateType)
val schemaWithCorrField1 = schema.add(columnNameOfCorruptRecord, StringType)
val df2 = df
.select(from_csv($"value", schemaWithCorrField1, Map(
"mode" -> "Permissive", "columnNameOfCorruptRecord" -> columnNameOfCorruptRecord)))
checkAnswer(df2, Seq(
Row(Row(0, null, "0,2013-111-11 12:13:14")),
Row(Row(1, java.sql.Date.valueOf("1983-08-04"), null))))
}
test("schema_of_csv - infers schemas") {
checkAnswer(
spark.range(1).select(schema_of_csv(lit("0.1,1"))),
Seq(Row("struct<_c0:double,_c1:int>")))
checkAnswer(
spark.range(1).select(schema_of_csv("0.1,1")),
Seq(Row("struct<_c0:double,_c1:int>")))
}
test("schema_of_csv - infers schemas using options") {
val df = spark.range(1)
.select(schema_of_csv(lit("0.1 1"), Map("sep" -> " ").asJava))
checkAnswer(df, Seq(Row("struct<_c0:double,_c1:int>")))
}
test("to_csv - struct") {
val df = Seq(Tuple1(Tuple1(1))).toDF("a")
checkAnswer(df.select(to_csv($"a")), Row("1") :: Nil)
}
test("to_csv with option") {
val df = Seq(Tuple1(Tuple1(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0")))).toDF("a")
val options = Map("timestampFormat" -> "dd/MM/yyyy HH:mm").asJava
checkAnswer(df.select(to_csv($"a", options)), Row("26/08/2015 18:00") :: Nil)
}
test("from_csv invalid csv - check modes") {
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
val schema = new StructType()
.add("a", IntegerType)
.add("b", IntegerType)
.add("_unparsed", StringType)
val badRec = "\\""
val df = Seq(badRec, "2,12").toDS()
checkAnswer(
df.select(from_csv($"value", schema, Map("mode" -> "PERMISSIVE"))),
Row(Row(null, null, badRec)) :: Row(Row(2, 12, null)) :: Nil)
val exception1 = intercept[SparkException] {
df.select(from_csv($"value", schema, Map("mode" -> "FAILFAST"))).collect()
}.getMessage
assert(exception1.contains(
"Malformed records are detected in record parsing. Parse Mode: FAILFAST."))
val exception2 = intercept[SparkException] {
df.select(from_csv($"value", schema, Map("mode" -> "DROPMALFORMED")))
.collect()
}.getMessage
assert(exception2.contains(
"from_csv() doesn't support the DROPMALFORMED mode. " +
"Acceptable modes are PERMISSIVE and FAILFAST."))
}
}
test("from_csv uses DDL strings for defining a schema - java") {
val df = Seq("""1,"haa"""").toDS()
checkAnswer(
df.select(
from_csv($"value", lit("a INT, b STRING"), new java.util.HashMap[String, String]())),
Row(Row(1, "haa")) :: Nil)
}
test("roundtrip to_csv -> from_csv") {
val df = Seq(Tuple1(Tuple1(1)), Tuple1(null)).toDF("struct")
val schema = df.schema(0).dataType.asInstanceOf[StructType]
val options = Map.empty[String, String]
val readback = df.select(to_csv($"struct").as("csv"))
.select(from_csv($"csv", schema, options).as("struct"))
checkAnswer(df, readback)
}
test("roundtrip from_csv -> to_csv") {
val df = Seq(Some("1"), None).toDF("csv")
val schema = new StructType().add("a", IntegerType)
val options = Map.empty[String, String]
val readback = df.select(from_csv($"csv", schema, options).as("struct"))
.select(to_csv($"struct").as("csv"))
checkAnswer(df, readback)
}
test("infers schemas of a CSV string and pass to to from_csv") {
val in = Seq("""0.123456789,987654321,"San Francisco"""").toDS()
val options = Map.empty[String, String].asJava
val out = in.select(from_csv('value, schema_of_csv("0.1,1,a"), options) as "parsed")
val expected = StructType(Seq(StructField(
"parsed",
StructType(Seq(
StructField("_c0", DoubleType, true),
StructField("_c1", IntegerType, true),
StructField("_c2", StringType, true))))))
assert(out.schema == expected)
}
test("Support to_csv in SQL") {
val df1 = Seq(Tuple1(Tuple1(1))).toDF("a")
checkAnswer(df1.selectExpr("to_csv(a)"), Row("1") :: Nil)
}
test("parse timestamps with locale") {
Seq("en-US", "ko-KR", "zh-CN", "ru-RU").foreach { langTag =>
val locale = Locale.forLanguageTag(langTag)
val ts = new SimpleDateFormat("dd/MM/yyyy HH:mm").parse("06/11/2018 18:00")
val timestampFormat = "dd MMM yyyy HH:mm"
val sdf = new SimpleDateFormat(timestampFormat, locale)
val input = Seq(s"""${sdf.format(ts)}""").toDS()
val options = Map("timestampFormat" -> timestampFormat, "locale" -> langTag)
val df = input.select(from_csv($"value", lit("time timestamp"), options.asJava))
checkAnswer(df, Row(Row(java.sql.Timestamp.valueOf("2018-11-06 18:00:00.0"))))
}
}
}
|
techaddict/spark
|
sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
|
Scala
|
apache-2.0
| 6,831
|
package mesosphere.marathon
package api.v2.validation
import com.wix.accord.scalatest.ResultMatchers
import mesosphere.{ UnitTest, ValidationTestLike }
import mesosphere.marathon.raml._
class AppValidationTest extends UnitTest with ResultMatchers with ValidationTestLike {
import Normalization._
"network validation" when {
implicit val basicValidator = AppValidation.validateCanonicalAppAPI(Set.empty)
def networkedApp(portMappings: Seq[ContainerPortMapping], networks: Seq[Network]) = {
App(
id = "/foo",
cmd = Some("bar"),
networks = networks,
container = Some(Container(`type` = EngineType.Mesos, portMappings = Some(portMappings))))
}
def containerNetworkedApp(portMappings: Seq[ContainerPortMapping], networkCount: Int = 1) =
networkedApp(
portMappings,
networks = 1.to(networkCount).map { i => Network(mode = NetworkMode.Container, name = Some(i.toString)) })
"multiple container networks are specified for an app" should {
"require networkNames for hostPort to containerPort mapping" in {
val badApp = containerNetworkedApp(
Seq(ContainerPortMapping(hostPort = Option(0))), networkCount = 2)
basicValidator(badApp).normalize should failWith(
"/container/portMappings(0)" ->
AppValidationMessages.NetworkNameRequiredForMultipleContainerNetworks)
}
"allow portMappings that don't declare hostPort nor networkNames" in {
        val app = containerNetworkedApp(
          Seq(ContainerPortMapping()), networkCount = 2)
        basicValidator(app) shouldBe (aSuccess)
}
"allow portMappings that both declare a hostPort and a networkNames" in {
        val app = containerNetworkedApp(Seq(
          ContainerPortMapping(
            hostPort = Option(0),
            networkNames = List("1"))), networkCount = 2)
        basicValidator(app) shouldBe (aSuccess)
}
}
"single container network" should {
"consider a valid portMapping with a name as valid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
hostPort = Some(80),
containerPort = 80,
networkNames = List("1"))))) shouldBe (aSuccess)
}
"consider a portMapping with a hostPort and two valid networkNames as invalid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
hostPort = Some(80),
containerPort = 80,
networkNames = List("1", "2"))),
networkCount = 3)) should containViolation(
"/container/portMappings(0)" -> AppValidationMessages.NetworkNameRequiredForMultipleContainerNetworks)
}
"consider a portMapping with no name as valid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
hostPort = Some(80),
containerPort = 80,
networkNames = Nil)))) shouldBe (aSuccess)
}
"consider a portMapping without a hostport as valid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
hostPort = None)))) shouldBe (aSuccess)
}
"consider portMapping with zero hostport as valid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
containerPort = 80,
hostPort = Some(0))))) shouldBe (aSuccess)
}
"consider portMapping with a non-matching network name as invalid" in {
val result = basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
containerPort = 80,
hostPort = Some(80),
networkNames = List("undefined-network-name")))))
result.isFailure shouldBe true
}
"consider portMapping without networkNames nor hostPort as valid" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
containerPort = 80,
hostPort = None,
networkNames = Nil)))) shouldBe (aSuccess)
}
}
"general port validation" in {
basicValidator(
containerNetworkedApp(
Seq(
ContainerPortMapping(
name = Some("name"),
hostPort = Some(123)),
ContainerPortMapping(
name = Some("name"),
hostPort = Some(123))))).isFailure shouldBe true
}
"missing hostPort is allowed for bridge networking (so we can normalize it)" in {
      // This isn't _actually_ allowed; we expect that normalization will replace the None with a Some(0)
      // before converting to an AppDefinition, in order to support the legacy API
val app = networkedApp(
portMappings = Seq(ContainerPortMapping(
containerPort = 8080,
hostPort = None,
servicePort = 0,
name = Some("foo"))),
networks = Seq(Network(mode = NetworkMode.ContainerBridge, name = None))
)
basicValidator(app) shouldBe (aSuccess)
}
}
}
|
natemurthy/marathon
|
src/test/scala/mesosphere/marathon/api/v2/validation/AppValidationTest.scala
|
Scala
|
apache-2.0
| 5,359
|
object Test {
trait Foo
class Bar {
object baz extends Foo
}
def frob[P1, P2<:Foo](f:P1 => P2) = ()
def main(args:Array[String]) : Unit = {
frob((p:Bar) => p.baz)
}
}
|
folone/dotty
|
tests/pos/t2444.scala
|
Scala
|
bsd-3-clause
| 192
|
/*
* Copyright 2015 Daniel Spiewak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalaz.stream
package parsers
import org.specs2.mutable._
import scalaz.std.anyVal._
object StreamSpecs extends Specification {
import Parser.{completed, literalRichParser}
"parentheses stream parsing" should {
lazy val parens: Parser[Char, Int] = (
'(' ~> parens <~ ')' ^^ (1 +)
| completed(0)
)
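    // parens consumes one balanced group and yields its nesting depth: each
    // matched '(' ... ')' pair adds 1, and the completed(0) branch ends the
    // recursion, so "((()))" parses to 3 and an empty match yields 0.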
"parse individual single parens" in {
val result = Process("()": _*).toSource pipe parse(parens) stripW
result.runLog.run mustEqual Seq(1, 0)
}
"parse multiple parens" in {
val result = Process("()()()()()()()()()()()()()": _*).toSource pipe parse(parens) stripW
result.runLog.run mustEqual Seq(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0)
}
"parse parens nested at arbitrary depth in sequence" in {
val result = Process("((()))(())()((((()))))((()))()(((())))": _*).toSource pipe parse(parens) stripW
result.runLog.run mustEqual Seq(3, 2, 1, 5, 3, 1, 4, 0)
}
}
}
|
djspiewak/sparse
|
src/test/scala/scalaz/stream/parsers/StreamSpecs.scala
|
Scala
|
apache-2.0
| 1,563
|
package crawl
import akka.actor._
import akka.event.LoggingAdapter
import scala.collection.mutable.ArrayBuffer
import scala.compat.Platform
/** Actor for collecting statistics about how long each web request lasts.
  * There should be only a single instance of it.
* @author Christoph Knabe
* @since 2014-07-07
*/
class StatisticsActor(task: String) extends Actor with ActorLogging {
val meta = StatisticsActor
private val archive = new ArrayBuffer[meta.Entry]()
private val startMillis: Long = Platform.currentTime
log.info(s"Start crawling by $task")
def receive: Receive = {
case entry: meta.Entry =>
archive.append(entry)
case meta.Finish =>
finish(log, context.system)
case unexpected =>
log.error(s"Received unexpected message:\\n$unexpected")
}
private def finish(log: LoggingAdapter, system: ActorSystem){
val endMillis = Platform.currentTime
val durationMillis = endMillis - startMillis
val sorted = archive.sortWith(_.durationMillis > _.durationMillis)
val summedUpMillis = archive.map(_.durationMillis).reduce(_+_)
    val messages = sorted.map(_.asMessage).mkString("\n")
    log.info("\n{}\nSummary: Crawled {} URIs in {} millis (summedUp: {} millis).", messages, sorted.length, durationMillis, summedUpMillis)
system.shutdown()
}
}
/**Meta object for the StatisticsActor*/
object StatisticsActor {
/**Creates a StatisticsActor in the given ActorSystem with the given task description.*/
def apply(system: ActorSystem, task: String): ActorRef = {
system.actorOf(Props(classOf[StatisticsActor], task), name = "statistics")
}
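  // Illustrative usage sketch (system and task names assumed):
  //   val stats = StatisticsActor(system, "example crawl")
  //   stats ! StatisticsActor.Entry(durationMillis = 120, lengthBytes = 4096, uri = "http://example.org/")
  //   stats ! StatisticsActor.Finish  // logs the summary and shuts the actor system down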
/**A message understandable by the StatisticsActor.*/
sealed trait Message
  /**A statistics entry recording how long it took to get the page at the given URI and how many bytes the response body contained.*/
case class Entry(durationMillis: Long, lengthBytes: Long, uri: String) extends Message {
def asMessage = s"$durationMillis millis for $lengthBytes Bytes from $uri"
}
/**A command to output the statistics and shut down the actor system.*/
case object Finish extends Message
}
|
ChristophKnabe/sprayreactivedemo
|
src/main/scala/crawl/StatisticsActor.scala
|
Scala
|
lgpl-3.0
| 2,174
|
/*
* Copyright 2007-2012 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb.sitemap
import net.liftweb.common._
import net.liftweb.http.{LiftRules, S}
import xml.{Elem, Text, NodeSeq}
import net.liftweb.util.Helpers
trait FlexMenuBuilder {
// a hack to use structural typing to get around the private[http] on Loc.buildItem
type StructBuildItem = {def buildItem(kids: List[MenuItem], current: Boolean, path: Boolean): Box[MenuItem]}
/**
* Override if you want a link to the current page
*/
def linkToSelf = false
/**
* Should all the menu items be expanded? Defaults to false
*/
def expandAll = false
/**
* Should any of the menu items be expanded?
*/
protected def expandAny = false
/**
* This is used to build a MenuItem for a single Loc
*/
protected def buildItemMenu[A](loc: Loc[A], currLoc: Box[Loc[_]], expandAll: Boolean): List[MenuItem] = {
val isInPath = currLoc.map { cur =>
def isInPath(loc: Loc[_]): Boolean = (cur == loc) || loc.menu.kids.exists(k => isInPath(k.loc))
isInPath(loc)
} openOr false
val kids: List[MenuItem] = if (expandAll) loc.buildKidMenuItems(loc.menu.kids) else Nil
loc.buildItem(kids, currLoc == Full(loc), isInPath).toList
}
/**
* Compute the MenuItems to be rendered by looking at the 'item' and 'group' attributes
*/
def toRender: Seq[MenuItem] = {
val res = (S.attr("item"), S.attr("group")) match {
case (Full(item), _) =>
for {
sm <- LiftRules.siteMap.toList
req <- S.request.toList
loc <- sm.findLoc(item).toList
item <- buildItemMenu(loc, req.location, expandAll)
} yield item
case (_, Full(group)) =>
for {
sm <- LiftRules.siteMap.toList
loc <- sm.locForGroup(group)
req <- S.request.toList
item <- buildItemMenu(loc, req.location, expandAll)
} yield item
case _ => renderWhat(expandAll)
}
res
}
/**
* If a group is specified and the group is empty what to display
*/
protected def emptyGroup: NodeSeq = NodeSeq.Empty
/**
* If the whole menu hierarchy is empty, what to display
*/
protected def emptyMenu: NodeSeq = Text("No Navigation Defined.")
/**
* What to display when the placeholder is empty (has no kids)
*/
protected def emptyPlaceholder: NodeSeq = NodeSeq.Empty
/**
* Take the incoming Elem and add any attributes based on
* path which is true if this Elem is the path to the current page
*/
protected def updateForPath(nodes: Elem, path: Boolean): Elem = nodes
/**
* Take the incoming Elem and add any attributes based on
* current which is a flag that indicates this is the currently viewed page
*/
protected def updateForCurrent(nodes: Elem, current: Boolean): Elem = nodes
/**
* By default, create an li for a menu item
*/
protected def buildInnerTag(contents: NodeSeq, path: Boolean, current: Boolean): Elem =
updateForCurrent(updateForPath(<li>{contents}</li>, path), current)
/**
* Render a placeholder
*/
protected def renderPlaceholder(item: MenuItem, renderInner: Seq[MenuItem] => NodeSeq): Elem = {
buildInnerTag(<xml:group><span>{item.text}</span>{renderInner(item.kids)}</xml:group>,
item.path, item.current)
}
/**
* Render a link that's the current link, but the "link to self" flag is set to true
*/
protected def renderSelfLinked(item: MenuItem, renderInner: Seq[MenuItem] => NodeSeq): Elem =
buildInnerTag(<xml:group>{renderLink(item.uri, item.text, item.path,
item.current)}{renderInner(item.kids)}</xml:group>, item.path, item.current)
/**
   * Render the currently selected menu item, but without a link back to self
*/
protected def renderSelfNotLinked(item: MenuItem, renderInner: Seq[MenuItem] => NodeSeq): Elem =
buildInnerTag(<xml:group>{renderSelf(item)}{renderInner(item.kids)}</xml:group>, item.path, item.current)
/**
* Render the currently selected menu item
*/
protected def renderSelf(item: MenuItem): NodeSeq = <span>{item.text}</span>
/**
* Render a generic link
*/
protected def renderLink(uri: NodeSeq, text: NodeSeq, path: Boolean, current: Boolean): NodeSeq =
<a href={uri}>{text}</a>
/**
* Render an item in the current path
*/
protected def renderItemInPath(item: MenuItem, renderInner: Seq[MenuItem] => NodeSeq): Elem =
buildInnerTag(<xml:group>{renderLink(item.uri, item.text, item.path,
item.current)}{renderInner(item.kids)}</xml:group>, item.path, item.current)
/**
   * Render a menu item that's neither current nor in the path
*/
protected def renderItem(item: MenuItem, renderInner: Seq[MenuItem] => NodeSeq): Elem =
buildInnerTag(<xml:group>{renderLink(item.uri, item.text, item.path,
item.current)}{renderInner(item.kids)}</xml:group>, item.path, item.current)
/**
* Render the outer tag for a group of menu items
*/
protected def renderOuterTag(inner: NodeSeq, top: Boolean): NodeSeq = <ul>{inner}</ul>
/**
* The default set of MenuItems to be rendered
*/
protected def renderWhat(expandAll: Boolean): Seq[MenuItem] =
(if (expandAll)
for {
sm <- LiftRules.siteMap;
req <- S.request
} yield sm.buildMenu(req.location).lines
else S.request.map(_.buildMenu.lines)) openOr Nil
def render: NodeSeq = {
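    // Honor the optional "level" snippet attribute: when set to N > 0, rendering
    // starts N levels down the menu hierarchy (see findKids below).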
val level: Box[Int] = for (lvs <- S.attr("level"); i <- Helpers.asInt(lvs)) yield i
val toRender: Seq[MenuItem] = this.toRender
def ifExpandCurrent(f: => NodeSeq): NodeSeq = if (expandAny || expandAll) f else NodeSeq.Empty
def ifExpandAll(f: => NodeSeq): NodeSeq = if (expandAll) f else NodeSeq.Empty
toRender.toList match {
case Nil if S.attr("group").isDefined => emptyGroup
case Nil => emptyMenu
case xs =>
def buildANavItem(i: MenuItem): NodeSeq = {
i match {
// Per Loc.PlaceHolder, placeholder implies HideIfNoKids
case m@MenuItem(text, uri, kids, _, _, _) if m.placeholder_? && kids.isEmpty => emptyPlaceholder
case m@MenuItem(text, uri, kids, _, _, _) if m.placeholder_? => renderPlaceholder(m, buildLine _)
case m@MenuItem(text, uri, kids, true, _, _) if linkToSelf => renderSelfLinked(m, k => ifExpandCurrent(buildLine(k)))
case m@MenuItem(text, uri, kids, true, _, _) => renderSelfNotLinked(m, k => ifExpandCurrent(buildLine(k)))
// Not current, but on the path, so we need to expand children to show the current one
case m@MenuItem(text, uri, kids, _, true, _) => renderItemInPath(m, buildLine _)
            case m => renderItem(m, buildLine _)
}
}
def buildLine(in: Seq[MenuItem]): NodeSeq = buildUlLine(in, false)
def buildUlLine(in: Seq[MenuItem], top: Boolean): NodeSeq =
if (in.isEmpty) {
NodeSeq.Empty
} else {
renderOuterTag(in.flatMap(buildANavItem), top)
}
val realMenuItems = level match {
case Full(lvl) if lvl > 0 =>
def findKids(cur: Seq[MenuItem], depth: Int): Seq[MenuItem] = if (depth == 0) cur
else findKids(cur.flatMap(mi => mi.kids), depth - 1)
findKids(xs, lvl)
case _ => xs
}
buildUlLine(realMenuItems, true)
}
}
}
|
lzpfmh/framework-2
|
web/webkit/src/main/scala/net/liftweb/sitemap/FlexMenuBuilder.scala
|
Scala
|
apache-2.0
| 7,925
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger, Linked}
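// CP274 mirrors CP253: the Linked companion derives this box's value from the source
// box, so the two always hold the same figure. (Descriptive note added.)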
case class CP274(value: Int) extends CtBoxIdentifier("Qualifying expenditure other machinery and plant") with CtInteger
object CP274 extends Linked[CP253, CP274]{
override def apply(source: CP253): CP274 = CP274(source.value)
}
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP274.scala
|
Scala
|
apache-2.0
| 939
|
/*
* Copyright (c) 2013 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.thrift3r.tests.misc
import com.ckkloverdos.thrift3r.tests.BaseFixture
import org.junit.Test
/**
*
* @author Christos KK Loverdos <loverdos@gmail.com>
*/
class MiscTest extends BaseFixture {
@Test def testClass() { good(BeanClass(classOf[MiscTest])) }
@Test def testOptionIntNone() { badThrift(BeanOptionInt(None)) }
@Test def testOptionIntSome() { badThrift(BeanOptionInt(Some(1))) }
@Test def testOptionIntRefSome() { good(BeanOptionIntRef(Some(1))) }
@Test def testOptionIntRefNone() { good(BeanOptionIntRef(None)) }
@Test def testOptionStructSomeSome() { good(BeanOptionStruct(Some(BeanOptionString(Some("yes"))))) }
@Test def testOptionStructSomeNone() { good(BeanOptionStruct(Some(BeanOptionString(None)))) }
@Test def testOptionStructNone() { good(BeanOptionStruct(None)) }
}
|
loverdos/thrift3r
|
src/test/scala/com/ckkloverdos/thrift3r/tests/misc/MiscTest.scala
|
Scala
|
apache-2.0
| 1,441
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.lambda.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.lambda.data.LambdaDataStore
import org.locationtech.geomesa.lambda.tools.stats.LambdaStatsTopKCommand.LambdaStatsTopKParams
import org.locationtech.geomesa.lambda.tools.{LambdaDataStoreCommand, LambdaDataStoreParams}
import org.locationtech.geomesa.tools.stats.StatsTopKCommand
import org.locationtech.geomesa.tools.stats.StatsTopKCommand.StatsTopKParams
class LambdaStatsTopKCommand extends StatsTopKCommand[LambdaDataStore] with LambdaDataStoreCommand {
override val params = new LambdaStatsTopKParams
}
object LambdaStatsTopKCommand {
@Parameters(commandDescription = "Enumerate the most frequent values in a GeoMesa feature type")
class LambdaStatsTopKParams extends StatsTopKParams with LambdaDataStoreParams
}
|
elahrvivaz/geomesa
|
geomesa-lambda/geomesa-lambda-tools/src/main/scala/org/locationtech/geomesa/lambda/tools/stats/LambdaStatsTopKCommand.scala
|
Scala
|
apache-2.0
| 1,313
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes
import org.apache.flink.table.planner.plan.utils.ExpressionDetail.ExpressionDetail
import org.apache.flink.table.planner.plan.utils.ExpressionFormat.ExpressionFormat
import org.apache.flink.table.planner.plan.utils.{ExpressionDetail, ExpressionFormat, FlinkRexUtil, RelDescriptionWriterImpl}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rex._
import org.apache.calcite.sql.SqlExplainLevel
import java.io.{PrintWriter, StringWriter}
/**
* Base class for flink relational expression.
*/
trait FlinkRelNode extends RelNode {
/**
* Returns a string which describes the detailed information of relational expression
* with attributes which contribute to the plan output.
*
* This method leverages [[RelNode#explain]] with
* [[org.apache.calcite.sql.SqlExplainLevel.EXPPLAN_ATTRIBUTES]] explain level to generate
* the description.
*/
def getRelDetailedDescription: String = {
val sw = new StringWriter
val pw = new PrintWriter(sw)
val relWriter = new RelDescriptionWriterImpl(pw)
this.explain(relWriter)
sw.toString
}
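  // The overloads below all funnel into FlinkRexUtil.getExpressionString, differing
  // only in how the expression format and the level of detail are supplied.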
private[flink] def getExpressionString(
expr: RexNode,
inFields: List[String],
localExprsTable: Option[List[RexNode]],
sqlExplainLevel: SqlExplainLevel): String = {
getExpressionString(expr, inFields, localExprsTable, ExpressionFormat.Prefix, sqlExplainLevel)
}
private[flink] def getExpressionString(
expr: RexNode,
inFields: List[String],
localExprsTable: Option[List[RexNode]],
expressionDetail: ExpressionDetail): String = {
getExpressionString(expr, inFields, localExprsTable, ExpressionFormat.Prefix, expressionDetail)
}
private[flink] def getExpressionString(
expr: RexNode,
inFields: List[String],
localExprsTable: Option[List[RexNode]],
expressionFormat: ExpressionFormat,
sqlExplainLevel: SqlExplainLevel): String = {
getExpressionString(
expr, inFields, localExprsTable, expressionFormat, convertToExpressionDetail(sqlExplainLevel))
}
private[flink] def getExpressionString(
expr: RexNode,
inFields: List[String],
localExprsTable: Option[List[RexNode]],
expressionFormat: ExpressionFormat,
expressionDetail: ExpressionDetail): String = {
FlinkRexUtil.getExpressionString(
expr, inFields, localExprsTable, expressionFormat, expressionDetail)
}
private[flink] def convertToExpressionDetail(
sqlExplainLevel: SqlExplainLevel): ExpressionDetail = {
sqlExplainLevel match {
case SqlExplainLevel.EXPPLAN_ATTRIBUTES => ExpressionDetail.Explain
case _ => ExpressionDetail.Digest
}
}
}
|
apache/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/nodes/FlinkRelNode.scala
|
Scala
|
apache-2.0
| 3,530
|
package se.culvertsoft.mgen.visualdesigner.util
import java.awt.Component
import java.awt.event.KeyEvent
import javax.swing.SwingUtilities
object ZwingUtils {
def hasAncestor(c: Component, potentialAncestor: Component): Boolean = {
SwingUtilities.isDescendingFrom(c, potentialAncestor)
}
def originatesFromChild(e: KeyEvent, potentialAncestor: Component): Boolean = {
e.getSource() match {
case c: Component => hasAncestor(c, potentialAncestor)
case _ => false
}
}
}
|
culvertsoft/mgen-visualdesigner
|
src/main/scala/se/culvertsoft/mgen/visualdesigner/util/ZwingUtils.scala
|
Scala
|
gpl-2.0
| 505
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system.hdfs
import org.apache.samza.SamzaException
import org.apache.samza.config.Config
import org.apache.samza.metrics.MetricsRegistry
import org.apache.samza.system.SystemFactory
import org.apache.samza.util.{KafkaUtil,Logging}
class HdfsSystemFactory extends SystemFactory with Logging {
def getConsumer(systemName: String, config: Config, registry: MetricsRegistry) = {
throw new SamzaException("HdfsSystemFactory does not implement a consumer")
}
def getProducer(systemName: String, config: Config, registry: MetricsRegistry) = {
val clientId = KafkaUtil.getClientId("samza-producer", config)
val metrics = new HdfsSystemProducerMetrics(systemName, registry)
new HdfsSystemProducer(systemName, clientId, config, metrics)
}
def getAdmin(systemName: String, config: Config) = {
new HdfsSystemAdmin
}
}
|
davidzchen/samza
|
samza-hdfs/src/main/scala/org/apache/samza/system/hdfs/HdfsSystemFactory.scala
|
Scala
|
apache-2.0
| 1,680
|
/*
* Copyright 2017 Secure Decisions, a division of Applied Visions, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This material is based on research sponsored by the Department of Homeland
* Security (DHS) Science and Technology Directorate, Cyber Security Division
* (DHS S&T/CSD) via contract number HHSP233201600058C.
*/
package com.secdec.codepulse.tracer.snippet
import com.secdec.codepulse.tracer.ProjectManager
import com.secdec.codepulse.util.comet.PublicCometInit
import net.liftweb.http.js.JE.JsFunc
import net.liftweb.http.js.JE.JsVar
import net.liftweb.http.js.JsCmds.jsExpToJsCmd
import net.liftweb.http.js.JsExp.strToJsExp
import net.liftweb.http.js.jquery.JqJE.Jq
import reactive.ForeachableForwardable
import reactive.Observing
/** A super simple comet actor that calls `$(document).trigger('projectUpdated')`
* whenever the given `projectManager` fires an event on its `projectListUpdates` stream.
*/
class ProjectUpdated(projectManager: ProjectManager) extends PublicCometInit with Observing {
// no visible components
def render = Nil
override def localSetup() = {
super.localSetup()
projectManager.projectListUpdates ->> partialUpdate {
Jq(JsVar("document")) ~> JsFunc("trigger", "projectUpdated")
}
}
}
|
secdec/codepulse
|
codepulse/src/main/scala/com/secdec/codepulse/tracer/snippet/ProjectUpdated.scala
|
Scala
|
apache-2.0
| 1,773
|
package org.openurp.edu.eams.teach.lesson.task.service.impl
import java.text.MessageFormat
import java.util.LinkedList
import org.beangle.commons.collection.Collections
import org.beangle.commons.dao.impl.BaseServiceImpl
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.data.model.Entity
import org.beangle.commons.lang.Strings
import org.openurp.base.Department
import org.openurp.base.Semester
import org.openurp.edu.base.Adminclass
import org.openurp.edu.eams.core.CommonAuditState
import org.openurp.edu.base.Project
import org.openurp.edu.base.Teacher
import org.openurp.edu.base.code.StdType
import org.openurp.edu.eams.core.service.SemesterService
import org.openurp.edu.eams.system.security.DataRealm
import org.openurp.edu.base.code.CourseType
import org.openurp.edu.teach.schedule.CourseActivity
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.eams.teach.lesson.service.LessonLimitService
import org.openurp.edu.eams.teach.lesson.task.dao.LessonStatDao
import org.openurp.edu.eams.teach.lesson.task.service.LessonStatService
import org.openurp.edu.eams.teach.lesson.task.util.TaskOfCourseType
import org.openurp.edu.teach.plan.CourseGroup
import org.openurp.edu.teach.plan.MajorPlan
import org.openurp.edu.eams.teach.program.major.service.MajorPlanService
import org.openurp.edu.eams.teach.program.util.PlanUtils
import org.openurp.edu.eams.teach.time.util.TermCalculator
import org.openurp.edu.eams.teach.util.AdminclassQueryBuilder
import org.openurp.edu.eams.util.stat.StatGroup
import org.openurp.edu.eams.util.stat.StatHelper
import org.openurp.edu.eams.util.stat.StatItem
import org.openurp.edu.teach.lesson.LessonLimitMeta
import org.openurp.edu.teach.code.LessonTag
import org.openurp.code.job.ProfessionalTitle
class LessonStatServiceImpl extends BaseServiceImpl with LessonStatService {
var lessonStatDao: LessonStatDao = _
var lessonLimitService: LessonLimitService = _
def countByAdminclass(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
var query = OqlBuilder.from(classOf[Lesson], "lesson").select("distinct lesson")
.join("lesson.teachClass.limitGroups", "lgroup")
.join("lgroup.items", "litem")
.where("litem.meta.id = :metaId", LessonLimitMeta.Adminclass.id)
.where("litem.content like '_%'")
.where("not exists (select tag.id from lesson.tags tag where tag.id=:guaPai)", LessonTag.PredefinedTags.GUAPAI.id)
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.where("lesson.teachDepart.id in (:departIds)", Strings.splitToInt(dataRealm.departmentIdSeq))
var commonLessons = entityDao.search(query)
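    // First pass: lessons NOT tagged GUAPAI, aggregated per admin class into
    // (lesson count, week hours, periods, credits); the second pass below covers
    // GUAPAI-tagged lessons, whose period is computed as weekHour * weeks instead.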
var tmpMap = Collections.newMap[Integer, StatItem]
for (lesson <- commonLessons) {
val adminclasses = lessonLimitService.extractAdminclasses(lesson.getTeachClass)
for (adminclass <- adminclasses) {
        // Collections.newMap yields a scala.collection.mutable.Map, whose get returns an
        // Option; the Java-style null check could never fire, so use getOrElseUpdate.
        val item = tmpMap.getOrElseUpdate(adminclass.id, new StatItem(adminclass.id, 0, 0f, 0f, 0f))
val countors = item.getCountors
countors(0) = countors(0).asInstanceOf[java.lang.Integer] + 1
countors(1) = countors(1).asInstanceOf[java.lang.Float] + lesson.getCourse.getWeekHour
countors(2) = countors(2).asInstanceOf[java.lang.Float] + lesson.getCourse.getPeriod
countors(3) = countors(3).asInstanceOf[java.lang.Float] + lesson.getCourse.getCredits
}
}
val commonTasks = tmpMap.values
query = OqlBuilder.from(classOf[Lesson], "lesson").select("distinct lesson")
.join("lesson.teachClass.limitGroups", "lgroup")
.join("lgroup.items", "litem")
.where("litem.meta.id = :metaId", LessonLimitMeta.Adminclass.id)
.where("litem.content like '_%'")
.where("exists (select tag.id from lesson.tags tag where tag.id=:guaPai)", LessonTag.PredefinedTags.GUAPAI.id)
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.where("lesson.teachDepart.id in (:departIds)", Strings.splitToInt(dataRealm.departmentIdSeq))
commonLessons = entityDao.search(query)
tmpMap = Collections.newMap[Integer, StatItem]
for (lesson <- commonLessons) {
val adminclasses = lessonLimitService.extractAdminclasses(lesson.getTeachClass)
for (adminclass <- adminclasses) {
        // same Option-returning map as above: use getOrElseUpdate instead of a null check
        val item = tmpMap.getOrElseUpdate(adminclass.id, new StatItem(adminclass.id, 0, 0f, 0f, 0f))
val countors = item.getCountors
countors(0) = countors(0).asInstanceOf[java.lang.Integer] + 1
countors(1) = countors(1).asInstanceOf[java.lang.Float] + lesson.getCourse.getWeekHour
countors(2) = countors(2).asInstanceOf[java.lang.Float] +
lesson.getCourse.getWeekHour * lesson.getCourseSchedule.getWeeks
countors(3) = countors(3).asInstanceOf[java.lang.Float] + lesson.getCourse.getCredits
}
}
val gpTasks = tmpMap.values
val statMap = Collections.newMap[Any, Any]
var iter = commonTasks.iterator
while (iter.hasNext) {
val element = iter.next().asInstanceOf[StatItem]
statMap.put(element.what, element)
}
iter = gpTasks.iterator
while (iter.hasNext) {
val pgStat = iter.next().asInstanceOf[StatItem]
      val stat = statMap.get(pgStat.what).orNull.asInstanceOf[StatItem]
if (null == stat) {
statMap.put(pgStat.what, new StatItem(pgStat.what, java.lang.Integer.valueOf(1), pgStat.countors(1),
pgStat.countors(2), pgStat.countors(3)))
} else {
stat.countors(0) = java.lang.Integer.valueOf(stat.countors(0).asInstanceOf[Number].intValue() +
1)
stat.countors(1) = new java.lang.Float(pgStat.countors(1).asInstanceOf[Number].floatValue() +
stat.countors(1).asInstanceOf[Number].floatValue())
stat.countors(2) = java.lang.Integer.valueOf(pgStat.countors(2).asInstanceOf[Number].intValue() +
stat.countors(2).asInstanceOf[Number].intValue())
stat.countors(3) = new java.lang.Float(pgStat.countors(3).asInstanceOf[Number].floatValue() +
stat.countors(3).asInstanceOf[Number].floatValue())
}
}
val adminClasses = entityDao.findBy(classOf[Adminclass], "id", statMap.keySet)
for (adminClass <- adminClasses) {
      val stat = statMap(adminClass.id).asInstanceOf[StatItem]
stat.what = adminClass
}
Collections.newBuffer(statMap.values)
}
def countByTeacher(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
val query = OqlBuilder.from(classOf[Lesson], "lesson")
query.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
"teacher.id," +
"count(*)," +
"sum(lesson.course.weekHour)," +
"sum(lesson.course.weekHour * (lesson.schedule.endWeek - lesson.schedule.startWeek + 1))," +
"sum(lesson.course.credits)" +
")")
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.join("lesson.teachers", "teacher")
.groupBy("teacher.id")
addLessonDataRealm(query, dataRealm)
val stats = entityDao.search(query)
setStatEntities(stats, classOf[Teacher])
}
def countByCourseType(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
val query = OqlBuilder.from(classOf[Lesson], "lesson")
query.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
"lesson.courseType.id," +
"count(*)," +
"sum(lesson.course.weekHour)," +
"sum(lesson.course.weekHour * (lesson.schedule.endWeek - lesson.schedule.startWeek + 1))," +
"sum(lesson.course.credits)" +
")")
.where("lesson.semester=:semester", semester)
.where("lesson.project = :project", project)
.groupBy("lesson.courseType.id")
addLessonDataRealm(query, dataRealm)
val stats = entityDao.search(query)
setStatEntities(stats, classOf[CourseType])
}
def countByStdType(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
val entityQuery = OqlBuilder.from(classOf[Lesson], "lesson")
entityQuery.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
"lesson.teachClass.stdType.id," +
"count(*)," +
"sum(lesson.course.weekHour)," +
"sum(lesson.course.weekHour * (lesson.schedule.endWeek - lesson.schedule.startWeek + 1))," +
"sum(lesson.course.credits)" +
")")
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.groupBy("lesson.teachClass.stdType.id")
val stats = entityDao.search(entityQuery)
setStatEntities(stats, classOf[StdType])
}
def countByTeachDepart(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
val entityQuery = OqlBuilder.from(classOf[Lesson], "lesson")
entityQuery.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
"lesson.teachDepart.id," +
"count(*)," +
"sum(lesson.course.weekHour)," +
"sum(lesson.course.weekHour * (lesson.schedule.endWeek - lesson.schedule.startWeek + 1))," +
"sum(lesson.course.credits)" +
")")
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.groupBy("lesson.teachDepart.id")
addLessonDataRealm(entityQuery, dataRealm)
val stats = entityDao.search(entityQuery)
setStatEntities(stats, classOf[Department])
}
def statCourseTypeConfirm(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
statTaskConfirm(project, semester, dataRealm, "lesson.courseType.id", classOf[CourseType])
}
def statTeachDepartConfirm(project: Project, semester: Semester, dataRealm: DataRealm): Seq[_] = {
statTaskConfirm(project, semester, dataRealm, "lesson.teachDepart.id", classOf[Department])
}
private def statQuery(project: Project,
semester: Semester,
dataRealm: DataRealm,
item: String,
index: Int): OqlBuilder[StatItem] = {
val entityQuery = OqlBuilder.from(classOf[Lesson], "lesson")
val arr = Array("0", "0", "0", "0")
arr(index) = "count(*)"
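    // Only the counter at `index` receives count(*); the other three stay zero, so the
    // four per-audit-state queries in statTaskConfirm each fill a different slot and
    // can be merged into a single row per entity.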
entityQuery.select("select new org.openurp.edu.eams.util.stat.StatItem(" +
item +
"," +
Strings.join(arr, ",") +
")")
.where("lesson.semester = :semester", semester)
.where("lesson.project = :project", project)
.groupBy(item)
addLessonDataRealm(entityQuery, dataRealm)
entityQuery.asInstanceOf[OqlBuilder[StatItem]]
}
private def statTaskConfirm(project: Project,
semester: Semester,
dataRealm: DataRealm,
item: String,
entityClass: Class[_]): Seq[_] = {
var entityQuery = statQuery(project, semester, dataRealm, item, 0)
entityQuery.where("lesson.auditStatus = :status", CommonAuditState.UNSUBMITTED)
val unsubmittedStats = entityDao.search(entityQuery)
val statMap = buildStatMap(unsubmittedStats)
entityQuery = statQuery(project, semester, dataRealm, item, 1)
entityQuery.where("lesson.auditStatus = :status", CommonAuditState.SUBMITTED)
val submittedStats = entityDao.search(entityQuery)
entityQuery = statQuery(project, semester, dataRealm, item, 2)
entityQuery.where("lesson.auditStatus = :status", CommonAuditState.ACCEPTED)
val acceptedStats = entityDao.search(entityQuery)
entityQuery = statQuery(project, semester, dataRealm, item, 3)
entityQuery.where("lesson.auditStatus = :status", CommonAuditState.REJECTED)
val rejectedStats = entityDao.search(entityQuery)
for (submitted <- submittedStats) {
      val existItem = statMap.get(submitted.what).orNull.asInstanceOf[StatItem]
if (existItem == null) {
statMap.put(submitted.what, submitted)
} else {
existItem.countors(1) = submitted.countors(1)
}
}
for (accepted <- acceptedStats) {
      val existItem = statMap.get(accepted.what).orNull.asInstanceOf[StatItem]
if (existItem == null) {
statMap.put(accepted.what, accepted)
} else {
existItem.countors(2) = accepted.countors(2)
}
}
for (rejected <- rejectedStats) {
      val existItem = statMap.get(rejected.what).orNull.asInstanceOf[StatItem]
if (existItem == null) {
statMap.put(rejected.what, rejected)
} else {
existItem.countors(3) = rejected.countors(3)
}
}
setStatEntities(statMap, entityClass)
}
private def buildStatMap(stats: Iterable[_]): collection.mutable.Map[Any, Any] = {
val statMap = Collections.newMap[Any, Any]
var iter = stats.iterator
while (iter.hasNext) {
val element = iter.next().asInstanceOf[StatItem]
statMap.put(element.what, element)
}
statMap
}
private def setStatEntities(statMap: collection.mutable.Map[Any, Any], entityClass: Class[_]): Seq[_] = {
val ec = entityClass.asInstanceOf[Class[Entity[_]]]
val entities = entityDao.findBy(ec, "id", statMap.keySet)
var iter = entities.iterator
while (iter.hasNext) {
val entity = iter.next().asInstanceOf[Entity[_]]
      val stat = statMap(entity.id).asInstanceOf[StatItem]
stat.what = entity
}
Collections.newBuffer(statMap.values)
}
private def setStatEntities(stats: Iterable[_], entityClass: Class[_]): Seq[_] = {
val statMap = buildStatMap(stats)
setStatEntities(statMap, entityClass)
}
private def addAdminclassDataRealm(query: OqlBuilder[_], dataRealm: DataRealm) {
if (null != dataRealm) {
if (Strings.isNotBlank(dataRealm.studentTypeIdSeq)) {
query.where(MessageFormat.format("adminClass.stdType.id (:stdTypeIds{0})", new java.lang.Long(System.currentTimeMillis())),
Strings.splitToInt(dataRealm.studentTypeIdSeq))
}
if (Strings.isNotBlank(dataRealm.departmentIdSeq)) {
query.where(MessageFormat.format("adminClass.department.id in(:departIds)", new java.lang.Long(System.currentTimeMillis())),
Strings.splitToInt(dataRealm.departmentIdSeq))
}
}
}
private def addLessonDataRealm(query: OqlBuilder[_], dataRealm: DataRealm) {
if (null != dataRealm) {
if (Strings.isNotBlank(dataRealm.departmentIdSeq)) {
query.where("lesson.teachDepart.id in(:departIds)", Strings.splitToInt(dataRealm.departmentIdSeq))
}
}
}
def statTeacherTitle(project: Project, semesters: List[_]): List[_] = {
val stats = lessonStatDao.statTeacherTitle(semesters)
new StatHelper(entityDao).replaceIdWith(stats, Array(classOf[Semester], classOf[ProfessionalTitle]))
stats
}
def setLessonStatDao(lessonStatDao: LessonStatDao) {
this.lessonStatDao = lessonStatDao
}
private var majorPlanService: MajorPlanService = _
private var semesterService: SemesterService = _
def getTaskOfCourseTypes(project: Project,
semester: Semester,
dataRealm: DataRealm,
courseTypes: Iterable[CourseType]): Seq[TaskOfCourseType] = {
val courseTypeSet = Collections.newSet[CourseType]
courseTypeSet ++= (courseTypes)
    var plans: Seq[MajorPlan] = Seq.empty // avoid an NPE below when the data realm filters are empty
if (!Strings.isEmpty(dataRealm.studentTypeIdSeq) && !Strings.isEmpty(dataRealm.departmentIdSeq)) {
val now = new java.util.Date()
val planQuery = OqlBuilder.from(classOf[MajorPlan], "plan")
planQuery.where("plan.program.stdType.id in (:stdTypeIds)", Strings.splitToInt(dataRealm.studentTypeIdSeq))
.where("plan.program.department.id in (:departIds)", Strings.splitToInt(dataRealm.departmentIdSeq))
.where("plan.program.major.project = :project", project)
.where(":now < plan.program.invalidOn or plan.program.invalidOn is null", now)
plans = entityDao.search(planQuery)
}
val termCalc = new TermCalculator(semesterService, semester)
val taskOfCourseTypes = Collections.newBuffer[TaskOfCourseType]
for (plan <- plans) {
val adminClasses = entityDao.search(AdminclassQueryBuilder.build(plan))
val term = termCalc.getTerm(plan.program.beginOn, true)
      // converted from Java: the original `continue` was dropped, leaving a no-op guard;
      // skip out-of-range terms by guarding the loop body instead
      if (term >= 0 && term <= plan.terms.intValue) {
var iterator = plan.groups.iterator
while (iterator.hasNext) {
val group = iterator.next()
val credits = PlanUtils.getGroupCredits(group, term)
val work = courseTypeSet.contains(group.courseType) || !Collections.isNotEmpty(group.planCourses) || credits != 0f
if (work) {
for (adminClass <- adminClasses) {
taskOfCourseTypes += new TaskOfCourseType(group.courseType, adminClass, credits)
}
}
        }
      }
}
taskOfCourseTypes
}
}
|
openurp/edu-eams-webapp
|
schedule/src/main/scala/org/openurp/edu/eams/teach/lesson/task/service/impl/LessonStatServiceImpl.scala
|
Scala
|
gpl-3.0
| 16,665
|
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import org.ensime.api._
import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
import scala.reflect.internal.util.{ OffsetPosition, RangePosition }
class ImplicitAnalyzerSpec extends EnsimeSpec
with IsolatedRichPresentationCompilerFixture
with RichPresentationCompilerTestUtils
with ReallyRichPresentationCompilerFixture {
def original = EnsimeConfigFixture.EmptyTestProject
def getImplicitDetails(cc: RichPresentationCompiler, content: String) = {
val file = srcFile(cc.config, "abc.scala", contents(content))
cc.askLoadedTyped(file)
val pos = new RangePosition(file, 0, 0, file.length)
val dets = new ImplicitAnalyzer(cc).implicitDetails(pos)
dets.map {
case c: ImplicitConversionInfo => (
"conversion",
content.substring(c.start, c.end),
c.fun.name
)
case c: ImplicitParamInfo => (
"param",
content.substring(c.start, c.end),
c.fun.name,
c.params.map { p => p.name },
c.funIsImplicit
)
}
}
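  // Conversions are projected to 3-tuples (kind, source text, function name); implicit
  // params to 5-tuples that also carry the parameter names and whether the applied
  // function is itself implicit; compare with the assertions below.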
"ImplicitAnalyzer" should "render implicit conversions" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Test {}
object I {
implicit def StringToTest(v: String): Test = new Test
val t: Test = "sample";
}
"""
)
dets should ===(List(
("conversion", "\\"sample\\"", "StringToTest")
))
}
}
it should "render implicit parameters passed to implicit conversion functions" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Test {}
class Thing {}
object I {
implicit def myThing = new Thing
implicit def StringToTest(v: String)(implicit th: Thing): Test = new Test
val t: Test = "sample"
}
"""
)
dets should ===(List(
("param", "\\"sample\\"", "StringToTest", List("myThing"), true),
("conversion", "\\"sample\\"", "StringToTest")
))
}
}
it should "render implicit parameters" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Thing {}
class Thong {}
object I {
implicit def myThing = new Thing
implicit val myThong = new Thong
def zz(u: Int)(v: String)(implicit s: Thing, t: Thong) = u
def yy(implicit s: Thing) = s
val t = zz(1)("abc") // Two explicit applications
val z = yy // Zero explicit application
}
"""
)
dets should ===(List(
("param", "zz(1)(\\"abc\\")", "zz", List("myThing", "myThong"), false),
("param", "yy", "yy", List("myThing"), false)
))
}
}
it should "work with offset positions" in {
withPresCompiler { (config, cc) =>
val content = """
package com.example
class Test {}
object I {
implicit def StringToTest(v: String): Test = new Test
val t: Test = "sample"/*1*/;
}
"""
val file = srcFile(cc.config, "abc.scala", content)
cc.askLoadedTyped(file)
val implicitPos = content.indexOf("/*1*/")
val pos = new OffsetPosition(file, implicitPos)
val dets = new ImplicitAnalyzer(cc).implicitDetails(pos)
dets should have length 1
val pos1 = new OffsetPosition(file, implicitPos + 1)
val dets1 = new ImplicitAnalyzer(cc).implicitDetails(pos1)
dets1 shouldBe empty
}
}
}
|
espinhogr/ensime-server
|
core/src/it/scala/org/ensime/core/ImplicitAnalyzerSpec.scala
|
Scala
|
gpl-3.0
| 3,942
|
package com.meteorcode.pathway.io
import java.io.{
File,
InputStream,
OutputStream,
FileInputStream,
FileOutputStream,
IOException
}
import scala.util.{Try, Success, Failure}
import scala.util.control.NonFatal
/**
* A [[FileHandle]] into a regular file.
*
* DON'T MAKE THESE - if you want to handle a file, please get it from
* [[ResourceManager.handle ResourceManager.handle()]].
*
* The FileHandle system is intended to allow you to treat exotic resources,
 * such as files in zip/jar archives or resources accessed over the network,
* as though they were on the filesystem as regular files, but this only works
 * if you treat all files you have to access as instances of [[FileHandle]].
* If you ever refer to files as [[FilesystemFileHandle]], [[ZipFileHandle]],
* or [[JarFileHandle]] explicitly in your code, you are doing the Wrong Thing
* and negating a whole lot of time and effort I put into this system.
* So don't do that.
*
 * To reiterate, do NOT call the constructor for this class directly.
*
 * @param virtualPath the virtual path to the file in the fake filesystem
 * @param realPath the physical path to the file on the real filesystem
 * @param back a [[java.io.File]] representing the file in the filesystem
* @param manager An [[ResourceManager ResourceManager]] managing
* this FileHandle
* @author Hawk Weisman
* @see [[ResourceManager ResourceManager]]
* @see [[FileHandle]]
* @since v2.0.0
*/
class FilesystemFileHandle (
virtualPath: String
, realPath: String
, private[this] val back: File
, manager: Option[ResourceManager]
) extends FileHandle(virtualPath, manager) {
require(realPath != "", "Physical path cannot be empty.")
def this(virtualPath: String, realPath: String, manager: ResourceManager)
= this (virtualPath, realPath, new File(realPath), Some(manager))
def this( virtualPath: String, realPath: String
, manager: Option[ResourceManager])
= this (virtualPath, realPath, new File(realPath), manager)
override val file = Some(back)
@inline
@throws[IOException]
private[this] def getManager
= this.manager
.getOrElse(throw new IOException(
"FATAL: ResourceManager instance required!"))
override def read: Try[InputStream]
= if (!exists || isDirectory) {
Failure(new IOException(s"FileHandle $path is not readable."))
} else Try(new FileInputStream(back))
override def exists: Boolean = back.exists
override lazy val isDirectory: Boolean
= back.isDirectory
override def length: Long
= if (isDirectory) 0 else back.length
override def list: Try[Seq[FileHandle]]
= Try(if (isDirectory) {
back.list map {
case isArchiveRE(name, ".jar") ⇒
new JarFileHandle( "/"
, new File(s"$assumePhysPath/$name.jar")
, manager)
case isArchiveRE(name, ".zip") ⇒
new ZipFileHandle( "/"
, new File(s"$assumePhysPath/$name.zip")
, manager)
case item ⇒
new FilesystemFileHandle( s"$path/$item"
, s"$assumePhysPath/$item"
, manager)
}
} else { Seq() })
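  // Note: `list` transparently wraps .jar/.zip entries in archive-specific handles,
  // which is what lets callers walk into archives as if they were plain directories.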
override lazy val physicalPath: Some[String]
= Some(realPath.replace('/', File.separatorChar))
override def delete: Boolean
= if (writable && exists) back.delete else false
override def write(append: Boolean): Option[OutputStream]
= if (writable) { Some(new FileOutputStream(back, append)) } else None
@throws[IOException]("if something unexpected went wrong")
override def writable: Boolean // TODO: should this be Try[Boolean]?
= manager.exists(_ isPathWritable this.path) && // is the path writable at fs level?
!isDirectory && // directories are not writable
(back.canWrite || // file exists and is writable, or...
(Try(back.createNewFile()) match { // try to create the file
case Failure(i: IOException) // if not permitted to write, that's OK
if i.getMessage == "Permission denied" ⇒ false
case Failure(NonFatal(e)) ⇒ throw new IOException(
s"Could not create FileHandle $this, an exception occured.", e)
case Success(result) ⇒ result
})
)
}
|
MeteorCode/Pathway
|
src/main/scala/com/meteorcode/pathway/io/FilesystemFileHandle.scala
|
Scala
|
mit
| 4,302
|
import sbt._
import Keys._
import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys
import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseCreateSrc
import Dependencies._
import de.johoop.findbugs4sbt.FindBugs._
import org.scalafmt.sbt.ScalaFmtPlugin.autoImport._
import de.johoop.findbugs4sbt.ReportType
object NakadiClient extends Build {
EclipseKeys.createSrc := EclipseCreateSrc.Default + EclipseCreateSrc.Resource
EclipseKeys.withSource := true
private val commonSettings = net.virtualvoid.sbt.graph.DependencyGraphSettings.graphSettings
def whereToPublishTo(isItSnapshot:Boolean) = {
val nexus = "https://maven.zalando.net/"
if (isItSnapshot)
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "content/repositories/releases")
}
  val defaultOptions = Seq(
"-deprecation", // Emit warning and location for usages of deprecated APIs.
"-feature", // Emit warning and location for usages of features that should be imported explicitly.
"-unchecked", // Enable additional warnings where generated code depends on assumptions.
"-Xfatal-warnings", // Fail the compilation if there are any warnings.
"-Xlint", // Enable recommended additional warnings.
"-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver.
"-Ywarn-dead-code", // Warn when dead code is identified.
"-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
"-Ywarn-nullary-override", // Warn when non-nullary overrides nullary, e.g. def foo() over def foo.
"-Ywarn-numeric-widen" // Warn when numerics are widened.
)
lazy val root = project.in(file("."))
.settings(publishTo := whereToPublishTo(isSnapshot.value))
.aggregate(api, client, it)
lazy val api = withDefaults(
"nakadi-klients-api",
project.in(file("api"))
).settings(libraryDependencies ++= apiDeps)
lazy val client = withDefaults(
"nakadi-klients",
project.in(file("client")).dependsOn(api)
).settings(libraryDependencies ++= clientDeps)
lazy val it = withDefaults(
"nakadi-klients-integration-test",
project.in(file("it")).dependsOn(api, client)
).settings(libraryDependencies ++= itDeps)
def withDefaults(projectName:String, project:sbt.Project)={
project
.settings(findbugsSettings: _*)
.settings(
name := projectName,
organization := "org.zalando.nakadi.client",
version := "2.0.5",
crossPaths := false,
scalaVersion := "2.11.8",
findbugsReportType := Some(ReportType.FancyHtml),
findbugsReportPath := Some(target.value / "findbugs-report.html"),
scalafmtConfig := Some(baseDirectory.value / "../.scalafmt"),
publishTo := whereToPublishTo(isSnapshot.value),
resolvers += Resolver.mavenLocal,
resolvers += "Maven Central Server" at "http://repo1.maven.org/maven2",
scalacOptions ++= defaultOptions,
publishArtifact in (Test, packageBin) := false)
.configs(Configs.all: _*)
/*.settings(
publishArtifact in (Compile, packageDoc) := true //To Publish or Not to Publish scala doc jar
,publishArtifact in (Compile, packageSrc) := true //To Publish or Not to Publish src jar
,publishArtifact := publish // To Publish or Not to Publish
,publishArtifact in Test := false //To Publish or Not to Publish test jar
,sources in (Compile,doc) := Seq.empty
)
*/
}
}
|
zalando/nakadi-klients
|
project/Build.scala
|
Scala
|
mit
| 3,483
|
/*******************************************************************************
* Copyright 2010 Maxime Lévesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.dsl.internal
import org.squeryl.Queryable
import org.squeryl.internals.ResultSetMapper
import java.sql.ResultSet
trait JoinedQueryable[A] extends Queryable[A] {
def name =
throw new UnsupportedOperationException('OuterJoinedQueryable + " is a temporary class, not meant to become part of the ast")
private[squeryl] def give(resultSetMapper: ResultSetMapper, rs: ResultSet) =
throw new UnsupportedOperationException('OuterJoinedQueryable + " is a temporary class, not meant to become part of the ast")
}
class OuterJoinedQueryable[A](val queryable: Queryable[A], val leftRightOrFull: String) extends JoinedQueryable[Option[A]]{
/**
* Allowing an implicit conversion from OuterJoinedQueryable to OptionalQueryable will trigger another conversion
* to InnerJoinedQueryable inside org.squeryl.dsl.boilerplate.JoinSignatures#join. This also allows us to inhibit
* the table without using Option[Option[T]] in our results
*/
def inhibitWhen(inhibited: Boolean) = {
this.inhibited = inhibited
this
}
}
class InnerJoinedQueryable[A](val queryable: Queryable[A], val leftRightOrFull: String) extends JoinedQueryable[A]
|
rreckel/Squeryl
|
src/main/scala/org/squeryl/dsl/internal/JoinedQueryable.scala
|
Scala
|
apache-2.0
| 1,983
|
package org.jetbrains.plugins.scala.lang.resolve2
/**
* Pavel.Fatin, 02.02.2010
*/
class PredefLiteralTest extends ResolveTestBase {
override def folderPath: String = {
super.folderPath + "predef/literal/"
}
//TODO answer?
// def testBoolean = doTest
//TODO answer?
// def testByte = doTest
//TODO answer?
// def testChar = doTest
//TODO answer?
// def testDouble = doTest
//TODO answer?
// def testFloat = doTest
//TODO answer?
// def testInt = doTest
//TODO answer?
// def testLong = doTest
//TODO answer?
def testPrimitive() = doTest()
//TODO answer?
// def testTheNull = doTest
//TODO answer?
// def testShort = doTest
//TODO answer?
// def testString = doTest
//TODO answer?
// def testSymbol = doTest
}
|
ilinum/intellij-scala
|
test/org/jetbrains/plugins/scala/lang/resolve2/PredefLiteralTest.scala
|
Scala
|
apache-2.0
| 758
|
/*
* Copyright 2013 Twitter inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.hbase
import org.apache.hadoop.hbase.HBaseTestingUtility
import org.apache.hadoop.hbase.client.HTablePool
import com.twitter.storehaus.testing.CloseableCleanup
import com.twitter.util.Closable
/**
* @author MansurAshraf
* @since 9/8/13
*/
trait DefaultHBaseCluster[C <: Closable] extends CloseableCleanup[C] {
val quorumNames = Seq("localhost:2181")
val table = "summing_bird"
val columnFamily = "sb"
val column = "aggregate"
val createTable = true
val testingUtil = new HBaseTestingUtility()
val conf = testingUtil.getConfiguration
val pool = new HTablePool(conf, 1)
override def cleanup() = {
super.cleanup()
/* testingUtil.shutdownMiniZKCluster()
testingUtil.shutdownMiniCluster()*/
}
}
|
tresata/storehaus
|
storehaus-hbase/src/test/scala/com/twitter/storehaus/hbase/DefaultHBaseCluster.scala
|
Scala
|
apache-2.0
| 1,381
|
package org.atnos.eff
package addon.scalaz
import scalaz._
object validate extends validate
trait validate {
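  // Bridges the cats-based Validate interpreters in org.atnos.eff.all to scalaz
  // result types: ValidationNel and \/ in place of ValidatedNel and Either.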
def runValidationNel[R, U, E, A](r: Eff[R, A])(implicit m: Member.Aux[Validate[E, ?], R, U]): Eff[U, ValidationNel[E, A]] =
org.atnos.eff.all.runValidatedNel(r).map(_.fold(ls => Validation.failure(NonEmptyList(ls.head, ls.tail:_*)), Validation.success))
  def runNelDisjunction[R, U, E, A](r: Eff[R, A])(implicit m: Member.Aux[Validate[E, ?], R, U]): Eff[U, NonEmptyList[E] \/ A] =
    org.atnos.eff.all.runNel(r).map(_.fold(ls => \/.left(NonEmptyList(ls.head, ls.tail:_*)), \/.right))
  def runMapDisjunction[R, U, E, L : Semigroup, A](r: Eff[R, A])(map: E => L)(implicit m: Member.Aux[Validate[E, ?], R, U]): Eff[U, L \/ A] =
    org.atnos.eff.all.runMap(r)(map)(catsSemigroup(Semigroup[L]), m).map(_.fold(\/.left, \/.right))
}
|
etorreborre/eff
|
scalaz/src/main/scala/org/atnos/eff/addon/scalaz/validate.scala
|
Scala
|
mit
| 852
|
package com.ing.baker.runtime.akka.actor
import akka.actor.{ActorRef, ActorSystem, Address}
import akka.cluster.Cluster
import akka.cluster.sharding.ShardRegion._
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings, ShardRegion}
import akka.management.cluster.bootstrap.ClusterBootstrap
import akka.management.scaladsl.AkkaManagement
import akka.util.Timeout
import cats.data.NonEmptyList
import cats.effect.IO
import com.ing.baker.il.sha256HashCode
import com.ing.baker.runtime.akka.AkkaBakerConfig
import com.ing.baker.runtime.akka.actor.ClusterBakerActorProvider._
import com.ing.baker.runtime.akka.actor.process_index.ProcessIndex.ActorMetadata
import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol._
import com.ing.baker.runtime.akka.actor.process_index._
import com.ing.baker.runtime.akka.actor.serialization.BakerSerializable
import com.ing.baker.runtime.model.InteractionManager
import com.ing.baker.runtime.recipe_manager.RecipeManager
import com.ing.baker.runtime.serialization.Encryption
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.duration._
import scala.concurrent.{Await, TimeoutException}
object ClusterBakerActorProvider {
case class GetShardIndex(entityId: String) extends BakerSerializable
sealed trait ClusterBootstrapMode
case class SeedNodesList(nel: NonEmptyList[Address]) extends ClusterBootstrapMode
case object ServiceDiscovery extends ClusterBootstrapMode
  /**
   * Calculates the name of the ProcessIndex shard actor for a recipe instance:
   * the sha256 hash of the recipe instance id is taken modulo nrOfShards, so at most
   * nrOfShards index actors are created, and all the petrinet actors fall under them.
   * Note, the nrOfShards used here has to be aligned with the nrOfShards used in the shardIdExtractor
   */
def entityId(recipeInstanceId: String, nrOfShards: Int): String =
s"index-${Math.abs(sha256HashCode(recipeInstanceId) % nrOfShards)}"
  // extracts the actor id -> message from the incoming message
  // the entity id is derived from the recipe instance id via entityId above
def entityIdExtractor(nrOfShards: Int): ExtractEntityId = {
case msg: ProcessIndexMessage => (entityId(msg.recipeInstanceId, nrOfShards), msg)
case GetShardIndex(entityId) => (entityId, GetIndex)
case msg => throw new IllegalArgumentException(s"Message of type ${msg.getClass} not recognized")
}
// extracts the shard id from the incoming message
def shardIdExtractor(nrOfShards: Int): ExtractShardId = {
case msg: ProcessIndexMessage => Math.abs(sha256HashCode(msg.recipeInstanceId) % nrOfShards).toString
case GetShardIndex(entityId) => entityId.split(s"index-").last
case ShardRegion.StartEntity(entityId) => entityId.split(s"index-").last
case msg => throw new IllegalArgumentException(s"Message of type ${msg.getClass} not recognized")
}
val recipeManagerName = "RecipeManager"
}
class ClusterBakerActorProvider(
nrOfShards: Int,
retentionCheckInterval: FiniteDuration,
actorIdleTimeout: Option[FiniteDuration],
journalInitializeTimeout: FiniteDuration,
seedNodes: ClusterBootstrapMode,
ingredientsFilter: List[String],
configuredEncryption: Encryption,
timeouts: AkkaBakerConfig.Timeouts,
) extends BakerActorProvider with LazyLogging {
def initialize(implicit system: ActorSystem): Unit = {
/**
* Join cluster after waiting for the persistenceInit actor, otherwise terminate here.
*/
try {
Await.result(Util.persistenceInit(journalInitializeTimeout), journalInitializeTimeout)
} catch {
case _: TimeoutException => throw new IllegalStateException(s"Timeout when trying to initialize the akka journal, waited $journalInitializeTimeout")
}
// join the cluster
logger.info("PersistenceInit actor started successfully, joining cluster seed nodes {}", seedNodes)
seedNodes match {
case SeedNodesList(nel) =>
Cluster.get(system).joinSeedNodes(nel.toList)
case ServiceDiscovery =>
AkkaManagement(system).start()
ClusterBootstrap(system).start()
}
}
override def createProcessIndexActor(interactionManager: InteractionManager[IO],
recipeManager: RecipeManager)(implicit actorSystem: ActorSystem): ActorRef = {
val roles = Cluster(actorSystem).selfRoles
ClusterSharding(actorSystem).start(
typeName = "ProcessIndexActor",
entityProps = ProcessIndex.props(actorIdleTimeout, Some(retentionCheckInterval), configuredEncryption, interactionManager, recipeManager, ingredientsFilter),
settings = {
if (roles.contains("state-node"))
ClusterShardingSettings(actorSystem).withRole("state-node")
else
ClusterShardingSettings(actorSystem)
},
extractEntityId = ClusterBakerActorProvider.entityIdExtractor(nrOfShards),
extractShardId = ClusterBakerActorProvider.shardIdExtractor(nrOfShards)
)
}
def getAllProcessesMetadata(actor: ActorRef)(implicit system: ActorSystem, timeout: FiniteDuration): Seq[ActorMetadata] = {
import akka.pattern.ask
import system.dispatcher
implicit val akkaTimeout: Timeout = timeout
    // shard ids produced by entityId range over 0 until nrOfShards
    val futures = (0 until nrOfShards).map { shard => actor.ask(GetShardIndex(s"index-$shard")).mapTo[Index].map(_.entries) }
val collected: Seq[ActorMetadata] = Util.collectFuturesWithin(futures, timeout, system.scheduler).flatten
collected
}
}
|
ing-bank/baker
|
core/akka-runtime/src/main/scala/com/ing/baker/runtime/akka/actor/ClusterBakerActorProvider.scala
|
Scala
|
mit
| 5,716
|
package org.tmoerman.plongeur.tda
import com.holdenkarau.spark.testing.SharedSparkContext
import org.scalatest.{FunSuite, Matchers, FlatSpec}
/**
* @author Thomas Moerman
*/
class TDASpec extends FunSuite with SharedSparkContext {
}
|
tmoerman/plongeur
|
scala/plongeur-spark/src/test/scala/org/tmoerman/plongeur/tda/TDASpec.scala
|
Scala
|
mit
| 239
|
/**
* Copyright (C) 2011 Typesafe Inc. <http://typesafe.com>
*/
package com.typesafe.config.impl
import org.junit.Assert._
import org.junit.Test
import com.typesafe.config.ConfigException
import language.implicitConversions
class TokenizerTest extends TestUtils {
// FIXME most of this file should be using this method
private def tokenizerTest(expected: List[Token], s: String) {
assertEquals(List(Tokens.START) ++ expected ++ List(Tokens.END),
tokenizeAsList(s))
}
@Test
def tokenizeEmptyString() {
assertEquals(List(Tokens.START, Tokens.END),
tokenizeAsList(""))
}
@Test
def tokenizeNewlines() {
assertEquals(List(Tokens.START, tokenLine(1), tokenLine(2), Tokens.END),
tokenizeAsList("\\n\\n"))
}
@Test
def tokenizeAllTypesNoSpaces() {
// all token types with no spaces (not sure JSON spec wants this to work,
// but spec is unclear to me when spaces are required, and banning them
// is actually extra work).
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenString("bar"), tokenTrue, tokenDouble(3.14), tokenFalse,
tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")),
tokenOptionalSubstitution(tokenUnquoted("x.y")),
tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""",:=}{][+="foo"""" + "\\"\\"\\"bar\\"\\"\\"" + """true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\\n"))
}
@Test
def tokenizeAllTypesWithSingleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenString("bar"), tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" """ + "\\"\\"\\"bar\\"\\"\\"" + """ 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\\n "))
}
@Test
def tokenizeAllTypesWithMultipleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenString("bar"), tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" """ + "\\"\\"\\"bar\\"\\"\\"" + """ 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\\n "))
}
@Test
def tokenizeTrueAndUnquotedText() {
val expected = List(Tokens.START, tokenTrue, tokenUnquoted("foo"), Tokens.END)
assertEquals(expected, tokenizeAsList("""truefoo"""))
}
@Test
def tokenizeFalseAndUnquotedText() {
val expected = List(Tokens.START, tokenFalse, tokenUnquoted("foo"), Tokens.END)
assertEquals(expected, tokenizeAsList("""falsefoo"""))
}
@Test
def tokenizeNullAndUnquotedText() {
val expected = List(Tokens.START, tokenNull, tokenUnquoted("foo"), Tokens.END)
assertEquals(expected, tokenizeAsList("""nullfoo"""))
}
@Test
def tokenizeUnquotedTextContainingTrue() {
val expected = List(Tokens.START, tokenUnquoted("footrue"), Tokens.END)
assertEquals(expected, tokenizeAsList("""footrue"""))
}
@Test
def tokenizeUnquotedTextContainingSpaceTrue() {
val expected = List(Tokens.START, tokenUnquoted("foo"), tokenUnquoted(" "), tokenTrue, Tokens.END)
assertEquals(expected, tokenizeAsList("""foo true"""))
}
@Test
def tokenizeTrueAndSpaceAndUnquotedText() {
val expected = List(Tokens.START, tokenTrue, tokenUnquoted(" "), tokenUnquoted("foo"), Tokens.END)
assertEquals(expected, tokenizeAsList("""true foo"""))
}
@Test
def tokenizeUnquotedTextContainingSlash() {
tokenizerTest(List(tokenUnquoted("a/b/c/")), "a/b/c/")
tokenizerTest(List(tokenUnquoted("/")), "/")
tokenizerTest(List(tokenUnquoted("/"), tokenUnquoted(" "), tokenUnquoted("/")), "/ /")
tokenizerTest(List(tokenComment("")), "//")
}
@Test
def tokenizeUnquotedTextTrimsSpaces() {
val expected = List(Tokens.START, tokenUnquoted("foo"), tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(" foo \\n"))
}
@Test
def tokenizeUnquotedTextKeepsInternalSpaces() {
val expected = List(Tokens.START, tokenUnquoted("foo"), tokenUnquoted(" "), tokenUnquoted("bar"),
tokenUnquoted(" "), tokenUnquoted("baz"), tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(" foo bar baz \\n"))
}
@Test
def tokenizeMixedUnquotedQuoted() {
val expected = List(Tokens.START, tokenUnquoted("foo"),
tokenString("bar"), tokenUnquoted("baz"),
tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(" foo\\"bar\\"baz \\n"))
}
@Test
def tokenizerUnescapeStrings(): Unit = {
case class UnescapeTest(escaped: String, result: ConfigString)
implicit def pair2unescapetest(pair: (String, String)): UnescapeTest = UnescapeTest(pair._1, new ConfigString(fakeOrigin(), pair._2))
// getting the actual 6 chars we want in a string is a little pesky.
    // \u005C is backslash. Just prove we're doing it right here.
    assertEquals(6, "\\u0046".length)
    assertEquals('4', "\\u0046"(4))
    assertEquals('6', "\\u0046"(5))
val tests = List[UnescapeTest]((""" "" """, ""),
(" \\"\\\\u0000\\" ", Character.toString(0)), // nul byte
(""" "\\"\\\\\\/\\b\\f\\n\\r\\t" """, "\\"\\\\/\\b\\f\\n\\r\\t"),
("\\"\\\\u0046\\"", "F"),
("\\"\\\\u0046\\\\u0046\\"", "FF"))
for (t <- tests) {
describeFailure(t.toString) {
assertEquals(List(Tokens.START, Tokens.newValue(t.result), Tokens.END),
tokenizeAsList(t.escaped))
}
}
}
@Test
def tokenizerReturnsProblemOnInvalidStrings(): Unit = {
    val invalidTests = List(""" "\" """, // nothing after a backslash
      """ "\q" """, // there is no \q escape sequence
      "\"\\u123\"", // too short
      "\"\\u12\"", // too short
      "\"\\u1\"", // too short
      "\"\\u\"", // too short
      "\"", // just a single quote
      """ "abcdefg""", // no end quote
      """\"\""", // file ends with a backslash
"$", // file ends with a $
"${" // file ends with a ${
)
for (t <- invalidTests) {
val tokenized = tokenizeAsList(t)
val maybeProblem = tokenized.find(Tokens.isProblem(_))
assertTrue(maybeProblem.isDefined)
}
}
@Test
def tokenizerEmptyTripleQuoted(): Unit = {
assertEquals(List(Tokens.START, tokenString(""), Tokens.END),
tokenizeAsList("\\"\\"\\"\\"\\"\\""))
}
@Test
def tokenizerTrivialTripleQuoted(): Unit = {
assertEquals(List(Tokens.START, tokenString("bar"), Tokens.END),
tokenizeAsList("\\"\\"\\"bar\\"\\"\\""))
}
@Test
def tokenizerNoEscapesInTripleQuoted(): Unit = {
assertEquals(List(Tokens.START, tokenString("\\\\n"), Tokens.END),
tokenizeAsList("\\"\\"\\"\\\\n\\"\\"\\""))
}
@Test
def tokenizerTrailingQuotesInTripleQuoted(): Unit = {
assertEquals(List(Tokens.START, tokenString("\\"\\"\\""), Tokens.END),
tokenizeAsList("\\"\\"\\"\\"\\"\\"\\"\\"\\""))
}
@Test
def tokenizerNewlineInTripleQuoted(): Unit = {
assertEquals(List(Tokens.START, tokenString("foo\\nbar"), Tokens.END),
tokenizeAsList("\\"\\"\\"foo\\nbar\\"\\"\\""))
}
@Test
def tokenizerParseNumbers(): Unit = {
abstract class NumberTest(val s: String, val result: Token)
case class LongTest(override val s: String, override val result: Token) extends NumberTest(s, result)
case class DoubleTest(override val s: String, override val result: Token) extends NumberTest(s, result)
implicit def pair2inttest(pair: (String, Int)) = LongTest(pair._1, tokenLong(pair._2))
implicit def pair2longtest(pair: (String, Long)) = LongTest(pair._1, tokenLong(pair._2))
implicit def pair2doubletest(pair: (String, Double)) = DoubleTest(pair._1, tokenDouble(pair._2))
val tests = List[NumberTest](("1", 1),
("1.2", 1.2),
("1e6", 1e6),
("1e-6", 1e-6),
("1E-6", 1e-6), // capital E is allowed
("-1", -1),
("-1.2", -1.2))
for (t <- tests) {
describeFailure(t.toString()) {
assertEquals(List(Tokens.START, t.result, Tokens.END),
tokenizeAsList(t.s))
}
}
}
@Test
def commentsHandledInVariousContexts() {
tokenizerTest(List(tokenString("//bar")), "\\"//bar\\"")
tokenizerTest(List(tokenString("#bar")), "\\"#bar\\"")
tokenizerTest(List(tokenUnquoted("bar"), tokenComment("comment")), "bar//comment")
tokenizerTest(List(tokenUnquoted("bar"), tokenComment("comment")), "bar#comment")
tokenizerTest(List(tokenInt(10), tokenComment("comment")), "10//comment")
tokenizerTest(List(tokenInt(10), tokenComment("comment")), "10#comment")
tokenizerTest(List(tokenDouble(3.14), tokenComment("comment")), "3.14//comment")
tokenizerTest(List(tokenDouble(3.14), tokenComment("comment")), "3.14#comment")
// be sure we keep the newline
tokenizerTest(List(tokenInt(10), tokenComment("comment"), tokenLine(1), tokenInt(12)), "10//comment\\n12")
tokenizerTest(List(tokenInt(10), tokenComment("comment"), tokenLine(1), tokenInt(12)), "10#comment\\n12")
}
@Test
def tokenizeReservedChars() {
    for (invalid <- "+`^?!@*&\\") {
val tokenized = tokenizeAsList(invalid.toString)
assertEquals(3, tokenized.size)
assertEquals(Tokens.START, tokenized(0))
assertEquals(Tokens.END, tokenized(2))
val problem = tokenized(1)
assertTrue("reserved char is a problem", Tokens.isProblem(problem))
if (invalid == '+')
assertEquals("end of file", Tokens.getProblemWhat(problem))
else
assertEquals("" + invalid, Tokens.getProblemWhat(problem))
}
}
}
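
The tokenizerTest helper at the top of this file encodes the suite's central invariant: whatever the input, the token stream is bracketed by START and END, so individual tests list only the interesting middle tokens. A toy, self-contained sketch of that pattern (the ADT and tokenizer here are made up; the real Tokens and tokenizeAsList live in the library's private test utilities):

object TokenizerPatternSketch {
  // Made-up minimal token model; the real Tokens class is far richer.
  sealed trait Token
  case object Start extends Token
  case object End extends Token
  final case class Unquoted(s: String) extends Token

  // Stand-in tokenizer: splits on whitespace and brackets with Start/End.
  def tokenizeAsList(s: String): List[Token] =
    Start :: s.split("\\s+").toList.filter(_.nonEmpty).map(Unquoted) ::: List(End)

  // The helper pattern from the test above: callers list only the
  // interesting tokens; the bracketing is asserted once, centrally.
  def tokenizerTest(expected: List[Token], s: String): Unit =
    assert((Start :: expected ::: List(End)) == tokenizeAsList(s))

  def main(args: Array[String]): Unit =
    tokenizerTest(List(Unquoted("foo"), Unquoted("bar")), "foo bar")
}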
|
zeq9069/config
|
config/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
|
Scala
|
apache-2.0
| 11,550
|
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt
import java.io.File
import java.net.{ URI, URL }
import compiler.{ Eval, EvalImports }
import classpath.ClasspathUtilities
import scala.annotation.tailrec
import collection.mutable
import Compiler.Compilers
import inc.{ FileValueCache, Locate }
import Project.{ inScope, makeSettings }
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
import Keys.{ appConfiguration, baseDirectory, configuration, fullResolvers, fullClasspath, pluginData, streams, thisProject, thisProjectRef, update }
import Keys.{ exportedProducts, loadedBuild, onLoadMessage, resolvedScoped, sbtPlugin, scalacOptions, taskDefinitionKey }
import tools.nsc.reporters.ConsoleReporter
import Attributed.data
import Scope.{ GlobalScope, ThisScope }
import Types.const
import BuildPaths._
import BuildStreams._
import Locate.DefinesClass
object Load {
// note that there is State passed in but not pulled out
def defaultLoad(state: State, baseDirectory: File, log: Logger, isPlugin: Boolean = false, topLevelExtras: List[URI] = Nil): (() => Eval, sbt.BuildStructure) =
{
val globalBase = getGlobalBase(state)
val base = baseDirectory.getCanonicalFile
val definesClass = FileValueCache(Locate.definesClass _)
val rawConfig = defaultPreGlobal(state, base, definesClass.get, globalBase, log)
val config0 = defaultWithGlobal(state, base, rawConfig, globalBase, log)
val config = if (isPlugin) enableSbtPlugin(config0) else config0.copy(extraBuilds = topLevelExtras)
val result = apply(base, state, config)
definesClass.clear()
result
}
def defaultPreGlobal(state: State, baseDirectory: File, definesClass: DefinesClass, globalBase: File, log: Logger): sbt.LoadBuildConfiguration =
{
val provider = state.configuration.provider
val scalaProvider = provider.scalaProvider
val stagingDirectory = getStagingDirectory(state, globalBase).getCanonicalFile
val loader = getClass.getClassLoader
val classpath = Attributed.blankSeq(provider.mainClasspath ++ scalaProvider.jars)
val compilers = Compiler.compilers(ClasspathOptions.boot)(state.configuration, log)
val evalPluginDef = EvaluateTask.evalPluginDef(log) _
val delegates = defaultDelegates
val initialID = baseDirectory.getName
val pluginMgmt = PluginManagement(loader)
val inject = InjectSettings(injectGlobal(state), Nil, const(Nil))
new sbt.LoadBuildConfiguration(stagingDirectory, classpath, loader, compilers, evalPluginDef, definesClass, delegates,
EvaluateTask.injectStreams, pluginMgmt, inject, None, Nil, log)
}
def injectGlobal(state: State): Seq[Setting[_]] =
(appConfiguration in GlobalScope :== state.configuration) +:
LogManager.settingsLogger(state) +:
EvaluateTask.injectSettings
def defaultWithGlobal(state: State, base: File, rawConfig: sbt.LoadBuildConfiguration, globalBase: File, log: Logger): sbt.LoadBuildConfiguration =
{
val globalPluginsDir = getGlobalPluginsDirectory(state, globalBase)
val withGlobal = loadGlobal(state, base, globalPluginsDir, rawConfig)
val globalSettings = configurationSources(getGlobalSettingsDirectory(state, globalBase))
loadGlobalSettings(base, globalBase, globalSettings, withGlobal)
}
def loadGlobalSettings(base: File, globalBase: File, files: Seq[File], config: sbt.LoadBuildConfiguration): sbt.LoadBuildConfiguration =
{
val compiled: ClassLoader => Seq[Setting[_]] =
if (files.isEmpty || base == globalBase) const(Nil) else buildGlobalSettings(globalBase, files, config)
config.copy(injectSettings = config.injectSettings.copy(projectLoaded = compiled))
}
def buildGlobalSettings(base: File, files: Seq[File], config: sbt.LoadBuildConfiguration): ClassLoader => Seq[Setting[_]] =
{
val eval = mkEval(data(config.classpath), base, defaultEvalOptions)
val imports = BuildUtil.baseImports ++ BuildUtil.importAllRoot(config.globalPluginNames)
loader => EvaluateConfigurations(eval, files, imports)(loader).settings
}
def loadGlobal(state: State, base: File, global: File, config: sbt.LoadBuildConfiguration): sbt.LoadBuildConfiguration =
if (base != global && global.exists) {
val gp = GlobalPlugin.load(global, state, config)
val pm = setGlobalPluginLoader(gp, config.pluginManagement)
val cp = (gp.data.fullClasspath ++ config.classpath).distinct
config.copy(globalPlugin = Some(gp), pluginManagement = pm, classpath = cp)
} else
config
private[this] def setGlobalPluginLoader(gp: GlobalPlugin, pm: PluginManagement): PluginManagement =
{
val newLoader = ClasspathUtilities.toLoader(data(gp.data.fullClasspath), pm.initialLoader)
pm.copy(initialLoader = newLoader)
}
def defaultDelegates: sbt.LoadedBuild => Scope => Seq[Scope] = (lb: sbt.LoadedBuild) => {
val rootProject = getRootProject(lb.units)
def resolveRef(project: Reference): ResolvedReference = Scope.resolveReference(lb.root, rootProject, project)
Scope.delegates(
lb.allProjectRefs,
(_: ResolvedProject).configurations.map(c => ConfigKey(c.name)),
resolveRef,
rootProject,
project => projectInherit(lb, project),
(project, config) => configInherit(lb, project, config, rootProject),
task => task.extend,
(project, extra) => Nil
)
}
def configInherit(lb: sbt.LoadedBuild, ref: ResolvedReference, config: ConfigKey, rootProject: URI => String): Seq[ConfigKey] =
ref match {
case pr: ProjectRef => configInheritRef(lb, pr, config)
case BuildRef(uri) => configInheritRef(lb, ProjectRef(uri, rootProject(uri)), config)
}
def configInheritRef(lb: sbt.LoadedBuild, ref: ProjectRef, config: ConfigKey): Seq[ConfigKey] =
configurationOpt(lb.units, ref.build, ref.project, config).toList.flatMap(_.extendsConfigs).map(c => ConfigKey(c.name))
def projectInherit(lb: sbt.LoadedBuild, ref: ProjectRef): Seq[ProjectRef] =
getProject(lb.units, ref.build, ref.project).delegates
// build, load, and evaluate all units.
// 1) Compile all plugin definitions
// 2) Evaluate plugin definitions to obtain and compile plugins and get the resulting classpath for the build definition
// 3) Instantiate Plugins on that classpath
// 4) Compile all build definitions using plugin classpath
// 5) Load build definitions.
// 6) Load all configurations using build definitions and plugins (their classpaths and loaded instances).
// 7) Combine settings from projects, plugins, and configurations
// 8) Evaluate settings
def apply(rootBase: File, s: State, config: sbt.LoadBuildConfiguration): (() => Eval, sbt.BuildStructure) =
{
// load, which includes some resolution, but can't fill in project IDs yet, so follow with full resolution
val loaded = resolveProjects(load(rootBase, s, config))
val projects = loaded.units
lazy val rootEval = lazyEval(loaded.units(loaded.root).unit)
val settings = finalTransforms(buildConfigurations(loaded, getRootProject(projects), config.injectSettings))
val delegates = config.delegates(loaded)
val data = Def.make(settings)(delegates, config.scopeLocal, Project.showLoadingKey(loaded))
Project.checkTargets(data) foreach error
val index = structureIndex(data, settings, loaded.extra(data), projects)
val streams = mkStreams(projects, loaded.root, data)
(rootEval, new sbt.BuildStructure(projects, loaded.root, settings, data, index, streams, delegates, config.scopeLocal))
}
// map dependencies on the special tasks:
// 1. the scope of 'streams' is the same as the defining key and has the task axis set to the defining key
// 2. the defining key is stored on constructed tasks: used for error reporting among other things
// 3. resolvedScoped is replaced with the defining key as a value
// Note: this must be idempotent.
def finalTransforms(ss: Seq[Setting[_]]): Seq[Setting[_]] =
{
def mapSpecial(to: ScopedKey[_]) = new (ScopedKey ~> ScopedKey) {
def apply[T](key: ScopedKey[T]) =
if (key.key == streams.key)
ScopedKey(Scope.fillTaskAxis(Scope.replaceThis(to.scope)(key.scope), to.key), key.key)
else key
}
def setDefining[T] = (key: ScopedKey[T], value: T) => value match {
case tk: Task[t] => setDefinitionKey(tk, key).asInstanceOf[T]
case ik: InputTask[t] => ik.mapTask(tk => setDefinitionKey(tk, key)).asInstanceOf[T]
case _ => value
}
def setResolved(defining: ScopedKey[_]) = new (ScopedKey ~> Option) {
def apply[T](key: ScopedKey[T]): Option[T] =
key.key match {
case resolvedScoped.key => Some(defining.asInstanceOf[T])
case _ => None
}
}
ss.map(s => s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining)
}
def setDefinitionKey[T](tk: Task[T], key: ScopedKey[_]): Task[T] =
if (isDummy(tk)) tk else Task(tk.info.set(Keys.taskDefinitionKey, key), tk.work)
def structureIndex(data: Settings[Scope], settings: Seq[Setting[_]], extra: KeyIndex => BuildUtil[_], projects: Map[URI, LoadedBuildUnit]): sbt.StructureIndex =
{
val keys = Index.allKeys(settings)
val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key)
val scopedKeys = keys ++ data.allKeys((s, k) => ScopedKey(s, k))
val projectsMap = projects.mapValues(_.defined.keySet).toMap
val keyIndex = KeyIndex(scopedKeys, projectsMap)
val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap)
new sbt.StructureIndex(Index.stringToKeyMap(attributeKeys), Index.taskToKeyMap(data), Index.triggers(data), keyIndex, aggIndex)
}
// Reevaluates settings after modifying them. Does not recompile or reload any build components.
def reapply(newSettings: Seq[Setting[_]], structure: sbt.BuildStructure)(implicit display: Show[ScopedKey[_]]): sbt.BuildStructure =
{
val transformed = finalTransforms(newSettings)
val newData = makeSettings(transformed, structure.delegates, structure.scopeLocal)
val newIndex = structureIndex(newData, transformed, index => BuildUtil(structure.root, structure.units, index, newData), structure.units)
val newStreams = mkStreams(structure.units, structure.root, newData)
new sbt.BuildStructure(units = structure.units, root = structure.root, settings = transformed, data = newData, index = newIndex, streams = newStreams, delegates = structure.delegates, scopeLocal = structure.scopeLocal)
}
def isProjectThis(s: Setting[_]) = s.key.scope.project match { case This | Select(ThisProject) => true; case _ => false }
def buildConfigurations(loaded: sbt.LoadedBuild, rootProject: URI => String, injectSettings: InjectSettings): Seq[Setting[_]] =
{
((loadedBuild in GlobalScope :== loaded) +:
transformProjectOnly(loaded.root, rootProject, injectSettings.global)) ++
inScope(GlobalScope)(pluginGlobalSettings(loaded) ++ loaded.autos.globalSettings) ++
loaded.units.toSeq.flatMap {
case (uri, build) =>
val plugins = build.unit.plugins.detected.plugins.values
val pluginBuildSettings = plugins.flatMap(_.buildSettings) ++ loaded.autos.buildSettings(uri)
val pluginNotThis = plugins.flatMap(_.settings) filterNot isProjectThis
val projectSettings = build.defined flatMap {
case (id, project) =>
val ref = ProjectRef(uri, id)
val defineConfig: Seq[Setting[_]] = for (c <- project.configurations) yield ((configuration in (ref, ConfigKey(c.name))) :== c)
val builtin: Seq[Setting[_]] = (thisProject :== project) +: (thisProjectRef :== ref) +: defineConfig
val settings = builtin ++ project.settings ++ injectSettings.project
// map This to thisScope, Select(p) to mapRef(uri, rootProject, p)
transformSettings(projectScope(ref), uri, rootProject, settings)
}
val buildScope = Scope(Select(BuildRef(uri)), Global, Global, Global)
val buildBase = baseDirectory :== build.localBase
val buildSettings = transformSettings(buildScope, uri, rootProject, pluginNotThis ++ pluginBuildSettings ++ (buildBase +: build.buildSettings))
buildSettings ++ projectSettings
}
}
@deprecated("Does not account for AutoPlugins and will be made private.", "0.13.2")
def pluginGlobalSettings(loaded: sbt.LoadedBuild): Seq[Setting[_]] =
loaded.units.toSeq flatMap {
case (_, build) =>
build.unit.plugins.detected.plugins.values flatMap { _.globalSettings }
}
@deprecated("No longer used.", "0.13.0")
def extractSettings(plugins: Seq[Plugin]): (Seq[Setting[_]], Seq[Setting[_]], Seq[Setting[_]]) =
(plugins.flatMap(_.settings), plugins.flatMap(_.projectSettings), plugins.flatMap(_.buildSettings))
def transformProjectOnly(uri: URI, rootProject: URI => String, settings: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.resolveProject(uri, rootProject), settings)
def transformSettings(thisScope: Scope, uri: URI, rootProject: URI => String, settings: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.resolveScope(thisScope, uri, rootProject), settings)
def projectScope(project: Reference): Scope = Scope(Select(project), Global, Global, Global)
def lazyEval(unit: sbt.BuildUnit): () => Eval =
{
lazy val eval = mkEval(unit)
() => eval
}
def mkEval(unit: sbt.BuildUnit): Eval = mkEval(unit.definitions, unit.plugins, unit.plugins.pluginData.scalacOptions)
def mkEval(defs: sbt.LoadedDefinitions, plugs: sbt.LoadedPlugins, options: Seq[String]): Eval =
mkEval(defs.target ++ plugs.classpath, defs.base, options)
def mkEval(classpath: Seq[File], base: File, options: Seq[String]): Eval =
new Eval(options, classpath, s => new ConsoleReporter(s), Some(evalOutputDirectory(base)))
def configurations(srcs: Seq[File], eval: () => Eval, imports: Seq[String]): ClassLoader => LoadedSbtFile =
if (srcs.isEmpty) const(LoadedSbtFile.empty) else EvaluateConfigurations(eval(), srcs, imports)
def load(file: File, s: State, config: sbt.LoadBuildConfiguration): sbt.PartBuild =
load(file, builtinLoader(s, config.copy(pluginManagement = config.pluginManagement.shift, extraBuilds = Nil)), config.extraBuilds.toList)
def builtinLoader(s: State, config: sbt.LoadBuildConfiguration): BuildLoader =
{
val fail = (uri: URI) => sys.error("Invalid build URI (no handler available): " + uri)
val resolver = (info: BuildLoader.ResolveInfo) => RetrieveUnit(info)
val build = (info: BuildLoader.BuildInfo) => Some(() => loadUnit(info.uri, info.base, info.state, info.config))
val components = BuildLoader.components(resolver, build, full = BuildLoader.componentLoader)
BuildLoader(components, fail, s, config)
}
def load(file: File, loaders: BuildLoader, extra: List[URI]): sbt.PartBuild = loadURI(IO.directoryURI(file), loaders, extra)
def loadURI(uri: URI, loaders: BuildLoader, extra: List[URI]): sbt.PartBuild =
{
IO.assertAbsolute(uri)
val (referenced, map, newLoaders) = loadAll(uri :: extra, Map.empty, loaders, Map.empty)
checkAll(referenced, map)
val build = new sbt.PartBuild(uri, map)
newLoaders transformAll build
}
def addOverrides(unit: sbt.BuildUnit, loaders: BuildLoader): BuildLoader =
loaders updatePluginManagement PluginManagement.extractOverrides(unit.plugins.fullClasspath)
def addResolvers(unit: sbt.BuildUnit, isRoot: Boolean, loaders: BuildLoader): BuildLoader =
unit.definitions.builds.flatMap(_.buildLoaders) match {
case Nil => loaders
case x :: xs =>
import Alternatives._
val resolver = (x /: xs) { _ | _ }
if (isRoot) loaders.setRoot(resolver) else loaders.addNonRoot(unit.uri, resolver)
}
def loaded(unit: sbt.BuildUnit): (sbt.PartBuildUnit, List[ProjectReference]) =
{
val defined = projects(unit)
if (defined.isEmpty) sys.error("No projects defined in build unit " + unit)
// since base directories are resolved at this point (after 'projects'),
// we can compare Files instead of converting to URIs
def isRoot(p: Project) = p.base == unit.localBase
val externals = referenced(defined).toList
val explicitRoots = unit.definitions.builds.flatMap(_.rootProject)
val projectsInRoot = if (explicitRoots.isEmpty) defined.filter(isRoot) else explicitRoots
val rootProjects = if (projectsInRoot.isEmpty) defined.head :: Nil else projectsInRoot
(new sbt.PartBuildUnit(unit, defined.map(d => (d.id, d)).toMap, rootProjects.map(_.id), buildSettings(unit)), externals)
}
def buildSettings(unit: sbt.BuildUnit): Seq[Setting[_]] =
{
val buildScope = GlobalScope.copy(project = Select(BuildRef(unit.uri)))
val resolve = Scope.resolveBuildScope(buildScope, unit.uri)
Project.transform(resolve, unit.definitions.builds.flatMap(_.settings))
}
@tailrec def loadAll(bases: List[URI], references: Map[URI, List[ProjectReference]], loaders: BuildLoader, builds: Map[URI, sbt.PartBuildUnit]): (Map[URI, List[ProjectReference]], Map[URI, sbt.PartBuildUnit], BuildLoader) =
bases match {
case b :: bs =>
if (builds contains b)
loadAll(bs, references, loaders, builds)
else {
val (loadedBuild, refs) = loaded(loaders(b))
checkBuildBase(loadedBuild.unit.localBase)
val newLoader = addOverrides(loadedBuild.unit, addResolvers(loadedBuild.unit, builds.isEmpty, loaders.resetPluginDepth))
// it is important to keep the load order stable, so we sort the remaining URIs
val remainingBases = (refs.flatMap(Reference.uri) reverse_::: bs).sorted
loadAll(remainingBases, references.updated(b, refs), newLoader, builds.updated(b, loadedBuild))
}
case Nil => (references, builds, loaders)
}
def checkProjectBase(buildBase: File, projectBase: File) {
checkDirectory(projectBase)
assert(buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined, "Directory " + projectBase + " is not contained in build root " + buildBase)
}
def checkBuildBase(base: File) = checkDirectory(base)
def checkDirectory(base: File) {
assert(base.isAbsolute, "Not absolute: " + base)
if (base.isFile)
sys.error("Not a directory: " + base)
else if (!base.exists)
IO createDirectory base
}
def resolveAll(builds: Map[URI, sbt.PartBuildUnit]): Map[URI, sbt.LoadedBuildUnit] =
{
val rootProject = getRootProject(builds)
builds map {
case (uri, unit) =>
(uri, unit.resolveRefs(ref => Scope.resolveProjectRef(uri, rootProject, ref)))
} toMap;
}
def checkAll(referenced: Map[URI, List[ProjectReference]], builds: Map[URI, sbt.PartBuildUnit]) {
val rootProject = getRootProject(builds)
for ((uri, refs) <- referenced; ref <- refs) {
val ProjectRef(refURI, refID) = Scope.resolveProjectRef(uri, rootProject, ref)
val loadedUnit = builds(refURI)
if (!(loadedUnit.defined contains refID)) {
val projectIDs = loadedUnit.defined.keys.toSeq.sorted
sys.error("No project '" + refID + "' in '" + refURI + "'.\\nValid project IDs: " + projectIDs.mkString(", "))
}
}
}
def resolveBase(against: File): Project => Project =
{
def resolve(f: File) =
{
val fResolved = new File(IO.directoryURI(IO.resolve(against, f)))
checkProjectBase(against, fResolved)
fResolved
}
p => p.copy(base = resolve(p.base))
}
def resolveProjects(loaded: sbt.PartBuild): sbt.LoadedBuild =
{
val rootProject = getRootProject(loaded.units)
val units = loaded.units map {
case (uri, unit) =>
IO.assertAbsolute(uri)
(uri, resolveProjects(uri, unit, rootProject))
}
new sbt.LoadedBuild(loaded.root, units)
}
def resolveProjects(uri: URI, unit: sbt.PartBuildUnit, rootProject: URI => String): sbt.LoadedBuildUnit =
{
IO.assertAbsolute(uri)
val resolve = (_: Project).resolve(ref => Scope.resolveProjectRef(uri, rootProject, ref))
new sbt.LoadedBuildUnit(unit.unit, unit.defined mapValues resolve toMap, unit.rootProjects, unit.buildSettings)
}
def projects(unit: sbt.BuildUnit): Seq[Project] =
{
// we don't have the complete build graph loaded, so we don't have the rootProject function yet.
// Therefore, we use resolveProjectBuild instead of resolveProjectRef. After all builds are loaded, we can fully resolve ProjectReferences.
val resolveBuild = (_: Project).resolveBuild(ref => Scope.resolveProjectBuild(unit.uri, ref))
// although the default loader will resolve the project base directory, other loaders may not, so run resolveBase here as well
unit.definitions.projects.map(resolveBuild compose resolveBase(unit.localBase))
}
def getRootProject(map: Map[URI, sbt.BuildUnitBase]): URI => String =
uri => getBuild(map, uri).rootProjects.headOption getOrElse emptyBuild(uri)
def getConfiguration(map: Map[URI, sbt.LoadedBuildUnit], uri: URI, id: String, conf: ConfigKey): Configuration =
configurationOpt(map, uri, id, conf) getOrElse noConfiguration(uri, id, conf.name)
def configurationOpt(map: Map[URI, sbt.LoadedBuildUnit], uri: URI, id: String, conf: ConfigKey): Option[Configuration] =
getProject(map, uri, id).configurations.find(_.name == conf.name)
def getProject(map: Map[URI, sbt.LoadedBuildUnit], uri: URI, id: String): ResolvedProject =
getBuild(map, uri).defined.getOrElse(id, noProject(uri, id))
def getBuild[T](map: Map[URI, T], uri: URI): T =
map.getOrElse(uri, noBuild(uri))
def emptyBuild(uri: URI) = sys.error(s"No root project defined for build unit '$uri'")
def noBuild(uri: URI) = sys.error(s"Build unit '$uri' not defined.")
def noProject(uri: URI, id: String) = sys.error(s"No project '$id' defined in '$uri'.")
def noConfiguration(uri: URI, id: String, conf: String) = sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'")
def loadUnit(uri: URI, localBase: File, s: State, config: sbt.LoadBuildConfiguration): sbt.BuildUnit =
{
val normBase = localBase.getCanonicalFile
val defDir = projectStandard(normBase)
val plugs = plugins(defDir, s, config.copy(pluginManagement = config.pluginManagement.forPlugin))
val defsScala = plugs.detected.builds.values
lazy val eval = mkEval(plugs.classpath, defDir, plugs.pluginData.scalacOptions)
val initialProjects = defsScala.flatMap(b => projectsFromBuild(b, normBase))
val memoSettings = new mutable.HashMap[File, LoadedSbtFile]
def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, plugs, () => eval, config.injectSettings, Nil, memoSettings, config.log)
val loadedProjectsRaw = loadProjects(initialProjects)
val hasRoot = loadedProjectsRaw.exists(_.base == normBase) || defsScala.exists(_.rootProject.isDefined)
val (loadedProjects, defaultBuildIfNone) =
if (hasRoot)
(loadedProjectsRaw, Build.defaultEmpty)
else {
val existingIDs = loadedProjectsRaw.map(_.id)
val refs = existingIDs.map(id => ProjectRef(uri, id))
val defaultID = autoID(normBase, config.pluginManagement.context, existingIDs)
val b = Build.defaultAggregated(defaultID, refs)
val defaultProjects = loadProjects(projectsFromBuild(b, normBase))
(defaultProjects ++ loadedProjectsRaw, b)
}
val defs = if (defsScala.isEmpty) defaultBuildIfNone :: Nil else defsScala
val loadedDefs = new sbt.LoadedDefinitions(defDir, Nil, plugs.loader, defs, loadedProjects, plugs.detected.builds.names)
new sbt.BuildUnit(uri, normBase, loadedDefs, plugs)
}
private[this] def autoID(localBase: File, context: PluginManagement.Context, existingIDs: Seq[String]): String =
{
def normalizeID(f: File) = Project.normalizeProjectID(f.getName) match {
case Right(id) => id
case Left(msg) => error(autoIDError(f, msg))
}
def nthParentName(f: File, i: Int): String =
if (f eq null) Build.defaultID(localBase) else if (i <= 0) normalizeID(f) else nthParentName(f.getParentFile, i - 1)
val pluginDepth = context.pluginProjectDepth
val postfix = "-build" * pluginDepth
val idBase = if (context.globalPluginProject) "global-plugins" else nthParentName(localBase, pluginDepth)
val tryID = idBase + postfix
if (existingIDs.contains(tryID)) Build.defaultID(localBase) else tryID
}
private[this] def autoIDError(base: File, reason: String): String =
"Could not derive root project ID from directory " + base.getAbsolutePath + ":\\n" +
reason + "\\nRename the directory or explicitly define a root project."
private[this] def projectsFromBuild(b: Build, base: File): Seq[Project] =
b.projectDefinitions(base).map(resolveBase(base))
private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings,
acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile], log: Logger): Seq[Project] =
{
def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile =
loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins, projectSettings)
def loadForProjects = newProjects map { project =>
val autoPlugins =
try plugins.detected.deducePlugins(project.plugins, log)
catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) }
val autoConfigs = autoPlugins.flatMap(_.projectConfigurations)
val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins, project.settings)
// add the automatically selected settings, record the selected AutoPlugins, and register the automatically selected configurations
val transformed = project.copy(settings = loadedSbtFiles.settings).setAutoPlugins(autoPlugins).prefixConfigs(autoConfigs: _*)
(transformed, loadedSbtFiles.projects)
}
def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase, Nil, Nil).projects
val (nextProjects, loadedProjects) =
if (newProjects.isEmpty) // load the .sbt files in the root directory to look for Projects
(defaultLoad, acc)
else {
val (transformed, np) = loadForProjects.unzip
(np.flatten, transformed ++ acc)
}
if (nextProjects.isEmpty)
loadedProjects
else
loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings, log)
}
private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException =
    e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n")
private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin], buildScalaFiles: Seq[Setting[_]]): LoadedSbtFile =
{
lazy val defaultSbtFiles = configurationSources(projectBase)
def settings(ss: Seq[Setting[_]]) = new LoadedSbtFile(ss, Nil, Nil)
val loader = loadedPlugins.loader
def merge(ls: Seq[LoadedSbtFile]): LoadedSbtFile = (LoadedSbtFile.empty /: ls) { _ merge _ }
def loadSettings(fs: Seq[File]): LoadedSbtFile =
merge(fs.sortBy(_.getName).map(memoLoadSettingsFile))
def memoLoadSettingsFile(src: File): LoadedSbtFile = memoSettings.get(src) getOrElse {
val lf = loadSettingsFile(src)
memoSettings.put(src, lf.clearProjects) // don't load projects twice
lf
}
def loadSettingsFile(src: File): LoadedSbtFile =
EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader)
import AddSettings.{ User, SbtFiles, DefaultSbtFiles, Plugins, AutoPlugins, Sequence, BuildScalaFiles }
def pluginSettings(f: Plugins) = {
val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't apply the filter to AutoPlugins, only Plugins
included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings)
}
// Filter the AutoPlugin settings we included based on which ones are
// intended in the AddSettings.AutoPlugins filter.
def autoPluginSettings(f: AutoPlugins) =
autoPlugins.filter(f.include).flatMap(_.projectSettings)
def expand(auto: AddSettings): LoadedSbtFile = auto match {
case BuildScalaFiles => settings(buildScalaFiles)
case User => settings(injectSettings.projectLoaded(loader))
case sf: SbtFiles => loadSettings(sf.files.map(f => IO.resolve(projectBase, f)))
case sf: DefaultSbtFiles => loadSettings(defaultSbtFiles.filter(sf.include))
case p: Plugins => settings(pluginSettings(p))
case p: AutoPlugins => settings(autoPluginSettings(p))
case q: Sequence => (LoadedSbtFile.empty /: q.sequence) { (b, add) => b.merge(expand(add)) }
}
expand(auto)
}
@deprecated("No longer used.", "0.13.0")
def globalPluginClasspath(globalPlugin: Option[GlobalPlugin]): Seq[Attributed[File]] =
globalPlugin match {
case Some(cp) => cp.data.fullClasspath
case None => Nil
}
val autoPluginSettings: Seq[Setting[_]] = inScope(GlobalScope in LocalRootProject)(Seq(
sbtPlugin :== true,
pluginData := {
val prod = (exportedProducts in Configurations.Runtime).value
val cp = (fullClasspath in Configurations.Runtime).value
val opts = (scalacOptions in Configurations.Compile).value
PluginData(removeEntries(cp, prod), prod, Some(fullResolvers.value), Some(update.value), opts)
},
onLoadMessage := ("Loading project definition from " + baseDirectory.value)
))
private[this] def removeEntries(cp: Seq[Attributed[File]], remove: Seq[Attributed[File]]): Seq[Attributed[File]] =
{
val files = data(remove).toSet
cp filter { f => !files.contains(f.data) }
}
def enableSbtPlugin(config: sbt.LoadBuildConfiguration): sbt.LoadBuildConfiguration =
config.copy(injectSettings = config.injectSettings.copy(
global = autoPluginSettings ++ config.injectSettings.global,
project = config.pluginManagement.inject ++ config.injectSettings.project
))
def activateGlobalPlugin(config: sbt.LoadBuildConfiguration): sbt.LoadBuildConfiguration =
config.globalPlugin match {
case Some(gp) => config.copy(injectSettings = config.injectSettings.copy(project = gp.inject))
case None => config
}
def plugins(dir: File, s: State, config: sbt.LoadBuildConfiguration): sbt.LoadedPlugins =
if (hasDefinition(dir))
buildPlugins(dir, s, enableSbtPlugin(activateGlobalPlugin(config)))
else
noPlugins(dir, config)
def hasDefinition(dir: File) =
{
import Path._
!(dir * -GlobFilter(DefaultTargetName)).get.isEmpty
}
def noPlugins(dir: File, config: sbt.LoadBuildConfiguration): sbt.LoadedPlugins =
loadPluginDefinition(dir, config, PluginData(config.classpath, None, None))
def buildPlugins(dir: File, s: State, config: sbt.LoadBuildConfiguration): sbt.LoadedPlugins =
loadPluginDefinition(dir, config, buildPluginDefinition(dir, s, config))
def loadPluginDefinition(dir: File, config: sbt.LoadBuildConfiguration, pluginData: PluginData): sbt.LoadedPlugins =
{
val (definitionClasspath, pluginLoader) = pluginDefinitionLoader(config, pluginData)
loadPlugins(dir, pluginData.copy(dependencyClasspath = definitionClasspath), pluginLoader)
}
def pluginDefinitionLoader(config: sbt.LoadBuildConfiguration, dependencyClasspath: Seq[Attributed[File]]): (Seq[Attributed[File]], ClassLoader) =
pluginDefinitionLoader(config, dependencyClasspath, Nil)
def pluginDefinitionLoader(config: sbt.LoadBuildConfiguration, pluginData: PluginData): (Seq[Attributed[File]], ClassLoader) =
pluginDefinitionLoader(config, pluginData.dependencyClasspath, pluginData.definitionClasspath)
def pluginDefinitionLoader(config: sbt.LoadBuildConfiguration, depcp: Seq[Attributed[File]], defcp: Seq[Attributed[File]]): (Seq[Attributed[File]], ClassLoader) =
{
val definitionClasspath =
if (depcp.isEmpty)
config.classpath
else
(depcp ++ config.classpath).distinct
val pm = config.pluginManagement
// only the dependencyClasspath goes in the common plugin class loader ...
def addToLoader() = pm.loader add Path.toURLs(data(depcp))
val parentLoader = if (depcp.isEmpty) pm.initialLoader else { addToLoader(); pm.loader }
val pluginLoader =
if (defcp.isEmpty)
parentLoader
else {
// ... the build definition classes get their own loader so that they don't conflict with other build definitions (#511)
ClasspathUtilities.toLoader(data(defcp), parentLoader)
}
(definitionClasspath, pluginLoader)
}
def buildPluginDefinition(dir: File, s: State, config: sbt.LoadBuildConfiguration): PluginData =
{
val (eval, pluginDef) = apply(dir, s, config)
val pluginState = Project.setProject(Load.initialSession(pluginDef, eval), pluginDef, s)
config.evalPluginDef(Project.structure(pluginState), pluginState)
}
@deprecated("Use ModuleUtilities.getCheckedObjects[Build].", "0.13.2")
def loadDefinitions(loader: ClassLoader, defs: Seq[String]): Seq[Build] =
defs map { definition => loadDefinition(loader, definition) }
@deprecated("Use ModuleUtilities.getCheckedObject[Build].", "0.13.2")
def loadDefinition(loader: ClassLoader, definition: String): Build =
ModuleUtilities.getObject(definition, loader).asInstanceOf[Build]
def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins =
new sbt.LoadedPlugins(dir, data, loader, PluginDiscovery.discoverAll(data, loader))
@deprecated("Replaced by the more general PluginDiscovery.binarySourceModuleNames and will be made private.", "0.13.2")
def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] =
PluginDiscovery.binarySourceModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins, classOf[Plugin].getName)
@deprecated("Use PluginDiscovery.binaryModuleNames.", "0.13.2")
def binaryPlugins(classpath: Seq[File], loader: ClassLoader): Seq[String] =
PluginDiscovery.binaryModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins)
@deprecated("Use PluginDiscovery.onClasspath", "0.13.2")
def onClasspath(classpath: Seq[File])(url: URL): Boolean =
PluginDiscovery.onClasspath(classpath)(url)
@deprecated("Use ModuleUtilities.getCheckedObjects[Plugin].", "0.13.2")
def loadPlugins(loader: ClassLoader, pluginNames: Seq[String]): Seq[Plugin] =
ModuleUtilities.getCheckedObjects[Plugin](pluginNames, loader).map(_._2)
@deprecated("Use ModuleUtilities.getCheckedObject[Plugin].", "0.13.2")
def loadPlugin(pluginName: String, loader: ClassLoader): Plugin =
ModuleUtilities.getCheckedObject[Plugin](pluginName, loader)
@deprecated("No longer used.", "0.13.2")
def findPlugins(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Plugin")
@deprecated("No longer used.", "0.13.2")
def findDefinitions(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Build")
@deprecated("Use PluginDiscovery.sourceModuleNames", "0.13.2")
def discover(analysis: inc.Analysis, subclasses: String*): Seq[String] =
PluginDiscovery.sourceModuleNames(analysis, subclasses: _*)
def initialSession(structure: sbt.BuildStructure, rootEval: () => Eval, s: State): SessionSettings = {
val session = s get Keys.sessionSettings
val currentProject = session map (_.currentProject) getOrElse Map.empty
val currentBuild = session map (_.currentBuild) filter (uri => structure.units.keys exists (uri ==)) getOrElse structure.root
new SessionSettings(currentBuild, projectMap(structure, currentProject), structure.settings, Map.empty, Nil, rootEval)
}
def initialSession(structure: sbt.BuildStructure, rootEval: () => Eval): SessionSettings =
new SessionSettings(structure.root, projectMap(structure, Map.empty), structure.settings, Map.empty, Nil, rootEval)
def projectMap(structure: sbt.BuildStructure, current: Map[URI, String]): Map[URI, String] =
{
val units = structure.units
val getRoot = getRootProject(units)
def project(uri: URI) = {
current get uri filter {
p => structure allProjects uri map (_.id) contains p
} getOrElse getRoot(uri)
}
units.keys.map(uri => (uri, project(uri))).toMap
}
def defaultEvalOptions: Seq[String] = Nil
@deprecated("Use BuildUtil.baseImports", "0.13.0")
def baseImports = BuildUtil.baseImports
@deprecated("Use BuildUtil.checkCycles", "0.13.0")
def checkCycles(units: Map[URI, sbt.LoadedBuildUnit]): Unit = BuildUtil.checkCycles(units)
@deprecated("Use BuildUtil.importAll", "0.13.0")
def importAll(values: Seq[String]): Seq[String] = BuildUtil.importAll(values)
@deprecated("Use BuildUtil.importAllRoot", "0.13.0")
def importAllRoot(values: Seq[String]): Seq[String] = BuildUtil.importAllRoot(values)
@deprecated("Use BuildUtil.rootedNames", "0.13.0")
def rootedName(s: String): String = BuildUtil.rootedName(s)
@deprecated("Use BuildUtil.getImports", "0.13.0")
def getImports(unit: sbt.BuildUnit): Seq[String] = BuildUtil.getImports(unit)
def referenced[PR <: ProjectReference](definitions: Seq[ProjectDefinition[PR]]): Seq[PR] = definitions flatMap { _.referenced }
@deprecated("LoadedBuildUnit is now top-level", "0.13.0")
type LoadedBuildUnit = sbt.LoadedBuildUnit
@deprecated("BuildStructure is now top-level", "0.13.0")
type BuildStructure = sbt.BuildStructure
@deprecated("StructureIndex is now top-level", "0.13.0")
type StructureIndex = sbt.StructureIndex
@deprecated("LoadBuildConfiguration is now top-level", "0.13.0")
type LoadBuildConfiguration = sbt.LoadBuildConfiguration
@deprecated("LoadBuildConfiguration is now top-level", "0.13.0")
val LoadBuildConfiguration = sbt.LoadBuildConfiguration
final class EvaluatedConfigurations(val eval: Eval, val settings: Seq[Setting[_]])
final case class InjectSettings(global: Seq[Setting[_]], project: Seq[Setting[_]], projectLoaded: ClassLoader => Seq[Setting[_]])
@deprecated("LoadedDefinitions is now top-level", "0.13.0")
type LoadedDefinitions = sbt.LoadedDefinitions
@deprecated("LoadedPlugins is now top-level", "0.13.0")
type LoadedPlugins = sbt.LoadedPlugins
@deprecated("BuildUnit is now top-level", "0.13.0")
type BuildUnit = sbt.BuildUnit
@deprecated("LoadedBuild is now top-level", "0.13.0")
type LoadedBuild = sbt.LoadedBuild
@deprecated("PartBuild is now top-level", "0.13.0")
type PartBuild = sbt.PartBuild
@deprecated("BuildUnitBase is now top-level", "0.13.0")
type BuildUnitBase = sbt.BuildUnitBase
@deprecated("PartBuildUnit is now top-level", "0.13.0")
type PartBuildUnit = sbt.PartBuildUnit
@deprecated("Use BuildUtil.apply", "0.13.0")
def buildUtil(root: URI, units: Map[URI, sbt.LoadedBuildUnit], keyIndex: KeyIndex, data: Settings[Scope]): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data)
}
final case class LoadBuildConfiguration(stagingDirectory: File, classpath: Seq[Attributed[File]], loader: ClassLoader,
compilers: Compilers, evalPluginDef: (sbt.BuildStructure, State) => PluginData, definesClass: DefinesClass,
delegates: sbt.LoadedBuild => Scope => Seq[Scope], scopeLocal: ScopeLocal,
pluginManagement: PluginManagement, injectSettings: Load.InjectSettings, globalPlugin: Option[GlobalPlugin], extraBuilds: Seq[URI],
log: Logger) {
@deprecated("Use `classpath`.", "0.13.0")
lazy val globalPluginClasspath = classpath
@deprecated("Use `pluginManagement.initialLoader`.", "0.13.0")
lazy val globalPluginLoader = pluginManagement.initialLoader
lazy val globalPluginNames = if (classpath.isEmpty) Nil else Load.getPluginNames(classpath, pluginManagement.initialLoader)
}
final class IncompatiblePluginsException(msg: String, cause: Throwable) extends Exception(msg, cause)
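
Most of Load is plumbing, but removeEntries above is a reusable micro-pattern: a set-difference over classpaths, so exported products are not listed twice in the plugin data. A bare-File sketch of the same idea, with the Attributed wrapper dropped for brevity:

import java.io.File

object RemoveEntriesSketch {
  // Keep only classpath entries that are not in the removal set,
  // mirroring Load.removeEntries but on bare Files.
  def removeEntries(cp: Seq[File], remove: Seq[File]): Seq[File] = {
    val files = remove.toSet
    cp.filterNot(files.contains)
  }

  def main(args: Array[String]): Unit = {
    val cp = Seq(new File("a.jar"), new File("b.jar"), new File("classes"))
    val prod = Seq(new File("classes"))
    println(removeEntries(cp, prod)) // List(a.jar, b.jar)
  }
}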
|
jaceklaskowski/sbt
|
main/src/main/scala/sbt/Load.scala
|
Scala
|
bsd-3-clause
| 40,902
|
package com.github.truerss.plugins
import java.net.URL
import com.github.truerss.base.ContentTypeParam.{RequestParam, HtmlRequest, UrlRequest}
import com.github.truerss.base.{ContentTypeParam, Video, BaseContentPlugin, Errors}
import com.typesafe.config.{Config, ConfigFactory}
class YoutubePlugin(config: Config = ConfigFactory.empty) extends BaseContentPlugin(config) {
override val pluginName = "YoutubePlugin"
override val author = "fntz <mike.fch1@gmail.com>"
override val about = "Embed Youtube Video"
override val version = "1.0.1"
override val contentType = Video
override val priority = 10
override val contentTypeParam = ContentTypeParam.URL
private val links = Vector("youtube.com", "youtu.be", "y2u.be")
  override def matchUrl(url: URL) = {
    val host = url.getHost
    links.exists(x => host.endsWith(x))
  }
override def content(urlOrContent: RequestParam): Response = {
urlOrContent match {
case UrlRequest(url) =>
val need = url.toString.replace("watch?v=", "embed/")
Right(Some(s"""
<iframe width="560" height="315"
src="$need"
frameborder="0" allowfullscreen>
</iframe>
"""))
case HtmlRequest(_) =>
Left(Errors.UnexpectedError("Pass url instead of content"))
}
}
}
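
The embed rewrite above is a single substring replacement on the watch URL. A quick standalone check of that transformation (note that short links such as youtu.be pass matchUrl but contain no "watch?v=" segment, so they come through unchanged):

object EmbedRewriteSketch {
  // Same rewrite the plugin applies before building the iframe.
  def toEmbed(url: String): String = url.replace("watch?v=", "embed/")

  def main(args: Array[String]): Unit = {
    println(toEmbed("https://www.youtube.com/watch?v=dQw4w9WgXcQ"))
    // prints https://www.youtube.com/embed/dQw4w9WgXcQ
  }
}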
|
truerss/plugins
|
truerss-youtube-plugin/src/main/scala/com/github/truerss/plugins/YoutubePlugin.scala
|
Scala
|
mit
| 1,355
|
package com.arcusys.valamis.persistence.impl.scorm.storage
import java.sql.Connection
import com.arcusys.valamis.lesson.scorm.model.ScormUser
import com.arcusys.valamis.lesson.scorm.model.manifest._
import com.arcusys.valamis.lesson.scorm.storage.ActivityDataStorage
import com.arcusys.valamis.lesson.scorm.storage.sequencing.SequencingStorage
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.persistence.impl.scorm.schema._
import org.scalatest.{BeforeAndAfter, FunSuite}
import scala.slick.driver.H2Driver
import scala.slick.driver.H2Driver.simple._
/**
* Created by eboystova on 10.05.16.
*/
class ActivityDataStorageTest extends FunSuite
with ActivityTableComponent
with ActivityDataMapTableComponent
with ChildrenSelectionTableComponent
with ConditionRuleTableComponent
with SequencingTableComponent
with SeqPermissionsTableComponent
with SequencingTrackingTableComponent
with ScormUserComponent
with RollupContributionTableComponent
with RollupRuleTableComponent
with ObjectiveTableComponent
with ObjectiveMapTableComponent
with SlickProfile
with BeforeAndAfter {
val db = Database.forURL("jdbc:h2:mem:ActivityDataStorageTest", driver = "org.h2.Driver")
override val driver = H2Driver
val storages = new StorageFactory(db, driver)
val activityDataStorage = storages.getActivityDataStorage
val activityStorage = storages.getActivityStorage
val scormUserStorage = storages.getScormUserStorage
var connection: Connection = _
// db data will be released after connection close
before {
connection = db.source.createConnection()
createSchema()
}
after {
connection.close()
}
def createSchema() {
import driver.simple._
db.withSession { implicit session =>
activityTQ.ddl.create
scormUsersTQ.ddl.create
sequencingTQ.ddl.create
seqPermissionsTQ.ddl.create
rollupContributionTQ.ddl.create
objectiveTQ.ddl.create
objectiveMapTQ.ddl.create
childrenSelectionTQ.ddl.create
sequencingTrackingTQ.ddl.create
conditionRuleTQ.ddl.create
rollupRuleTQ.ddl.create
activityDataMapTQ.ddl.create
}
}
test("execute 'create' without errors") {
val scormUser = ScormUser(123, "Name", 1, "language", 1, 0)
scormUserStorage.add(scormUser)
val activity = new Organization(id = "organization id",
title = "title",
objectivesGlobalToSystem = true,
sharedDataGlobalToSystem = true,
sequencing = Sequencing.Default,
completionThreshold = CompletionThreshold.Default,
metadata = None)
activityStorage.create(1, activity)
val activityDataMap = new ActivityDataMap(
targetId = "target id",
readSharedData = true,
writeSharedData = true)
activityDataStorage.create(1, "organization id", activityDataMap)
import driver.simple._
db.withSession { implicit session =>
val isActivity = activityDataMapTQ.filter(a => a.packageId === 1L && a.activityId === "organization id").exists.run
assert(isActivity)
}
}
test("execute 'delete' without errors") {
val scormUser = ScormUser(123, "Name", 1, "language", 1, 0)
scormUserStorage.add(scormUser)
val activity = new Organization(id = "organization id",
title = "title",
objectivesGlobalToSystem = true,
sharedDataGlobalToSystem = true,
sequencing = Sequencing.Default,
completionThreshold = CompletionThreshold.Default,
metadata = None)
activityStorage.create(1, activity)
val activityDataMap = new ActivityDataMap(
targetId = "target id",
readSharedData = true,
writeSharedData = true)
activityDataStorage.create(1, "organization id", activityDataMap)
activityDataStorage.delete(1, "organization id")
import driver.simple._
db.withSession { implicit session =>
val isActivity = activityDataMapTQ.filter(a => a.packageId === 1L && a.activityId === "organization id").exists.run
assert(!isActivity)
}
}
test("execute 'getForActivity' without errors") {
val scormUser = ScormUser(123, "Name", 1, "language", 1, 0)
scormUserStorage.add(scormUser)
val activity = new Organization(id = "organization id",
title = "title",
objectivesGlobalToSystem = true,
sharedDataGlobalToSystem = true,
sequencing = Sequencing.Default,
completionThreshold = CompletionThreshold.Default,
metadata = None)
activityStorage.create(1, activity)
val activityDataMap = new ActivityDataMap(
targetId = "target id",
readSharedData = true,
writeSharedData = true)
activityDataStorage.create(1, "organization id", activityDataMap)
val activityData = activityDataStorage.getForActivity(1, "organization id")
assert(activityData.nonEmpty)
}
}
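
These tests rely on an H2 behaviour that the before/after hooks work around: an in-memory database is discarded when its last connection closes, so one connection is held open per test purely to keep the schema alive. A minimal plain-JDBC sketch of the same trick (no Slick; requires only the H2 driver on the classpath):

import java.sql.DriverManager

object H2KeepAliveSketch {
  def main(args: Array[String]): Unit = {
    // Holding this connection open keeps the in-memory db alive ...
    val keepAlive = DriverManager.getConnection("jdbc:h2:mem:sketch")
    keepAlive.createStatement().execute("CREATE TABLE t(id INT)")

    // ... so a second, independent connection sees the same schema.
    val other = DriverManager.getConnection("jdbc:h2:mem:sketch")
    val rs = other.getMetaData.getTables(null, null, "T", null)
    println(rs.next()) // true: table T is visible
    other.close()

    keepAlive.close() // last connection closed: the db is discarded
  }
}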
|
igor-borisov/valamis
|
valamis-slick-persistence/src/test/scala/com/arcusys/valamis/persistence/impl/scorm/storage/ActivityDataStorageTest.scala
|
Scala
|
gpl-3.0
| 4,841
|
package lila.socket
import chess.Centis
import com.github.blemale.scaffeine.Cache
import scala.concurrent.duration._
object UserLagCache {
private val cache: Cache[String, Centis] = lila.memo.CacheApi.scaffeineNoScheduler
.expireAfterWrite(15 minutes)
.build[String, Centis]()
def put(userId: String, lag: Centis): Unit =
if (lag.centis >= 0)
cache.put(
userId,
cache.getIfPresent(userId).fold(lag) {
_ avg lag
}
)
def get(userId: String): Option[Centis] = cache.getIfPresent(userId)
def getLagRating(userId: String): Option[Int] =
get(userId) map {
case i if i <= Centis(15) => 4
case i if i <= Centis(30) => 3
case i if i <= Centis(50) => 2
case _ => 1
}
}
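
Each call to put folds the new sample into the stored value with Centis#avg, which (assuming avg is the simple midpoint of old and new, as its use here suggests) behaves like exponential smoothing with factor 1/2. A toy version over plain Ints, without the cache expiry:

import scala.collection.mutable

object LagCacheSketch {
  private val cache = mutable.Map.empty[String, Int] // userId -> lag in centis

  // Fold each non-negative sample into the stored value, midpoint-style,
  // mirroring how UserLagCache combines samples with Centis#avg.
  def put(userId: String, lag: Int): Unit =
    if (lag >= 0)
      cache.update(userId, cache.get(userId).fold(lag)(old => (old + lag) / 2))

  def lagRating(userId: String): Option[Int] =
    cache.get(userId).map {
      case i if i <= 15 => 4
      case i if i <= 30 => 3
      case i if i <= 50 => 2
      case _            => 1
    }

  def main(args: Array[String]): Unit = {
    put("user1", 40); put("user1", 20) // smoothed to 30
    println(lagRating("user1"))        // Some(3)
  }
}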
|
luanlv/lila
|
modules/socket/src/main/UserLagCache.scala
|
Scala
|
mit
| 782
|
package org.jetbrains.plugins.scala.lang.psi.api.base.patterns
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScSequenceArg
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
trait ScPatternArgumentList extends ScArguments {
def patterns: Seq[ScPattern]
def missedLastExpr: Boolean = {
var child = getLastChild
while (child != null && child.getNode.getElementType != ScalaTokenTypes.tCOMMA) {
if (child.isInstanceOf[ScPattern] || child.isInstanceOf[ScSequenceArg]) return false
child = child.getPrevSibling
}
return child != null && child.getNode.getElementType == ScalaTokenTypes.tCOMMA
}
override def getArgsCount: Int = patterns.length
}
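
missedLastExpr answers one question: scanning backwards from the end of the argument list, does a comma appear before any pattern, i.e. is the last expression missing after a trailing comma? A toy version over a flat token sequence shows the same backwards scan (the Tok ADT is hypothetical; the real code walks PSI siblings):

object TrailingCommaSketch {
  sealed trait Tok
  case object Comma extends Tok
  case object Ws extends Tok            // whitespace / other trivia
  final case class Pattern(s: String) extends Tok

  // True iff, scanning from the end, a comma is found before any pattern.
  def missedLastExpr(toks: Seq[Tok]): Boolean =
    toks.reverseIterator.collectFirst {
      case Comma      => true
      case Pattern(_) => false
    }.getOrElse(false)

  def main(args: Array[String]): Unit = {
    println(missedLastExpr(Seq(Pattern("a"), Comma, Ws)))           // true
    println(missedLastExpr(Seq(Pattern("a"), Comma, Pattern("b")))) // false
  }
}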
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPatternArgumentList.scala
|
Scala
|
apache-2.0
| 832
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package support
import com.google.inject.AbstractModule
import connectors.HmrcTierConnector
import controllers.actions.{AuthAction, NoSessionCheckAction}
import org.mockito.Mockito.mock
import org.specs2.specification.Scope
import services.{EiLListService, SessionService}
import utils.{TestAuthAction, TestNoSessionCheckAction}
trait ServiceExclusionSetup extends Scope {
object GuiceTestModule extends AbstractModule {
override def configure(): Unit = {
bind(classOf[EiLListService]).to(classOf[StubEiLListServiceOneExclusion])
bind(classOf[AuthAction]).to(classOf[TestAuthAction])
bind(classOf[NoSessionCheckAction]).to(classOf[TestNoSessionCheckAction])
bind(classOf[HmrcTierConnector]).toInstance(mock(classOf[HmrcTierConnector]))
bind(classOf[SessionService]).toInstance(mock(classOf[SessionService]))
}
}
}
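
A test module like this is handed to a Guice injector so that production bindings are replaced with stubs and mocks. A generic sketch of that wiring, with hypothetical Service/StubService types standing in for the application classes:

import com.google.inject.{AbstractModule, Guice}

// Hypothetical stand-ins for the application's real services.
trait Service { def name: String }
class StubService extends Service { val name = "stub" }

object TestModule extends AbstractModule {
  override def configure(): Unit =
    bind(classOf[Service]).to(classOf[StubService])
}

object TestModuleSketch {
  def main(args: Array[String]): Unit = {
    val injector = Guice.createInjector(TestModule)
    println(injector.getInstance(classOf[Service]).name) // prints: stub
  }
}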
|
hmrc/pbik-frontend
|
test/support/ServiceExclusionSetup.scala
|
Scala
|
apache-2.0
| 1,464
|
/*
* Copyright (C) 2012-2013 Age Mooij (http://scalapenos.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scalapenos.riak
class UnsafeBucketOperationsSpec extends RiakClientSpecification with RandomKeySupport with RandomBucketSupport {
def randomUnsafeBucketOperations = randomBucket(client).unsafe
"UnsafeBucketOperations" should {
"list all keys" in {
val unsafeBucketOperations = randomUnsafeBucketOperations
val numberOfKeys = 5
val keys = (1 to numberOfKeys).map(_ ⇒ randomKey)
keys.foreach { key ⇒
unsafeBucketOperations.store(key, "value").await
}
val allKeys = unsafeBucketOperations.allKeys().await
keys.foreach { key ⇒
unsafeBucketOperations.delete(key).await
}
allKeys.keys.toSet must beEqualTo(keys.toSet)
}
"list all keys from an empty unsafeBucketOperations" in {
val unsafeBucketOperations = randomUnsafeBucketOperations
val allKeys = unsafeBucketOperations.allKeys().await
allKeys.keys should be(Nil)
}
}
}
|
ajantis/riak-scala-client
|
src/test/scala/com/scalapenos/riak/UnsafeBucketOperationsSpec.scala
|
Scala
|
apache-2.0
| 1,578
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.wordspec.beforeandafter
import org.scalatest.WordSpec
import org.scalatest.BeforeAndAfter
import collection.mutable.ListBuffer
class ExampleSpec extends WordSpec with BeforeAndAfter {
val builder = new StringBuilder
val buffer = new ListBuffer[String]
before {
builder.append("ScalaTest is ")
}
after {
builder.clear()
buffer.clear()
}
"Testing" should {
"be easy" in {
builder.append("easy!")
assert(builder.toString === "ScalaTest is easy!")
assert(buffer.isEmpty)
buffer += "sweet"
}
"be fun" in {
builder.append("fun!")
assert(builder.toString === "ScalaTest is fun!")
assert(buffer.isEmpty)
}
}
}
|
dotty-staging/scalatest
|
examples/src/test/scala/org/scalatest/examples/wordspec/beforeandafter/ExampleSpec.scala
|
Scala
|
apache-2.0
| 1,327
|
package ui.geometry
import ui.arrayBuffer.ArrayBuffer
/*
class Box extends Geometry(new VertexAttributes(List[Float](
-1f, -1f,
1f, -1f,
1f, 1f,
1f, 1f,
-1f, 1f,
-1f, -1f
))) {
}
object Box {
def apply(): Box = new Box
}
*/
|
gvatn/play-scalajs-webgl-spark
|
client/src/main/scala/ui/geometry/Box.scala
|
Scala
|
mit
| 292
|
import org.gnu.glpk.GLPK;
import org.gnu.glpk.GLPKConstants;
import org.gnu.glpk.GlpkException;
import org.gnu.glpk.SWIGTYPE_p_double;
import org.gnu.glpk.SWIGTYPE_p_int;
import org.gnu.glpk.glp_prob;
import org.gnu.glpk.glp_smcp;
package Khachiyan {
sealed abstract class Bound {}
case object NoBounds extends Bound {}
case class LowerBound (val lower: Double) extends Bound {}
case class UpperBound (val upper: Double) extends Bound {}
case class FullBounds (val lower: Double, val upper: Double) extends Bound {}
case class FixedBound (val equals: Double) extends Bound {}
class Equation (val equation : IndexedSeq [(Int, Double)]) {
lazy val maxVariable = equation.map (_._1).max
require (equation.map (_._1).min >= 0)
require (equation.map (_._1).toSet.size == equation.size)
}
// Note that GLPK is 1-based, so the variables are numbered 1 .. countVariables,
// and the bounds for variable i are found in variables(i - 1), since Scala
// collections are 0-based. A construction sketch follows the class.
class LinearProgram
(val countVariables : Int,
val variables : IndexedSeq [Bound],
val constraints : IndexedSeq [(Equation, Bound)],
val objective: Equation,
val constantObjective : Double,
val maximizeObjective : Boolean) {
def setVariable (variable : Int, newValue : Double) : LinearProgram = {
require (countVariables >= variable)
new LinearProgram (countVariables,
variables.updated(variable - 1, new FixedBound (newValue)), constraints,
objective, constantObjective, maximizeObjective)
}
require (constraints.map (x => x._1.maxVariable).max <= countVariables)
require (variables.length == countVariables)
require (objective.maxVariable <= countVariables)
}
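// Hedged construction sketch (illustrative values, not from the original
// source): maximize x1 + 2*x2 subject to x1 + x2 <= 4 with 0 <= x1, x2 <= 3.
// Note the 1-based variable numbers inside Equation versus the 0-based
// bounds vector.
//   val lp = new LinearProgram(
//     countVariables = 2,
//     variables = Vector(FullBounds(0, 3), FullBounds(0, 3)),
//     constraints = Vector((new Equation(Vector(1 -> 1.0, 2 -> 1.0)), UpperBound(4))),
//     objective = new Equation(Vector(1 -> 1.0, 2 -> 2.0)),
//     constantObjective = 0.0,
//     maximizeObjective = true)
//   GLPK_Solver.solve(lp)  // Right(answer) with answer.objective == 7.0 at (1, 3)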
object GLPK_Solver {
private val printCplex = false
val Messages_Off : Int = GLPKConstants.GLP_MSG_OFF
val Messages_Err : Int = GLPKConstants.GLP_MSG_ERR
val Messages_On : Int = GLPKConstants.GLP_MSG_ON
val Messages_All : Int = GLPKConstants.GLP_MSG_ALL
var c : Int = 0
def solve (lp: LinearProgram, errorLevel : Int = Messages_Off):
Either[(String, Int), LinearAnswer] = synchronized
{
val glpk_lp = GLPK.glp_create_prob()
// Insert the variables (columns)
GLPK.glp_add_cols(glpk_lp, lp.countVariables)
for ((varBound, i) <- lp.variables.zipWithIndex) {
varBound match {
case NoBounds => ;
case UpperBound (x) =>
GLPK.glp_set_col_bnds(glpk_lp, i + 1, GLPKConstants.GLP_UP, x, x)
case LowerBound (x) =>
GLPK.glp_set_col_bnds(glpk_lp, i + 1, GLPKConstants.GLP_LO, x, x)
case FullBounds (x, y) =>
GLPK.glp_set_col_bnds(glpk_lp, i + 1, GLPKConstants.GLP_DB, x, y)
case FixedBound (x) =>
GLPK.glp_set_col_bnds(glpk_lp, i + 1, GLPKConstants.GLP_FX, x, x)
}
}
// Insert the constraints (rows)
GLPK.glp_add_rows(glpk_lp, lp.constraints.size);
val maxArrayLength = lp.constraints.map (_._1.equation.size).max
val indices = GLPK.new_intArray(maxArrayLength + 1);
val values = GLPK.new_doubleArray(maxArrayLength + 1);
for (((equ, equBound), currRow) <- lp.constraints.zipWithIndex) {
equBound match {
case NoBounds => ;
case UpperBound (x) =>
GLPK.glp_set_row_bnds(glpk_lp, currRow + 1, GLPKConstants.GLP_UP, x, x)
case LowerBound (x) =>
GLPK.glp_set_row_bnds(glpk_lp, currRow + 1, GLPKConstants.GLP_LO, x, x)
case FullBounds (x, y) =>
GLPK.glp_set_row_bnds(glpk_lp, currRow + 1, GLPKConstants.GLP_DB, x, y)
case FixedBound (x) =>
GLPK.glp_set_row_bnds(glpk_lp, currRow + 1, GLPKConstants.GLP_FX, x, x)
}
for (((equVar, equVal), i) <- equ.equation.zipWithIndex) {
GLPK.intArray_setitem(indices, i + 1, equVar)
GLPK.doubleArray_setitem(values, i + 1, equVal)
}
GLPK.glp_set_mat_row(glpk_lp, currRow + 1, equ.equation.length, indices, values)
}
GLPK.delete_intArray(indices)
GLPK.delete_doubleArray(values)
// Set the objective function
if (lp.maximizeObjective) GLPK.glp_set_obj_dir(glpk_lp, GLPKConstants.GLP_MAX)
else GLPK.glp_set_obj_dir(glpk_lp, GLPKConstants.GLP_MIN)
GLPK.glp_set_obj_coef(glpk_lp, 0, lp.constantObjective) // Set constant value
for ((equVar, equVal) <- lp.objective.equation) {
GLPK.glp_set_obj_coef(glpk_lp, equVar, equVal)
}
// Solve model
val parm = new glp_smcp()
GLPK.glp_init_smcp(parm)
parm.setMsg_lev(errorLevel)
parm.setPresolve(GLPKConstants.GLP_ON)
if (printCplex) {
GLPK.glp_write_lp(glpk_lp, null, "test" + c + ".cplex")
c = c + 1
}
val ret = GLPK.glp_simplex(glpk_lp, parm)
if (ret != 0) {
GLPK.glp_delete_prob (glpk_lp)
ret match {
        case GLPKConstants.GLP_EBADB | GLPKConstants.GLP_ESING | GLPKConstants.GLP_ECOND =>
Left (("There is a problem with the initial basis. " +
"This message should be impossible.", ret))
case GLPKConstants.GLP_EBOUND =>
Left (("There is an invalid double bounds in the problem.", ret))
case GLPKConstants.GLP_EFAIL =>
Left (("The solver failed.", ret))
case GLPKConstants.GLP_EOBJLL | GLPKConstants.GLP_EOBJUL =>
Left(("The objective function can change without limit in the dual simplex.", ret))
case GLPKConstants.GLP_EITLIM =>
Left(("The iteration limit was exceeded.", ret))
case GLPKConstants.GLP_ETMLIM =>
Left(("The time limit was exceeded.", ret))
case GLPKConstants.GLP_ENOPFS =>
Left(("The problem has no primal feasible solutions.", ret))
case GLPKConstants.GLP_ENODFS =>
Left(("The problem has no dual feasible solutions.", ret))
case _ =>
Left(("An unknown problem has occurred.", ret))
}
} else {
val objVal = GLPK.glp_get_obj_val (glpk_lp)
val coeffs = new Array[Double](lp.countVariables)
for (i <- Range (0, lp.countVariables)) {
coeffs (i) = GLPK.glp_get_col_prim (glpk_lp, i + 1)
}
GLPK.glp_delete_prob(glpk_lp)
Right (new LinearAnswer (coeffs.toVector, objVal))
}
}
}
class LinearAnswer (val variableValues : Vector [Double], val objective : Double);
}
|
Prosfilaes/Cherizo-graph-splitter
|
src/main/scala/khachiyan.scala
|
Scala
|
gpl-3.0
| 6,572
|
/*
* Copyright 2016 Johannes Rudolph
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package net.virtualvoid.reify2
import scala.annotation.compileTimeOnly
import scala.reflect.macros.blackbox.Context
trait Reifier extends WithContext {
import c.universe._
trait Expr[+T] {
@compileTimeOnly("splice can only be used inside of reify")
def splice: T = ???
def tree: Tree
}
trait SeqExpr[+T] {
@compileTimeOnly("spliceSeq can only be used inside of reify")
def spliceSeq: Seq[T] = ???
@compileTimeOnly("spliceStatements can only be used inside of reify")
def spliceStatements: Unit = ???
}
implicit def autoConv[T](exp: Context#Expr[T]): Expr[T] = new Expr[T] { def tree = exp.tree.asInstanceOf[Tree] }
implicit def autoConvReverse[T](e: Expr[T]): c.Expr[T] = c.Expr[T](e.tree)
implicit def convToUnit[T](exp: Expr[T]): Expr[Unit] = new Expr[Unit] { def tree = exp.tree }
@compileTimeOnly("addSpliceSeq can only be used inside of reify")
implicit def addSpliceSeq[T](s: Seq[Expr[T]]): SeqExpr[T] = ???
@compileTimeOnly("addSpliceSeq can only be used inside of reify")
implicit def addSpliceSeq2[T](s: Seq[Context#Expr[T]]): SeqExpr[T] = ???
@compileTimeOnly("reified can only be used inside of reify")
implicit def Reified[T](any: T): { def reified: Expr[T] } = ???
def Expr[T](t: Tree): Expr[T] = new Expr[T] { def tree = t }
def reify[T](t: T): Expr[T] = macro ReifierImpl.reifyImpl[T]
def reifyShow[T](t: T): Expr[T] = macro ReifierImpl.reifyShowImpl[T]
@compileTimeOnly("reifyInner can only be used inside of reify")
def reifyInner[T](t: T): Expr[T] = ???
}
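// Hedged usage sketch (illustrative): inside a macro bundle that mixes in
// Reifier, `splice` composes previously reified trees into a new one.
//   val one: Expr[Int] = reify(1)
//   val sum: Expr[Int] = reify(one.splice + 2)  // yields the tree for `1 + 2`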
object ReifierImpl {
def reifyShowImpl[T: c.WeakTypeTag](c: Context { type PrefixType = Reifier })(t: c.Expr[T]): c.Expr[c.prefix.value.Expr[T]] = {
val res = reifyImpl(c)(t)
c.info(t.tree.pos, s"For '${t.tree}': ${c.universe.show(res)}", false)
res
}
def reifyImpl[T: c.WeakTypeTag](c: Context { type PrefixType = Reifier })(t: c.Expr[T]): c.Expr[c.prefix.value.Expr[T]] = {
import c.universe._
case class PlaceholderDef(orig: Tree, args: Seq[Tree], tpes: Seq[Type])
var placeholders = Map.empty[TermName, PlaceholderDef]
def addPlaceholder(name: TermName, ph: PlaceholderDef): Unit =
placeholders = placeholders.updated(name, ph)
object InnerReify {
def unapply(tree: Tree): Option[Tree] = tree match {
case q"${ _ }.reifyInner[..${ _ }]($exp)" ⇒ Some(exp)
case q"${ _ }.Reified[..${ _ }]($exp).reified" ⇒ Some(exp)
case _ ⇒ None
}
}
object RemoveInnerReify extends Traverser {
var args: Seq[Tree] = _
var tpes: Seq[Type] = _
override def traverse(tree: Tree): Unit = tree match {
case InnerReify(exp) ⇒
args = args :+ CreatePlaceholders.transform(exp)
tpes = tpes :+ exp.tpe
case _ ⇒ super.traverse(tree)
}
def run(t: Tree): PlaceholderDef = {
args = Seq.empty
tpes = Seq.empty
traverse(t)
PlaceholderDef(t, args, tpes)
}
}
object CreatePlaceholders extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case q"$expr.splice" ⇒
val name = c.freshName(TermName("placeholder$"))
val placeholder = RemoveInnerReify.run(expr)
addPlaceholder(name, placeholder)
q"$name(..${placeholder.args})"
case q"${ _ }.${ TermName("addSpliceSeq") | TermName("addSpliceSeq2") }[..${ _ }]($expr).spliceSeq" ⇒
val name = c.freshName(TermName("placeholderSeq$"))
val placeholder = RemoveInnerReify.run(expr)
addPlaceholder(name, placeholder)
q"scala.collection.immutable.Seq.apply($name(..${placeholder.args}))"
case q"${ _ }.${ TermName("addSpliceSeq") | TermName("addSpliceSeq2") }[..${ _ }]($expr).spliceStatements" ⇒
val name = c.freshName(TermName("placeholderStatements$"))
val placeholder = RemoveInnerReify.run(expr)
addPlaceholder(name, placeholder)
q"{ $name(..${placeholder.args}); () }"
case _ ⇒ super.transform(tree)
}
}
class FindDefinitions extends Traverser {
var definitions = Set.empty[Symbol]
override def traverse(tree: Tree): Unit = tree match {
case v: ValDef ⇒
definitions += v.symbol
traverse(v.rhs)
case _ ⇒ super.traverse(tree)
}
def run(t: Tree): Set[Symbol] = {
traverse(t)
definitions
}
}
class HygienifyDefs(defs: Map[Symbol, TermName]) extends Transformer {
override def transform(t: Tree): Tree = t match {
case v @ ValDef(mods, name, tpt, rhs) if defs.contains(v.symbol) ⇒
ValDef(mods, defs(v.symbol), tpt, transform(rhs))
case x: Ident if defs.contains(x.symbol) ⇒
//println(s"Replaced Ident($x)")
Ident(defs(x.symbol))
case s: Select if defs.contains(s.symbol) ⇒
//println(s"Replaced Select($s)")
Select(s.qualifier, defs(s.symbol))
case _ ⇒ super.transform(t)
}
}
val withPlaceholders = CreatePlaceholders.transform(t.tree)
val allDefs = (new FindDefinitions).run(withPlaceholders)
//println(s"Found defs: $allDefs in $t")
val newNames = allDefs.map { s ⇒
s -> c.freshName(TermName(s.asTerm.name.decodedName + "$"))
}.toMap
val freshenized = new HygienifyDefs(newNames).transform(withPlaceholders)
val justTheNames = newNames.values.toSet
val univ = c.typecheck(q"${c.prefix}.c.universe")
//println(s"Before reification $freshenized")
// We need to copy over the original type in case the original tree was a spliced one
// so that `freshenized` directly points to an untyped placeholder.
// `reifyTree` fails if it is invoked on an untyped tree (it doesn't care about
// inner untyped trees, though...).
c.internal.setType(freshenized, t.tree.tpe)
val reified = c.reifyTree(univ, EmptyTree, freshenized)
//println(s"Reified: $reified")
val pref = c.prefix
def buildExpr[T: c.WeakTypeTag](t: Tree): Tree = q"new $pref.Expr[${c.weakTypeTag[T]}] { val tree = $t.asInstanceOf[$pref.c.universe.Tree] }"
class InsertInnerReifies extends Transformer {
var args = Seq.empty[Tree]
var tpes = Seq.empty[Type]
override def transform(tree: Tree): Tree = tree match {
case InnerReify(_) ⇒
val res = ReplacePlaceholder.transform(args(0))
val tpe = tpes(0)
args = args.tail
tpes = tpes.tail
buildExpr(res)(c.WeakTypeTag(tpe.widen))
case _ ⇒ super.transform(tree)
}
def run(ph: PlaceholderDef, results: Seq[Tree]): Tree = {
args = results
tpes = ph.tpes
transform(ph.orig)
}
}
object NewTermName {
def unapply(tree: Tree): Option[String] = tree match {
// Scala 2.10
case q"${ _ }.newTermName(${ Literal(Constant(name: String)) })" ⇒ Some(name)
// Scala 2.11
case q"${ _ }.TermName(${ Literal(Constant(name: String)) })" ⇒ Some(name)
case _ ⇒ None
}
}
object ReplacePlaceholder extends Transformer {
def replacement(name: String, args: Seq[Tree]): Tree = {
(new InsertInnerReifies).run(placeholders(TermName(name)), args)
}
override def transform(tree: Tree): Tree = tree match {
case q"scala.collection.immutable.List.apply(${ _ }.Apply(${ _ }.Ident(${ NewTermName(name) }), ${ _ }.List.apply(..$args)))" if name.startsWith("placeholderSeq$") ⇒
//println(s"Found Seq placeholder!!! $name\\nBefore: $before\\nAfter: $placed")
val els = q"${replacement(name, args)}.map(_.tree.asInstanceOf[$$u.Tree])"
q"scala.collection.immutable.List.apply($els: _*)"
case q"${ _ }.Apply(${ _ }.Ident(${ NewTermName(name) }), ${ _ }.List.apply(..$args))" if name.startsWith("placeholder$") ⇒
//println(s"Found placeholder!!! $name\\nBefore: $before\\nAfter: $placed")
q"${replacement(name, args)}.tree.asInstanceOf[$$u.Tree]"
case q"scala.collection.immutable.List.apply(${ _ }.Apply(${ _ }.Ident(${ NewTermName(name) }), ${ _ }.List.apply(..$args)))" if name.startsWith("placeholderStatements$") ⇒
//println(s"Found placeholderStatements!!! $name\\nBefore: $tree\\n\\nAfter: $repl")
val els = q"${replacement(name, args)}.map(_.tree.asInstanceOf[$$u.Tree])"
q"scala.collection.immutable.List.apply($els: _*)"
case _ ⇒ super.transform(tree)
}
}
val replaced = ReplacePlaceholder.transform(reified)
//println(s"After placeholder replacement: $replaced")
def createFreshName(name: TermName): Tree = q"val $name = ${c.prefix}.c.freshName(${c.prefix}.c.universe.TermName(${name.decodedName + "$"}))"
object ReplaceFreshNames extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case NewTermName(name) if justTheNames(TermName(name)) ⇒
//println(s"Found instance of $name: $tree")
q"${Ident(TermName(name))}.asInstanceOf[$$u.TermName]"
case _ ⇒ super.transform(tree)
}
}
val withFreshNames =
q"""
..${justTheNames.toSeq.map(createFreshName(_))}
${ReplaceFreshNames.transform(replaced)}
"""
c.Expr[c.prefix.value.Expr[T]](atPos(t.tree.pos)(c.untypecheck(withFreshNames)))
}
}
|
jrudolph/reify-v2
|
reify/src/main/scala/net/virtualvoid/reify2/Reifier.scala
|
Scala
|
apache-2.0
| 10,103
|
import scala.scalajs.js.JSApp
import org.scalajs.dom
import xyz.ariwaranosai.tidori.dom.DomElements._
import xyz.ariwaranosai.tidori.dom.OperatorContext
import scalatags.JsDom.all._
import xyz.ariwaranosai.tidori.dom.DomOperator._
import xyz.ariwaranosai.tidori.dom.BeatOperatorImplicit._
/**
* Created by ariwaranosai on 2017/1/8.
*
*/
object Example extends JSApp {
@scala.scalajs.js.annotation.JSExport
override def main(): Unit = {
println("hello world!")
val pNode = dom.document.getElementById("broad")
val c = OperatorContext(pNode, 200)
val twitter = a(href:="https://twitter.com/ariwaranosai").render.bbs("twitter")
val blog = a(href:="http://ariwaranosai.xyz").render.bbs("blog")
val title = "在原佐为-ありわら の さい ".bb ~: htmlBR.b ~:
" 不入流的码农,不入流的失业党,不入流的死宅 ".bb ~: htmlBR.b ~:
" 脑洞略大,盛产负能量,喜欢读书".bb ~: 4.del() ~: 1.delay ~:
speed(4) ~: "常买书".bb ~: speed(0.25) ~: ",不怎么读 ".bb ~: htmlBR.b ~:
" 机器学习? 数据挖掘? 概率图模型? ".bb ~: htmlBR.b ~:
" vim党 血崩党 冷场达人 玩3ds的任黑 ".bb ~: htmlBR.b ~:
" 学haskell中途夭折,勉强写写Scala凑数,懒得时候写Python ".bb ~: htmlBR.b ~:
" C++什么的逼急了写写,反正都不能帮我毕业 ".bb ~: htmlBR.b ~:
" 审美差写不了前端,也不会写后台 ".bb ~: htmlBR.b ~:
" OSX Fedora Arch Win HHKB ".bb ~: htmlBR.b ~:
" 銀魂最高だ,周三就是该看金光布袋戏 ".bb ~: htmlBR.b ~:
" 链接: ".bb ~: twitter ~: " ".bb ~: blog
title.run(c)
}
}
|
ariwaranosai/tidori
|
examples/src/main/scala/Example.scala
|
Scala
|
mit
| 1,657
|
package io.udash.web.guide.views
object References {
val UdashjQueryWrapperRepo = "https://github.com/UdashFramework/scala-js-jquery"
val UdashGuideRepo = "https://github.com/UdashFramework/udash-core/tree/master/guide"
val UdashG8Repo = "https://github.com/UdashFramework/udash.g8"
val UdashFilesDemoRepo = "https://github.com/UdashFramework/udash-demos/tree/master/file-upload"
val AvScalaCommonsGitHub = "https://github.com/AVSystem/scala-commons"
val BootstrapHomepage = "http://getbootstrap.com/"
val JettyHomepage = "http://www.eclipse.org/jetty/"
val MvpPattern = "https://martinfowler.com/eaaDev/uiArchs.html#Model-view-presentermvp"
val ScalaCssHomepage = "https://github.com/japgolly/scalacss"
val ScalaJsHomepage = "http://www.scala-js.org/"
val ScalaHomepage = "http://www.scala-lang.org/"
val ScalatagsHomepage = "https://github.com/lihaoyi/scalatags"
val UpickleHomepage = "https://github.com/lihaoyi/upickle-pprint"
}
|
UdashFramework/udash-core
|
guide/guide/.js/src/main/scala/io/udash/web/guide/views/References.scala
|
Scala
|
apache-2.0
| 960
|
package codebook.runtime.util
import org.specs2.mutable.Specification
class OptionHelperTest extends Specification {
import OptionHelper._
"FailureLogger" should {
"~>" in {
var count = 0
val a:Option[Int] = None
val b:Option[Int] = Some(1)
a ~> (count += 1)
count must_== 1
b ~> (count += 1)
count must_== 1
}
}
}
|
RustyRaven/CodebookRuntime
|
scala/src/test/scala/codebook/runtime/util/OptionHelperTest.scala
|
Scala
|
mit
| 400
|
package com.acervera.spatialnetworklabs.osm
import java.io._
import org.apache.commons.codec.digest.DigestUtils
import org.openstreetmap.osmosis.pbf2.v0_6.impl.PbfFieldDecoder
import org.scalatest.{WordSpec, FunSpec}
import org.openstreetmap.osmosis.osmbinary.{Osmformat, Fileformat}
class OsmPbfBlockUtilsSpec extends WordSpec {
// "The PbfRawIteratorSpec should" should {
// "Extract rigth raw data" in {
// // val testFile = "/home/angel/workspaces/spatial/spatial-network-labs/tmp/ireland-and-northern-ireland-latest.osm.pbf"
// // val testFile = "/home/angel/projects/spatial-network-labs/tmp/spain-latest.osm.pbf"
// val testFile = "/home/angel/projects/spatial-network-labs/tmp/IE/IE"
// var counter = 0
// var pbfIS: InputStream = null
// try {
// pbfIS = new FileInputStream(testFile)
// PbfRawIterator(pbfIS, rawBlock => {
// val header = Fileformat.BlobHeader.parseFrom(rawBlock.header)
// val blob = Fileformat.Blob.parseFrom(rawBlock.blob)
//
// if(blob.hasZlibData()) {
// println("-----------------------------------------")
// println("HEADER:")
// println("getType: " + header.getType)
// println("getDatasize: " + header.getDatasize + " - " + rawBlock.blob.length)
// println("getIndexdata: " + header.getIndexdata)
// println("getParserForType: " + header.getParserForType)
// println("getSerializedSize: " + header.getSerializedSize)
// println("getTypeBytes: " + header.getTypeBytes)
// println("BLOB:")
// println("getLzmaData: " + blob.getLzmaData)
// println("getDefaultInstanceForType: " + blob.getDefaultInstanceForType)
// println("getParserForType: " + blob.getParserForType)
// println("getRaw: " + blob.getRaw)
// println("getRawSize: " + blob.getRawSize)
// println("getZlibData: " + blob.getZlibData)
// println("hasLzmaData: " + blob.hasLzmaData)
// println("hasRaw: " + blob.hasRaw)
// println("hasRawSize: " + blob.hasRawSize)
// println("hasZlibData: " + blob.hasZlibData)
// }
//
//
//
// val blockType = header.getType
// blockType match {
// case "OSMHeader" => {
// val headerBlock = Osmformat.HeaderBlock.parseFrom(rawBlock.blob)
// println("HEADER Block _______________ " )
// println(s"BBOX: ${headerBlock.getBbox.getLeft},${headerBlock.getBbox.getTop},${headerBlock.getBbox.getRight},${headerBlock.getBbox.getBottom}" )
///* headerBlock.getRequiredFeaturesList.forEach( (f:String) => {
// println(f)
// })*/
// println(headerBlock.getRequiredFeaturesList)
// }
// case "OSMData" => {
// // val headerBlock = Osmformat.PrimitiveBlock.parseFrom(rawBlock.raw)
//// println(s"BBOX: ${headerBlock.getBbox.getLeft},${headerBlock.getBbox.getTop},${headerBlock.getBbox.getRight},${headerBlock.getBbox.getBottom}" )
//// headerBlock.getRequiredFeaturesList
//// println(headerBlock.getOsmosisReplicationTimestamp)
// //val primaryBlock = Osmformat.PrimitiveBlock.parseFrom(rawBlock.blob)
// }
//
// }
//
// counter += 1
// })
//
// // assert(counter == 3, "There are 3 blocks!")
// } finally {
// if (pbfIS != null) pbfIS.close()
// }
// }
def storeRaw(raw: Array[Byte], outPath: String): Unit = {
var outFile: DataOutputStream = null
try {
outFile = new DataOutputStream(new FileOutputStream(outPath))
outFile.write(raw)
} finally {
if (outFile != null) {
outFile.flush()
outFile.close()
}
}
}
/* Temporal code used to generate test data set. */
"Extract right raw data" in {
println("Running Extract right raw data")
val osmPbfFile = "/home/angel/projects/spatial-network-labs/load-osm/src/test/resources/com/acervera/spatialnetworklabs/osm/three_blocks.osm.pbf"
var counter = 0
var pbfIS: InputStream = null
try {
pbfIS = new FileInputStream(osmPbfFile)
PbfRawIterator(pbfIS, rawBlock => {
storeRaw(rawBlock.raw, "/home/angel/projects/spatial-network-labs/tmp/blocks/block_" + counter + ".osm.pbf")
counter += 1
})
println(">>>>>>>>>>>>>>>>>>>" + counter)
//assert(counter == 3)
} finally {
if (pbfIS != null) pbfIS.close()
}
}
/* Temporal code used to generate test data set. */
"Extract different types of raw data" in {
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
val MAX_DENSE_BLOCKS = 3;
val MAX_NODES_BLOCKS = 3;
val MAX_WAYS_BLOCKS = 3;
val MAX_RELATIONS_BLOCKS = 3;
println("Extract different types of raw data")
val osmPbfFile = "/home/angel/projects/spatial-network-labs/tmp/ireland-and-northern-ireland-latest.osm.pbf"
// val osmPbfFile = "/home/angel/projects/spatial-network-labs/tmp/spain-latest.osm.pbf"
var counterNodes = 0
var counterDense = 0
var counterWays = 0
var counterRelations = 0
var pbfIS: InputStream = null
try {
pbfIS = new FileInputStream(osmPbfFile)
PbfRawIterator(pbfIS, rawBlock => {
if (rawBlock.blockType == "OSMData") {
val primitiveBlock = OsmPbfPrimitives.fromRawBlob(rawBlock.blob)
        primitiveBlock.getPrimitivegroupList.listIterator.foreach(primaryGroup => {
if (primaryGroup.getDense().getIdCount > 0) {
if (MAX_DENSE_BLOCKS > counterDense) {
storeRaw(rawBlock.raw, "/home/angel/projects/spatial-network-labs/tmp/blocks/dense_block_" + counterDense + ".osm.pbf")
}
counterDense += 1
}
if (primaryGroup.getNodesCount > 0) {
if (MAX_NODES_BLOCKS > counterNodes) {
storeRaw(rawBlock.raw, "/home/angel/projects/spatial-network-labs/tmp/blocks/nodes_block_" + counterNodes + ".osm.pbf")
}
counterNodes += 1
}
if (primaryGroup.getWaysCount > 0) {
if (MAX_WAYS_BLOCKS > counterWays) {
storeRaw(rawBlock.raw, "/home/angel/projects/spatial-network-labs/tmp/blocks/ways_block_" + counterWays + ".osm.pbf")
}
counterWays += 1
}
if (primaryGroup.getRelationsCount > 0) {
if (MAX_RELATIONS_BLOCKS > counterRelations) {
storeRaw(rawBlock.raw, "/home/angel/projects/spatial-network-labs/tmp/blocks/relations_block_" + counterRelations + ".osm.pbf")
}
counterRelations += 1
}
})
}
// var outFile : DataOutputStream = null
// try {
// outFile = new DataOutputStream( new FileOutputStream("/home/angel/projects/spatial-network-labs/tmp/blocks/block_"+counter+".osm.pbf") )
// outFile.write(rawBlock.raw)
// } finally {
// if(outFile!=null) {
// // outFile.write(1)
// outFile.flush()
// outFile.close()
// }
// }
})
} finally {
if (pbfIS != null) pbfIS.close()
}
println("counterDense: " + counterDense)
println("counterNodes: " + counterNodes)
println("counterWays: " + counterWays)
println("counterRelations: " + counterRelations)
}
}
|
angelcervera/spatial-network-labs
|
load-osm/src/test/scala/com/acervera/spatialnetworklabs/osm/OsmPbfBlockUtilsSpec.scala
|
Scala
|
mit
| 7,789
|
import scala.quoted.*
object scalatest {
transparent inline def assertCompile(inline code: String): Unit = ${ assertImpl('code, '{compiletime.testing.typeChecks(code)}, true) }
transparent inline def assertNotCompile(inline code: String): Unit = ${ assertImpl('code, '{compiletime.testing.typeChecks(code)}, false) }
def assertImpl(code: Expr[String], actual: Expr[Boolean], expect: Boolean)(using Quotes) : Expr[Unit] = {
'{ assert(${Expr(expect)} == $actual) }
}
}
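// Hedged usage sketch: `expect` is compared against the compile-time
// typecheck result of the quoted snippet.
//   scalatest.assertCompile("1 + 1")     // passes: the snippet typechecks
//   scalatest.assertNotCompile("1 + ")   // passes: the snippet does not typecheck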
|
dotty-staging/dotty
|
tests/run-macros/reflect-typeChecks/assert_1.scala
|
Scala
|
apache-2.0
| 482
|
/**
* Copyright (C) 2013 Stefan Niederhauser (nidin@gmx.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package guru.nidi.atlassian.remote.query
/**
*
*/
class QueryException(val query: String, cause: Throwable) extends RuntimeException("Problem executing '" + query + "': " + cause.getMessage, cause) {
}
|
nidi3/simple-remote-atlassian
|
src/main/scala/guru/nidi/atlassian/remote/query/QueryException.scala
|
Scala
|
apache-2.0
| 832
|
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package parallel.immutable
import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory}
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import mutable.ArrayBuffer
import immutable.Vector
import immutable.VectorBuilder
import immutable.VectorIterator
/** Immutable parallel vectors, based on vectors.
*
* $paralleliterableinfo
*
* $sideeffects
*
* @tparam T the element type of the vector
*
* @author Aleksandar Prokopec
* @since 2.9
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]]
* section on `ParVector` for more information.
*
* @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
class ParVector[+T](private[this] val vector: Vector[T])
extends ParSeq[T]
with GenericParTemplate[T, ParVector]
with ParSeqLike[T, ParVector[T], Vector[T]]
with Serializable
{
override def companion = ParVector
def this() = this(Vector())
def apply(idx: Int) = vector.apply(idx)
def length = vector.length
def splitter: SeqSplitter[T] = {
val pit = new ParVectorIterator(vector.startIndex, vector.endIndex)
vector.initIterator(pit)
pit
}
override def seq: Vector[T] = vector
override def toVector: Vector[T] = vector
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
val rem = remaining
if (rem >= 2) psplit(rem / 2, rem - rem / 2)
else Seq(this)
}
def psplit(sizes: Int*): Seq[ParVectorIterator] = {
var remvector = remainingVector
val splitted = new ArrayBuffer[Vector[T]]
for (sz <- sizes) {
splitted += remvector.take(sz)
remvector = remvector.drop(sz)
}
splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
}
/** $factoryInfo
* @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
object ParVector extends ParFactory[ParVector] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] =
new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T]
def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]]
}
private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] {
//self: EnvironmentPassingCombiner[T, ParVector[T]] =>
var sz = 0
val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T]
def size: Int = sz
def +=(elem: T): this.type = {
vectors.last += elem
sz += 1
this
}
def clear() = {
vectors.clear()
vectors += new VectorBuilder[T]
sz = 0
}
def result: ParVector[T] = {
val rvb = new VectorBuilder[T]
for (vb <- vectors) {
rvb ++= vb.result
}
new ParVector(rvb.result)
}
def combine[U <: T, NewTo >: ParVector[T]](other: Combiner[U, NewTo]) = if (other eq this) this else {
val that = other.asInstanceOf[LazyParVectorCombiner[T]]
sz += that.sz
vectors ++= that.vectors
this
}
}
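// Hedged usage sketch (illustrative): a ParVector wraps an existing Vector
// without copying, and `seq` recovers the underlying Vector.
//   val pv = new ParVector(Vector(1, 2, 3, 4))
//   pv.map(_ * 2).seq  // Vector(2, 4, 6, 8)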
|
felixmulder/scala
|
src/library/scala/collection/parallel/immutable/ParVector.scala
|
Scala
|
bsd-3-clause
| 3,958
|
package cb008
/**
* Created by liguodong on 2016/7/21.
*/
object EnsureTraitAddedType extends App{
import test5._
val e = new Enterprise
println(e.ejectWarpCore("password"))
println("-----------")
import test6.{Enterprise=>EnterpriceTest,_}
val e2 = new EnterpriceTest
println(e2.ejectWarpCore("password"))
e2.startWarpCore
}
package test5{
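  // Structural self-type: any concrete class mixing in WarpCore must supply
  // ejectWarpCore(String): Boolean, even though WarpCore declares no members.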
trait WarpCore {
this: { def ejectWarpCore(password: String): Boolean } =>
}
class Starship {
// code here ...
}
class Enterprise extends Starship with WarpCore {
def ejectWarpCore(password: String): Boolean = {
if (password == "password") {
println("ejecting core")
true
} else {
false
}
}
}
}
package test6{
trait WarpCore {
this: {
def ejectWarpCore(password: String): Boolean
def startWarpCore: Unit
} =>
}
class Starship
abstract class EnterpriseAbs extends Starship with WarpCore {
def ejectWarpCore(password: String): Boolean = {
if (password == "password") { println("core ejected"); true } else false
}
def startWarpCore { println("core started") }
}
class Enterprise extends EnterpriseAbs
}
|
liguodongIOT/java-scala-mix-sbt
|
src/main/scala/scalacookbook/chapter08/EnsureTraitAddedType.scala
|
Scala
|
apache-2.0
| 1,196
|
package security
import be.objectify.deadbolt.scala.cache.HandlerCache
import play.api.Configuration
import play.api.Environment
import play.api.inject.Binding
import play.api.inject.Module
import scala.collection.Seq
import javax.inject.Singleton
@Singleton
class MyCustomDeadboltHook extends Module {
//------------------------------------------------------------------------
// public
//------------------------------------------------------------------------
override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = {
Seq(bind[HandlerCache].to[MyHandlerCache].in[Singleton])
}
}
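// Hedged note: a Module like this is typically activated from configuration,
// e.g. in conf/application.conf:
//   play.modules.enabled += "security.MyCustomDeadboltHook"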
|
bravegag/play-authenticate-usage-scala
|
app/security/MyCustomDeadboltHook.scala
|
Scala
|
apache-2.0
| 639
|
package types
import scala.language/*=>scala.language.*/.existentials/*=>scala.language.existentials.*/
import scala.language/*=>scala.language.*/.higherKinds/*=>scala.language.higherKinds.*/
class ann/*<=types.ann#*/[T/*<=types.ann#[T]*/](x/*<=types.ann#x.*/: T/*=>types.ann#[T]*/) extends scala.annotation.StaticAnnotation/*=>scala.annotation.StaticAnnotation#*/
class ann1/*<=types.ann1#*/ extends scala.annotation.StaticAnnotation/*=>scala.annotation.StaticAnnotation#*/
class ann2/*<=types.ann2#*/ extends scala.annotation.StaticAnnotation/*=>scala.annotation.StaticAnnotation#*/
class B/*<=types.B#*/
class C/*<=types.C#*/
class P/*<=types.P#*/ {
class C/*<=types.P#C#*/
class X/*<=types.P#X#*/
val x/*<=types.P#x.*/ = new X/*=>types.P#X#*/
}
class T/*<=types.T#*/ {
class C/*<=types.T#C#*/
class X/*<=types.T#X#*/
val x/*<=types.T#x.*/ = new X/*=>types.T#X#*/
}
object Test/*<=types.Test.*/ {
class M/*<=types.Test.M#*/ {
def m/*<=types.Test.M#m().*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
}
trait N/*<=types.Test.N#*/ {
def n/*<=types.Test.N#n().*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
}
class C/*<=types.Test.C#*/ extends M/*=>types.Test.M#*/ {
val p/*<=types.Test.C#p.*/ = new P/*=>types.P#*/
val x/*<=types.Test.C#x.*/ = p/*=>types.Test.C#p.*/.x/*=>types.P#x.*/
val typeRef1/*<=types.Test.C#typeRef1.*/: C/*=>types.Test.C#*/ = ???/*=>scala.Predef.`???`().*/
val typeRef2/*<=types.Test.C#typeRef2.*/: p/*=>types.Test.C#p.*/.C/*=>types.P#C#*/ = ???/*=>scala.Predef.`???`().*/
val typeRef3/*<=types.Test.C#typeRef3.*/: T/*=>types.T#*/#C/*=>types.T#C#*/ = ???/*=>scala.Predef.`???`().*/
val typeRef4/*<=types.Test.C#typeRef4.*/: List/*=>scala.package.List#*/[Int/*=>scala.Int#*/] = ???/*=>scala.Predef.`???`().*/
val singleType1/*<=types.Test.C#singleType1.*/: x/*=>types.Test.C#x.*/.type = ???/*=>scala.Predef.`???`().*/
val singleType2/*<=types.Test.C#singleType2.*/: p/*=>types.Test.C#p.*/.x/*=>types.P#x.*/.type = ???/*=>scala.Predef.`???`().*/
val Either/*<=types.Test.C#Either.*/ = scala.util.Either/*=>scala.util.Either.*/
val thisType1/*<=types.Test.C#thisType1.*/: this.type = ???/*=>scala.Predef.`???`().*/
val thisType2/*<=types.Test.C#thisType2.*/: C/*=>types.Test.C#*/.this.type = ???/*=>scala.Predef.`???`().*/
val superType1/*<=types.Test.C#superType1.*/ = super.m/*=>types.Test.M#m().*/
val superType2/*<=types.Test.C#superType2.*/ = super[M/*=>types.Test.C#*/].m/*=>types.Test.M#m().*/
val superType3/*<=types.Test.C#superType3.*/ = C/*=>types.Test.C#*/.super[M/*=>types.Test.C#*/].m/*=>types.Test.M#m().*/
val compoundType1/*<=types.Test.C#compoundType1.*/: { def k/*<=local0*/: Int/*=>scala.Int#*/ } = ???/*=>scala.Predef.`???`().*/
val compoundType2/*<=types.Test.C#compoundType2.*/: M/*=>types.Test.M#*/ with N/*=>types.Test.N#*/ = ???/*=>scala.Predef.`???`().*/
val compoundType3/*<=types.Test.C#compoundType3.*/: M/*=>types.Test.M#*/ with N/*=>types.Test.N#*/ { def k/*<=local1*/: Int/*=>scala.Int#*/ } = ???/*=>scala.Predef.`???`().*/
val compoundType4/*<=types.Test.C#compoundType4.*/ = new /*<=local2*/{ def k/*<=local3*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/ }
val compoundType5/*<=types.Test.C#compoundType5.*/ = new /*<=local4*/M/*=>types.Test.M#*/ with N/*=>types.Test.N#*/
val compoundType6/*<=types.Test.C#compoundType6.*/ = new /*<=local5*/M/*=>types.Test.M#*/ with N/*=>types.Test.N#*/ { def k/*<=local6*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/ }
val annType1/*<=types.Test.C#annType1.*/: T/*=>types.T#*/ @ann/*=>types.ann#*/(42) = ???/*=>scala.Predef.`???`().*/
val annType2/*<=types.Test.C#annType2.*/: T/*=>types.T#*/ @ann1/*=>types.ann1#*/ @ann2/*=>types.ann2#*/ = ???/*=>scala.Predef.`???`().*/
val existentialType1/*<=types.Test.C#existentialType1.*/: T/*=>local7*/ forSome { type T/*<=local7*/ } = ???/*=>scala.Predef.`???`().*/
val existentialType2/*<=types.Test.C#existentialType2.*/: List/*=>scala.package.List#*/[_] = ???/*=>scala.Predef.`???`().*/
val existentialType3/*<=types.Test.C#existentialType3.*/ = Class/*=>java.lang.Class#*/.forName/*=>java.lang.Class#forName().*/("foo.Bar")
val existentialType4/*<=types.Test.C#existentialType4.*/ = Class/*=>java.lang.Class#*/.forName/*=>java.lang.Class#forName().*/("foo.Bar")
def typeLambda1/*<=types.Test.C#typeLambda1().*/[M/*<=types.Test.C#typeLambda1().[M]*/[_]] = ???/*=>scala.Predef.`???`().*/
typeLambda1/*=>types.Test.C#typeLambda1().*/[({ type L/*<=local11*/[T/*<=local12*/] = List/*=>scala.package.List#*/[T/*=>local12*/] })#L]
object ClassInfoType1/*<=types.Test.C#ClassInfoType1.*/
class ClassInfoType2/*<=types.Test.C#ClassInfoType2#*/ extends B/*=>types.B#*/ { def x/*<=types.Test.C#ClassInfoType2#x().*/ = 42 }
trait ClassInfoType3/*<=types.Test.C#ClassInfoType3#*/[T/*<=types.Test.C#ClassInfoType3#[T]*/]
object MethodType/*<=types.Test.C#MethodType.*/ {
def x1/*<=types.Test.C#MethodType.x1().*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
def x2/*<=types.Test.C#MethodType.x2().*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
def m3/*<=types.Test.C#MethodType.m3().*/: Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
def m4/*<=types.Test.C#MethodType.m4().*/(): Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
def m5/*<=types.Test.C#MethodType.m5().*/(x/*<=types.Test.C#MethodType.m5().(x)*/: Int/*=>scala.Int#*/): Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
def m6/*<=types.Test.C#MethodType.m6().*/[T/*<=types.Test.C#MethodType.m6().[T]*/](x/*<=types.Test.C#MethodType.m6().(x)*/: T/*=>types.Test.C#MethodType.m6().[T]*/): T/*=>types.Test.C#MethodType.m6().[T]*/ = ???/*=>scala.Predef.`???`().*/
}
object ByNameType/*<=types.Test.C#ByNameType.*/ {
def m1/*<=types.Test.C#ByNameType.m1().*/(x/*<=types.Test.C#ByNameType.m1().(x)*/: => Int/*=>scala.Int#*/): Int/*=>scala.Int#*/ = ???/*=>scala.Predef.`???`().*/
}
case class RepeatedType/*<=types.Test.C#RepeatedType#*/(s/*<=types.Test.C#RepeatedType#s.*/: String/*=>java.lang.String#*/*) {
def m1/*<=types.Test.C#RepeatedType#m1().*/(x/*<=types.Test.C#RepeatedType#m1().(x)*/: Int/*=>scala.Int#*/*): Int/*=>scala.Int#*/ = s/*=>types.Test.C#RepeatedType#s.*/.length/*=>scala.collection.SeqLike#length().*/
}
object TypeType/*<=types.Test.C#TypeType.*/ {
type T1/*<=types.Test.C#TypeType.T1#*/
def m2/*<=types.Test.C#TypeType.m2().*/[T2/*<=types.Test.C#TypeType.m2().[T2]*/ >: C/*=>types.Test.C#*/ <: C/*=>types.Test.C#*/] = ???/*=>scala.Predef.`???`().*/
def m3/*<=types.Test.C#TypeType.m3().*/[M3/*<=types.Test.C#TypeType.m3().[M3]*/[_]] = ???/*=>scala.Predef.`???`().*/
type T4/*<=types.Test.C#TypeType.T4#*/ = C/*=>types.Test.C#*/
type T5/*<=types.Test.C#TypeType.T5#*/[U/*<=types.Test.C#TypeType.T5#[U]*/] = U/*=>types.Test.C#TypeType.T5#[U]*/
}
}
object Literal/*<=types.Test.Literal.*/ {
final val int/*<=types.Test.Literal.int.*/ = 1
final val long/*<=types.Test.Literal.long.*/ = 1L
final val float/*<=types.Test.Literal.float.*/ = 1f
final val double/*<=types.Test.Literal.double.*/ = 2d
final val nil/*<=types.Test.Literal.nil.*/ = null
final val char/*<=types.Test.Literal.char.*/ = 'a'
final val string/*<=types.Test.Literal.string.*/ = "a"
final val bool/*<=types.Test.Literal.bool.*/ = true
final val unit/*<=types.Test.Literal.unit.*/ = ()
final val javaEnum/*<=types.Test.Literal.javaEnum.*/ = java.nio.file.LinkOption/*=>java.nio.file.LinkOption#*/.NOFOLLOW_LINKS/*=>java.nio.file.LinkOption#NOFOLLOW_LINKS.*/
final val clazzOfInt/*<=types.Test.Literal.clazzOfInt.*/ = classOf/*=>scala.Predef.classOf().*/[Int/*=>scala.Int#*/]
final val clazzOfOption/*<=types.Test.Literal.clazzOfOption.*/ = classOf/*=>scala.Predef.classOf().*/[Option[Int]]
final val clazzOfJStr/*<=types.Test.Literal.clazzOfJStr.*/ = classOf/*=>scala.Predef.classOf().*/[java.lang.String/*=>java.lang.String#*/]
final val clazzOfM/*<=types.Test.Literal.clazzOfM.*/ = classOf/*=>scala.Predef.classOf().*/[M/*=>types.Test.M#*/]
final val clazzOfTRef/*<=types.Test.Literal.clazzOfTRef.*/ = classOf/*=>scala.Predef.classOf().*/[T#C/*=>types.T#C#*/]
}
}
|
scalameta/scalameta
|
tests/jvm/src/test/resources/example/Types.scala
|
Scala
|
bsd-3-clause
| 8,359
|
package io.github.agormindustries.knowledgestand
import cpw.mods.fml.common.event.{FMLPostInitializationEvent, FMLInitializationEvent, FMLPreInitializationEvent}
import cpw.mods.fml.common.{SidedProxy, Mod}
import io.github.agormindustries.knowledgestand.block.BlockKnowledgeStand
import io.github.agormindustries.knowledgestand.core.proxy.CommonProxy
import cpw.mods.fml.common.registry.GameRegistry
import net.minecraftforge.common.MinecraftForge
import io.github.agormindustries.knowledgestand.core.handler.EventHandler
import org.apache.logging.log4j.{Logger, LogManager}
/**
* KnowledgeStand
* Knowledge-Stand
* Created by: MartijnWoudstra
* Date: 20-apr-2014
* License: GPL v3
**/
@Mod(modid = References.Modid, name = References.Modname, version = References.Version, modLanguage = "scala")
object KnowledgeStand {
final val logger: Logger = LogManager.getLogger(References.Modname)
@SidedProxy(clientSide = References.ClientProxyLocation, serverSide = References.CommonProxyLocation)
var proxy: CommonProxy = null
@Mod.EventHandler def preInit(event: FMLPreInitializationEvent) {
GameRegistry.registerBlock(BlockKnowledgeStand, "knowledge_stand")
}
@Mod.EventHandler def init(event: FMLInitializationEvent) {
MinecraftForge.EVENT_BUS.register(EventHandler.interactEvent)
}
@Mod.EventHandler def postInit(event: FMLPostInitializationEvent) {
}
}
/**
* References
* Knowledge-Stand
* Created by: MartijnWoudstra
* Date: 20-apr-2014
* License: GPL v3
**/
object References {
final val Modid = "knowledgestand"
final val Modname = "Knowledge Stand"
final val Version = "0.0.1"
final val ClientProxyLocation = "io.github.agormindustries.knowledgestand.core.proxy.ClientProxy"
final val CommonProxyLocation = "io.github.agormindustries.knowledgestand.core.proxy.CommonProxy"
}
|
AgormIndustries/Knowledge-Stand
|
src/main/scala/io/github/agormindustries/knowledgestand/KnowledgeStand.scala
|
Scala
|
gpl-3.0
| 1,836
|
package scalaprops
import scalaz.std.stream._
import scalaz.std.anyVal._
import ScalapropsScalaz._
object StreamTest extends Scalaprops {
val bindRec = scalazlaws.bindRec.laws[Stream].andThenParam(Param.maxSize(1))
val laws = Properties.list(
scalazlaws.monadPlusStrong.all[Stream],
scalazlaws.align.all[Stream],
scalazlaws.zip.all[Stream],
scalazlaws.isEmpty.all[Stream],
scalazlaws.cobind.all[Stream],
scalazlaws.traverse.all[Stream]
)
}
|
scalaprops/scalaprops
|
scalaz/src/test/scala/scalaprops/StreamTest.scala
|
Scala
|
mit
| 473
|
package net.artsy.atomic
/**
* Typeclass for objects that provide a string scope identifier that can be
* used to categorize them. In the context of AtomicEventStore, the scope
* identifier indicates the atomic log to which the event should be submitted,
* and atomicity is maintained per scope identifier, meaning that events for
* differing scopes can be processed in parallel, but within a scope, they are
* strictly sequential.
*
* This is external to the DomainEvent supertype client code uses so that the
* classes used in the client code need not natively know anything about
* scopes.
*
* @tparam T the underlying type
*/
trait Scoped[T] extends Serializable {
def scopeIdentifier(domainEvent: T): String
}
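// Hedged usage sketch (illustrative event type, not from the original file):
//   case class AccountEvent(accountId: String, payload: String)
//   implicit val accountScoped: Scoped[AccountEvent] = new Scoped[AccountEvent] {
//     def scopeIdentifier(e: AccountEvent): String = e.accountId
//   }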
|
acjay/atomic-store
|
src/main/scala/net/artsy/atomic/Scoped.scala
|
Scala
|
mit
| 731
|
package uk.gov.gds.ier.transaction.crown.dateOfBirth
import uk.gov.gds.ier.validation.{FormKeys, ErrorMessages, ErrorTransformForm}
import uk.gov.gds.ier.validation.constraints.DateOfBirthConstraints
import play.api.data.Forms._
import uk.gov.gds.ier.model.DateOfBirth
import uk.gov.gds.ier.model.noDOB
import uk.gov.gds.ier.model.DOB
import scala.Some
import play.api.data.validation.{Invalid, Valid, Constraint}
import uk.gov.gds.ier.transaction.crown.InprogressCrown
trait DateOfBirthForms extends DateOfBirthCrownConstraints {
self: FormKeys
with ErrorMessages =>
lazy val dobMapping = mapping(
keys.year.key -> text
.verifying("Please enter your year of birth", _.nonEmpty)
.verifying("The year you provided is invalid", year => year.isEmpty || year.matches("\\\\d+")),
keys.month.key -> text
.verifying("Please enter your month of birth", _.nonEmpty)
.verifying("The month you provided is invalid", month => month.isEmpty || month.matches("\\\\d+")),
keys.day.key -> text
.verifying("Please enter your day of birth", _.nonEmpty)
.verifying("The day you provided is invalid", day => day.isEmpty || day.matches("\\\\d+"))
) {
(year, month, day) => DOB(year.toInt, month.toInt, day.toInt)
} {
dateOfBirth =>
Some(
dateOfBirth.year.toString,
dateOfBirth.month.toString,
dateOfBirth.day.toString
)
}.verifying(validDate)
lazy val noDobMapping = mapping(
keys.reason.key -> optional(text),
keys.range.key -> optional(text)
) (
noDOB.apply
) (
noDOB.unapply
)
lazy val dobAndReasonMapping = mapping(
keys.dob.key -> optional(dobMapping),
keys.noDob.key -> optional(noDobMapping)
) (
DateOfBirth.apply
) (
DateOfBirth.unapply
) verifying(dobOrNoDobIsFilled, ifDobEmptyRangeIsValid, ifDobEmptyReasonIsNotEmpty)
val dateOfBirthForm = ErrorTransformForm(
mapping(
keys.dob.key -> optional(dobAndReasonMapping)
) (
dob => InprogressCrown(dob = dob)
) (
inprogress => Some(inprogress.dob)
) verifying dateOfBirthRequiredCrown
)
}
trait DateOfBirthCrownConstraints extends DateOfBirthConstraints {
self: ErrorMessages
with FormKeys =>
lazy val dateOfBirthRequiredCrown = Constraint[InprogressCrown](keys.dob.key) {
application => application.dob match {
case Some(dob) => Valid
case None => Invalid(
"Please enter your date of birth",
keys.dob.dob.day,
keys.dob.dob.month,
keys.dob.dob.year
)
}
}
}
|
michaeldfallen/ier-frontend
|
app/uk/gov/gds/ier/transaction/crown/dateOfBirth/DateOfBirthForms.scala
|
Scala
|
mit
| 2,560
|
package io.mattgates.counterservice.config
import com.typesafe.config.Config
/**
* Created by mgates on 3/4/17.
*/
case class CKiteConfig(address: String, port: Int, bootstrap: Boolean = false)
object CKiteConfig {
def apply(config: Config): CKiteConfig = {
CKiteConfig(
config.getString("bind-address"),
config.getInt("port"),
System.getProperty("ckite.bootstrap", config.getString("bootstrap")).toBoolean
)
}
}
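// Hedged usage sketch (block name "ckite" is an assumption; key names are
// taken from the getters above). -Dckite.bootstrap=true overrides the flag:
//   ckite {
//     bind-address = "127.0.0.1"
//     port = 9091
//     bootstrap = false
//   }
//   CKiteConfig(com.typesafe.config.ConfigFactory.load().getConfig("ckite"))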
|
mattgates5/challenge
|
src/main/scala/io/mattgates/counterservice/config/CKiteConfig.scala
|
Scala
|
mit
| 450
|
/* ___ _ ___ _ _ *\
** / __| |/ (_) | | The SKilL Generator **
** \__ \ ' <| | | |__ (c) 2013-16 University of Stuttgart **
** |___/_|\_\_|_|____| see LICENSE **
\* */
package de.ust.skill.generator.c.model
import scala.collection.JavaConversions._
import java.io.PrintWriter
import de.ust.skill.generator.c.GeneralOutputMaker
import de.ust.skill.ir.UserType
/**
* @author Fabian Harth, Timm Felden
* @todo rename skill state to skill file
* @todo ensure 80 characters margin
*/
trait StoragePoolHeaderMaker extends GeneralOutputMaker {
abstract override def make {
super.make
val out = files.open(s"model/${prefix}storage_pool.h")
val prefixCapital = packagePrefix.toUpperCase
out.write(s"""
#ifndef ${prefixCapital}STORAGE_POOL_H_
#define ${prefixCapital}STORAGE_POOL_H_
#include <glib.h>
#include <stdbool.h>
#include "../model/${prefix}type_declaration.h"
#include "../model/${prefix}types.h"
#include "../api/${prefix}api.h"
struct ${prefix}storage_pool_struct;
typedef struct ${prefix}storage_pool_struct *${prefix}storage_pool;
/**
* This stores information about instances contained in one type block.
* Instances which are created by the user, but are not yet serialized, do not
* belong to a type block.
*/
typedef struct ${prefix}storage_pool_struct {
/**
     * This is the id this pool has in the binary file (defined by the order
     * in which the types appear in the file). If this pool has not yet been
     * written to a file, this is set to -1.
*/
int64_t id;
//! This is set to true, if this pool has already been written to a binary file.
bool declared_in_file;
${prefix}type_declaration declaration;
/**
* list of field_information. This only contains fields from this pool's
* declaration. This is empty, unless this pool has already been written to
* file. Unknown fields are not stored at the moment and they will not be
* written when appending.
*/
GList *fields;
//! This is the root of the inheritance tree. May point to itself.
${prefix}storage_pool base_pool;
//! This is the direct super-type
${prefix}storage_pool super_pool;
//! GList of ${prefix}storage_pool. These are only the direct sub-types.
GList *sub_pools;
/**
* If this is a subtype, this field will be set before writing/appending.
* This is the index of the first instance of this type in the instance array
* of the base type.
*/
int64_t lbpsi;
/**
* Array of skill_type*. This only includes instances of this exact type.
     * Instances of sub-types are held by the sub-pools. This contains instances
     * that are already contained in a binary file.
*/
GArray *instances;
/**
* Array of skill_type*. This only includes instances of this exact type.
     * Instances of sub-types are held by the sub-pools. This contains only
     * instances that are user-created and not contained in a binary file.
*/
GArray *new_instances;
} ${prefix}storage_pool_struct;
${prefix}storage_pool ${prefix}storage_pool_new(${prefix}type_declaration declaration);
void ${prefix}storage_pool_destroy(${prefix}storage_pool this);
${prefix}skill_type ${prefix}storage_pool_get_instance_by_id(${prefix}storage_pool this, int64_t skill_id);
//! Returns all instances of this type (including sub-types).
//! The list must be deallocated manually.
GList *${prefix}storage_pool_get_instances(${prefix}storage_pool this);
/**
 * Returns instances of this type (including sub-types) that are created by the
 * user and not yet appended to the binary file. The list must be deallocated manually.
*/
GList *${prefix}storage_pool_get_new_instances(${prefix}storage_pool this);
//! This adds a user-created instance to the pool.
void ${prefix}storage_pool_add_instance(${prefix}storage_pool this, ${prefix}skill_type instance);
//! Deleted instances are not deleted immediately, but get their id set to null.
//! This function sets all references to deleted instances to null for instances of this pool.
void ${prefix}storage_pool_remove_null_references(${prefix}storage_pool this);
//! This method does everything to prepare this pool to be written to a binary file:
//! -put old and new instances together in one array
//! -remove deleted instances
//! -calculate lbpsi's
//! -set ids of instances
//! Call this method only on pools of base-types. It will take care of subtypes itself.
void ${prefix}storage_pool_prepare_for_writing(${prefix}storage_pool this);
//! This method does everything to prepare this pool to be appended to a binary file:
//! -check, that previously written instances have not been modified
//! -remove deleted instances, which are not yet written to a file
//! -calculate lbpsi's
//! -set ids of instances
//! Call this method only on pools of base-types. It will take care of subtypes itself.
void ${prefix}storage_pool_prepare_for_appending(${prefix}storage_pool this);
//! For writing/appending, we need pools ordered, so that sub-pools come after their super-pools.
//! This returns all sub-pools of this pool ordered that way, including all subtypes, not only direct subtypes.
GList *${prefix}storage_pool_get_sub_pools(${prefix}storage_pool this);
//! This merges new instances into the old instances after appending
void ${prefix}storage_pool_mark_instances_as_appended(${prefix}storage_pool this);
#endif /* STORAGE_POOL_H_ */
""")
out.close()
}
}
|
skill-lang/skill
|
src/main/scala/de/ust/skill/generator/c/model/StoragePoolHeaderMaker.scala
|
Scala
|
bsd-3-clause
| 5,733
|
/*
* Copyright 2011 Blackened Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blackenedsystems.core.mongodb
import grizzled.slf4j._
import com.mongodb.casbah._
import com.mongodb.casbah.commons.conversions.scala._
import commons.MongoDBObject
/**
* @author Alan Tibbetts
* @since 24/3/11 11:14 AM
*/
class AuthenticatedDataSource(host: String, port: Int, databaseName: String, userName: String, password: String) extends Logging {
RegisterJodaTimeConversionHelpers()
val mongoConn = MongoConnection(host, port)
val mongoDB = mongoConn(databaseName)
if (!mongoDB.authenticate(userName, password)) {
throw new Exception("Cannot authenticate. Login failed.")
}
info("Initialised dataSource: host: " + host + ", port: " + port + ", database: " + databaseName + ", user: " + userName)
def getConnection = mongoDB
def getCollection(collectionName: String) = mongoDB(collectionName)
/**
* Deletes all documents in the specified collection.
*/
def removeCollection(collectionName: String) = {
val coll = getCollection(collectionName)
coll.remove(MongoDBObject())
}
}
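// Hedged usage sketch (illustrative connection values):
//   val ds = new AuthenticatedDataSource("localhost", 27017, "core", "app", "secret")
//   val users = ds.getCollection("users")
//   users.insert(MongoDBObject("name" -> "alan"))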
|
betrcode/blackened-core
|
src/main/scala/com/blackenedsystems/core/mongodb/AuthenticatedDataSource.scala
|
Scala
|
apache-2.0
| 1,648
|
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2014 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.latrobe.blaze
/**
* Serializable object that encapsulates the serializable background state of
* a non serializable object.
*/
abstract class InstanceState
extends Serializable {
def parent
: InstanceState
}
/*
case object InstanceStateRestorationFailedException
extends Exception("Unable to restore state of object. Did you already use it?")
*/
|
bashimao/ltudl
|
blaze/src/main/scala/edu/latrobe/blaze/InstanceState.scala
|
Scala
|
apache-2.0
| 1,050
|
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package invariant.templateSolvers
import purescala.Definitions._
import purescala.Expressions._
import purescala.Extractors._
import purescala.Types._
import invariant.datastructure._
import invariant.util._
import leon.purescala.TypeOps
import PredicateUtil._
import scala.collection.mutable.{ Set => MutableSet, Map => MutableMap, MutableList }
class UFADTEliminator(ctx: LeonContext, program: Program) {
val debugAliases = false
val makeEfficient = true //this will happen at the expense of completeness
val reporter = ctx.reporter
val verbose = false
def collectCompatibleTerms(terms: Set[Expr]) = {
class Comp(val key: Either[TypedFunDef, TypeTree]) {
override def equals(other: Any) = other match {
case otherComp: Comp => mayAlias(key, otherComp.key)
case _ => false
}
    // a weaker property whose equality is necessary for mayAlias
val hashcode =
(key: @unchecked) match {
case Left(TypedFunDef(fd, _)) => fd.id.hashCode()
case Right(ct: CaseClassType) => ct.classDef.id.hashCode()
case Right(tp @ TupleType(tps)) => (tps.hashCode() << 3) ^ tp.dimension
}
override def hashCode = hashcode
}
val compTerms = MutableMap[Comp, MutableList[Expr]]()
terms.foreach { term =>
      // an optimization: here we can exclude calls to maxFun from axiomatization, since they will be inlined anyway
/*val shouldConsider = if(InvariantisCallExpr(call)) {
val BinaryOperator(_,FunctionInvocation(calledFun,_), _) = call
if(calledFun == DepthInstPhase.maxFun) false
else true
} else true*/
val compKey: Either[TypedFunDef, TypeTree] = term match {
case Equals(_, rhs) => rhs match { // tuple types require special handling before they are used as keys
case tp: Tuple =>
val TupleType(tps) = tp.getType
Right(TupleType(tps.map { TypeOps.bestRealType }))
case FunctionInvocation(tfd, _) => Left(tfd)
case CaseClass(ct, _) => Right(ct)
}
}
val comp = new Comp(compKey)
val compList = compTerms.getOrElse(comp, {
val newl = new MutableList[Expr]()
compTerms += (comp -> newl)
newl
})
compList += term
}
if (debugAliases) {
compTerms.foreach {
case (_, v) => println("Aliases: " + v.mkString("{", ",", "}"))
}
}
compTerms
}
/**
   * Converts the theory formula into a linear arithmetic formula.
   * The calls could be function calls or ADT constructor calls.
   * 'predEval' is an evaluator that evaluates a predicate to a boolean value.
   * TODO: is type parameter inheritance handled correctly?
*/
def constraintsForCalls(calls: Set[Expr], predEval: (Expr => Option[Boolean])): Seq[Expr] = {
//check if two calls (to functions or ADT cons) have the same value in the model
def doesAlias(call1: Expr, call2: Expr): Option[Boolean] = {
val Operator(Seq(r1 @ Variable(_), _), _) = call1
val Operator(Seq(r2 @ Variable(_), _), _) = call2
predEval(Equals(r1, r2)) match {
case Some(true) if isCallExpr(call1) =>
val (ants, _) = axiomatizeCalls(call1, call2)
val antsEvals = ants.map(ant => {
val Operator(Seq(lvar @ Variable(_), rvar @ Variable(_)), _) = ant
predEval(Equals(lvar, rvar))
})
        // return `false` if at least one argument equality evaluates to false
if (antsEvals.exists(_ == Some(false))) Some(false)
else if (antsEvals.exists(!_.isDefined)) None // here, we cannot decide if the call is true or false
else Some(true)
case r => r
}
}
def predForEquality(call1: Expr, call2: Expr): Seq[Expr] = {
val eqs = if (isCallExpr(call1)) {
val (_, rhs) = axiomatizeCalls(call1, call2)
Seq(rhs)
} else {
val (lhs, rhs) = axiomatizeADTCons(call1, call2)
lhs :+ rhs
}
//remove self equalities.
val preds = eqs.filter {
case Operator(Seq(Variable(lid), Variable(rid)), _) => {
if (lid == rid) false
else {
if (lid.getType == Int32Type || lid.getType == RealType || lid.getType == IntegerType) true
else false
}
}
case e @ _ => throw new IllegalStateException("Not an equality or Iff: " + e)
}
preds
}
def predForDisequality(call1: Expr, call2: Expr): Seq[Expr] = {
val (ants, _) = if (isCallExpr(call1)) {
axiomatizeCalls(call1, call2)
} else {
axiomatizeADTCons(call1, call2)
}
if (makeEfficient && ants.exists {
case Equals(l, r) if (l.getType != RealType && l.getType != BooleanType && l.getType != IntegerType) => true
case _ => false
}) {
Seq()
} else {
var unsatIntEq: Option[Expr] = None
var unsatOtherEq: Option[Expr] = None
ants.foreach(eq =>
if (unsatOtherEq.isEmpty) {
eq match {
case Equals(lhs @ Variable(_), rhs @ Variable(_)) if predEval(Equals(lhs, rhs)) == Some(false) => { // there must exist at least one such predicate
if (lhs.getType != Int32Type && lhs.getType != RealType && lhs.getType != IntegerType)
unsatOtherEq = Some(eq)
else if (unsatIntEq.isEmpty)
unsatIntEq = Some(eq)
}
case _ => ;
}
})
if (unsatOtherEq.isDefined) Seq() //need not add any constraint
else if (unsatIntEq.isDefined) {
//pick the constraint a < b or a > b that is satisfied
val Equals(lhs @ Variable(_), rhs @ Variable(_)) = unsatIntEq.get
val lLTr = LessThan(lhs, rhs)
predEval(lLTr) match {
case Some(true) => Seq(lLTr)
case Some(false) => Seq(GreaterThan(lhs, rhs))
case _ => Seq() // actually this case cannot happen.
}
} else throw new IllegalStateException("All arguments are equal: " + (call1, call2))
}
}
var equivClasses = new DisjointSets[Expr]()
var neqSet = MutableSet[(Expr, Expr)]()
val termClasses = collectCompatibleTerms(calls)
val preds = MutableList[Expr]()
termClasses.foreach {
case (_, compTerms) =>
val vec = compTerms.toArray
val size = vec.size
vec.zipWithIndex.foreach {
case (t1, j) =>
(j + 1 until size).foreach { i =>
val t2 = vec(i)
if (compatibleTArgs(termTArgs(t1), termTArgs(t2))) {
//note: here we omit constraints that encode transitive equality facts
val class1 = equivClasses.findOrCreate(t1)
val class2 = equivClasses.findOrCreate(t2)
if (class1 != class2 && !neqSet.contains((t1, t2)) && !neqSet.contains((t2, t1))) {
doesAlias(t1, t2) match {
case Some(true) =>
equivClasses.union(class1, class2)
preds ++= predForEquality(t1, t2)
case Some(false) =>
neqSet ++= Set((t1, t2))
preds ++= predForDisequality(t1, t2)
case _ =>
// in this case, we construct a weaker disjunct by dropping this predicate
}
}
}
}
}
}
Stats.updateCounterStats(preds.size, "CallADT-Constraints", "disjuncts")
preds.toSeq
}
def termTArgs(t: Expr) = {
t match {
case Equals(_, e) =>
e match {
case FunctionInvocation(TypedFunDef(_, tps), _) => tps
case CaseClass(ct, _) => ct.tps
case tp: Tuple =>
val TupleType(tps) = tp.getType
tps
}
}
}
/**
* This function actually checks if two non-primitive expressions could have the same value
* (when some constraints on their arguments hold).
   * Remark: when the expressions have ADT types, this is basically a form of may-alias check.
   * TODO: handling type parameters can become very tricky here.
   * For now we ignore type parameters of functions and classes. (This is complete, but may be less efficient.)
*/
def mayAlias(term1: Either[TypedFunDef, TypeTree], term2: Either[TypedFunDef, TypeTree]): Boolean = {
(term1, term2) match {
case (Left(TypedFunDef(fd1, _)), Left(TypedFunDef(fd2, _))) =>
fd1.id == fd2.id
case (Right(ct1: CaseClassType), Right(ct2: CaseClassType)) =>
ct1.classDef.id == ct2.classDef.id
case (Right(tp1 @ TupleType(tps1)), Right(tp2 @ TupleType(tps2))) if tp1.dimension == tp2.dimension =>
compatibleTArgs(tps1, tps2) //get the types and check if the types are compatible
case _ => false
}
}
def compatibleTArgs(tps1: Seq[TypeTree], tps2: Seq[TypeTree]): Boolean = {
(tps1 zip tps2).forall {
case (t1, t2) =>
val lub = TypeOps.leastUpperBound(t1, t2)
        lub == t1 || lub == t2 // holds iff one type is a supertype of the other
}
}
/**
   * This procedure generates the constraints under which the two calls are equal.
*/
def axiomatizeCalls(call1: Expr, call2: Expr): (Seq[Expr], Expr) = {
val (v1, fi1, v2, fi2) = {
val Equals(r1, f1 @ FunctionInvocation(_, _)) = call1
val Equals(r2, f2 @ FunctionInvocation(_, _)) = call2
(r1, f1, r2, f2)
}
val ants = (fi1.args.zip(fi2.args)).foldLeft(Seq[Expr]())((acc, pair) => {
val (arg1, arg2) = pair
acc :+ Equals(arg1, arg2)
})
val conseq = Equals(v1, v2)
(ants, conseq)
}
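  // Worked example (illustrative, not from the original source): for
  // call1 = (r1 == f(a1, b1)) and call2 = (r2 == f(a2, b2)), this returns
  // (Seq(a1 == a2, b1 == b2), r1 == r2), i.e. the function congruence axiom:
  // equal arguments imply equal results.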
/**
* The returned pairs should be interpreted as a bidirectional implication
*/
def axiomatizeADTCons(sel1: Expr, sel2: Expr): (Seq[Expr], Expr) = {
val (v1, args1, v2, args2) = sel1 match {
case Equals(r1 @ Variable(_), CaseClass(_, a1)) => {
val Equals(r2 @ Variable(_), CaseClass(_, a2)) = sel2
(r1, a1, r2, a2)
}
case Equals(r1 @ Variable(_), Tuple(a1)) => {
val Equals(r2 @ Variable(_), Tuple(a2)) = sel2
(r1, a1, r2, a2)
}
}
val ants = (args1.zip(args2)).foldLeft(Seq[Expr]())((acc, pair) => {
val (arg1, arg2) = pair
acc :+ Equals(arg1, arg2)
})
val conseq = Equals(v1, v2)
(ants, conseq)
}
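  // Worked example (illustrative): for sel1 = (r1 == Tuple(x1, y1)) and
  // sel2 = (r2 == Tuple(x2, y2)), the result is (Seq(x1 == x2, y1 == y2), r1 == r2).
  // For constructors the implication also holds right-to-left, hence the
  // "bidirectional implication" reading noted above.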
}
|
epfl-lara/leon
|
src/main/scala/leon/invariant/templateSolvers/UFADTEliminator.scala
|
Scala
|
gpl-3.0
| 10,513
|
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
import play.dev.filewatch.FileWatchService
import play.sbt.run.toLoggerProxy
import sbt._
import javax.net.ssl.{SSLContext, HttpsURLConnection, TrustManager, X509TrustManager}
import java.security.cert.X509Certificate
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.util.Properties
object DevModeBuild {
def jdk7WatchService = Def.setting {
FileWatchService.jdk7(Keys.sLog.value)
}
def jnotifyWatchService = Def.setting {
FileWatchService.jnotify(Keys.target.value)
}
  // Using 30 max attempts so that we can give more chances to
  // the file watcher service. This is relevant when using the
  // default JDK watch service, which uses polling.
val MaxAttempts = 30
  val WaitTime = 500L
val ConnectTimeout = 10000
val ReadTimeout = 10000
private val trustAllManager = {
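    // Accepts any certificate chain without validation; intended only for this
    // test, which talks to a local HTTPS server backed by a test keystore.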
val manager = new X509TrustManager() {
def getAcceptedIssuers: Array[X509Certificate] = null
def checkClientTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
def checkServerTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
}
Array[TrustManager](manager)
}
@tailrec
def verifyResourceContains(path: String, status: Int, assertions: Seq[String], attempts: Int, headers: (String, String)*): Unit = {
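    // Polls the resource, retrying on any exception up to MaxAttempts times
    // with WaitTime milliseconds between attempts, and verifies both the
    // status code and the presence of each expected substring in the body.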
println(s"Attempt $attempts at $path")
val messages = ListBuffer.empty[String]
try {
val sc = SSLContext.getInstance("SSL")
sc.init(null, trustAllManager, null)
HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory)
val url = new java.net.URL("https://localhost:9443" + path)
val conn = url.openConnection().asInstanceOf[java.net.HttpURLConnection]
conn.setConnectTimeout(ConnectTimeout)
conn.setReadTimeout(ReadTimeout)
headers.foreach(h => conn.setRequestProperty(h._1, h._2))
      if (status == conn.getResponseCode) {
        messages += s"Resource at $path returned $status as expected"
      } else {
        throw new RuntimeException(s"Resource at $path returned ${conn.getResponseCode} instead of $status")
      }
val is = if (conn.getResponseCode >= 400) conn.getErrorStream else conn.getInputStream
// The input stream may be null if there's no body
val contents = if (is != null) {
val c = IO.readStream(is)
is.close()
c
} else ""
conn.disconnect()
assertions.foreach { assertion =>
        if (contents.contains(assertion)) {
          messages += s"Resource at $path contained $assertion"
        } else {
          throw new RuntimeException(s"Resource at $path didn't contain '$assertion':\n$contents")
        }
}
messages.foreach(println)
} catch {
case e: Exception =>
println(s"Got exception: $e")
if (attempts < MaxAttempts) {
Thread.sleep(WaitTime)
verifyResourceContains(path, status, assertions, attempts + 1, headers: _*)
} else {
messages.foreach(println)
println(s"After $attempts attempts:")
throw e
}
}
}
}
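// Hedged usage sketch (illustrative; the path and assertion are hypothetical):
// DevModeBuild.verifyResourceContains("/assets/main.css", 200, Seq("body"), 0)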
|
Shenker93/playframework
|
framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/generated-keystore/project/Build.scala
|
Scala
|
apache-2.0
| 3,102
|