code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package idv.brianhsu.maidroid.plurk.util
import idv.brianhsu.maidroid.ui.util.AsyncUI._
import idv.brianhsu.maidroid.plurk.view._
import scala.concurrent._
import android.graphics.drawable.Drawable
import android.widget.EditText
import android.widget.TextView
import android.text.Spanned
import android.text.style.ImageSpan
import android.text.style.DynamicDrawableSpan
import org.bone.soplurk.api.PlurkAPI
import org.bone.soplurk.model.Icon
import org.bone.soplurk.constant.WritableCommentSetting
import org.bone.soplurk.constant.Qualifier
import android.text.Editable
import android.text.TextWatcher
import android.graphics.Color
object PlurkEditor {
  /** Signals that posting was attempted while the editor contains no (non-whitespace) text. */
  object NoContentException extends Exception("No Content")
}
/**
 * Mixin implementing the shared behaviour of a Plurk editor UI:
 * character counting, icon/emoticon insertion, and posting new plurks or
 * responses through the Plurk API.
 *
 * Implementors supply the API handle and the (optional) Android widgets.
 * Every widget accessor returns an Option so the methods below degrade to
 * no-ops when a view is not attached.
 */
trait PlurkEditor {

  /** Authenticated Plurk API used by [[postPlurk]] and [[postResponse]]. */
  protected def plurkAPI: PlurkAPI

  /** The text field holding the plurk content, if the view is attached. */
  protected def contentEditorHolder: Option[EditText]

  /** Spinner selecting the plurk qualifier, if the view is attached. */
  protected def qualifierSpinnerHolder: Option[QualifierSpinner]

  /** Spinner selecting who may comment on the plurk, if attached. */
  protected def responseTypeSpinnerHolder: Option[ResponseTypeSpinner]

  /** Label displaying the remaining character count, if attached. */
  protected def charCounterHolder: Option[TextView]

  /** User IDs the plurk is limited to; Nil means no restriction. */
  protected def limitedTo: List[Long] = Nil

  /** Maximum number of characters allowed in one plurk. */
  protected def maxTextLength = 210

  /** Extra text appended to the content when posting (e.g. a share-setting marker). */
  protected def shareSettingPostfix: String = ""

  /**
   * Refreshes the remaining-character label from the current editor text.
   * The counter turns red once the text exceeds [[maxTextLength]], white
   * otherwise.  No-op when either widget is missing.
   */
  def updateCharCounter() {
    for {
      contentEditor <- contentEditorHolder
      charCounter <- charCounterHolder
    } {
      val remainChars = maxTextLength - contentEditor.getText.length
      charCounter.setText(remainChars.toString)
      if (remainChars < 0) {
        charCounter.setTextColor(Color.rgb(255, 0, 0))    // over the limit: red
      } else {
        charCounter.setTextColor(Color.rgb(255, 255, 255))
      }
    }
  }

  /**
   * Wires [[updateCharCounter]] to run after every text change in the
   * content editor.  No-op when either widget is missing.
   */
  protected def setupCharCounter() {
    for {
      contentEditor <- contentEditorHolder
      charCounter <- charCounterHolder
    } {
      contentEditor.addTextChangedListener(new TextWatcher() {
        override def beforeTextChanged(s: CharSequence, start: Int, count: Int, after: Int) {}
        override def onTextChanged(s: CharSequence, start: Int, before: Int, count: Int) {}
        override def afterTextChanged (editable: Editable) {
          updateCharCounter()
        }
      })
    }
  }

  /** Hook for subclasses to record the selected cliques / users; default is a no-op. */
  def setSelected(cliques: Set[String], users: Set[(Long, String)]) {}

  /** Hook for subclasses to record the blocked cliques / users; default is a no-op. */
  def setBlocked(cliques: Set[String], users: Set[(Long, String)]) {}

  /** Current length of the editor text, or 0 when no editor is attached. */
  def getContentLength = contentEditorHolder.map(_.getText.toString.size) getOrElse 0

  /**
   * Restores editor content and cursor position (as produced by
   * [[getEditorContent]]).  The text is set as a SPANNABLE buffer so that
   * image spans survive the round trip.
   */
  def setEditorContent(content: (Editable, Int)) {
    contentEditorHolder.foreach { editor =>
      editor.setText(content._1, android.widget.TextView.BufferType.SPANNABLE)
      editor.setSelection(content._2)
    }
  }

  /** Replaces the editor content with a plain string. */
  def setEditorContent(content: String) {
    contentEditorHolder.foreach { editor =>
      editor.setText(content)
    }
  }

  /**
   * Returns the current editor text together with the cursor position
   * (clamped to 0 when there is no selection), or None when no editor is
   * attached.
   */
  def getEditorContent = contentEditorHolder.map { editor =>
    (editor.getText, editor.getSelectionStart.max(0))
  }

  /**
   * Replaces the current selection with `originString` and overlays the
   * given drawable on that span, so the raw text is kept while the image is
   * shown.  No-op when no editor is attached.
   */
  def insertDrawable(originString: String, drawable: Drawable) {
    contentEditorHolder.foreach { editor =>
      drawable.setBounds(0, 0, drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight())
      val imageSpan = new ImageSpan(drawable, DynamicDrawableSpan.ALIGN_BASELINE)
      // getSelectionStart/End return -1 when there is no selection; clamp to 0.
      val start = editor.getSelectionStart.max(0)
      val end = editor.getSelectionEnd.max(0)
      val message = editor.getEditableText
      message.replace(start, end, originString)
      message.setSpan(imageSpan, start, start + originString.length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE)
    }
  }

  /**
   * Inserts an icon at the cursor: as an inline image when the drawable is
   * available, otherwise as its plain-text form.
   */
  def insertIcon(icon: Icon, drawableHolder: Option[Drawable]) {
    drawableHolder match {
      case Some(drawable) => insertDrawable(s" ${icon.insertText} ", drawable)
      case None => insertText(icon.insertText)
    }
  }

  /** Inserts plain text at the current cursor position. */
  def insertText(text: String) {
    contentEditorHolder.foreach { editor =>
      editor.getEditableText.insert(editor.getSelectionStart.max(0), text)
    }
  }

  /**
   * Posts the editor content as a new plurk.  Runs asynchronously; the
   * returned Future fails with [[PlurkEditor.NoContentException]] when the
   * trimmed content is empty, and with whatever the API `.get` calls throw
   * on failure.  The plurk language is taken from the current user's
   * profile.
   */
  def postPlurk() = Future {
    val isEmpty = contentEditorHolder.map(_.getText.toString.trim.isEmpty).getOrElse(true)
    if (isEmpty) {
      throw PlurkEditor.NoContentException
    }
    val content = contentEditorHolder.map(_.getText.toString + shareSettingPostfix).getOrElse("")
    val language = plurkAPI.Users.currUser.get._1.basicInfo.defaultLanguage
    val qualifier = qualifierSpinnerHolder.map(_.getSelectedQualifier).getOrElse(Qualifier.::)
    // getOrElse(None): no response-type spinner means no explicit comment setting.
    val commentSetting = responseTypeSpinnerHolder.map(_.getSelectedCommentSetting).getOrElse(None)
    plurkAPI.Timeline.plurkAdd(
      content, qualifier,
      limitedTo, commentSetting, Some(language)
    ).get
  }

  /**
   * Posts the editor content as a response to the given plurk.  Same
   * failure semantics as [[postPlurk]]; note that [[shareSettingPostfix]]
   * is NOT appended to responses.
   */
  def postResponse (plurkID: Long) = Future {
    val isEmpty = contentEditorHolder.map(_.getText.toString.trim.isEmpty).getOrElse(true)
    if (isEmpty) {
      throw PlurkEditor.NoContentException
    }
    val content = contentEditorHolder.map(_.getText.toString).getOrElse("")
    val qualifier = qualifierSpinnerHolder.map(_.getSelectedQualifier).getOrElse(Qualifier.::)
    plurkAPI.Responses.responseAdd(plurkID, content, qualifier).get
  }
}
| brianhsu/MaidroidPlurk | src/main/scala/util/PlurkEditor.scala | Scala | gpl-3.0 | 4,904 |
/*
* Copyright (c) 2021-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.schemaddl.bigquery
import io.circe._
import com.snowplowanalytics.iglu.schemaddl.jsonschema.Schema
import com.snowplowanalytics.iglu.schemaddl.jsonschema.properties.{CommonProperties, StringProperty}
/**
 * Suggestion functions that map a JSON Schema's `type`/`format`/`enum`
 * properties onto BigQuery field definitions.  Each suggestion inspects the
 * schema and, when it applies, returns a constructor from a column name to a
 * [[Field]]; otherwise None.  [[suggestions]] lists them (presumably tried
 * in order — timestamp before string so date formats win; confirm at the
 * call site), with [[finalSuggestion]] as the String fallback.
 */
object Suggestion {

  /** String type => BQ String; nullable string union => nullable String. */
  val stringSuggestion: Suggestion = (schema, required) =>
    schema.`type` match {
      case Some(CommonProperties.Type.String) =>
        Some(name => Field(name, Type.String, Mode.required(required)))
      // NOTE(review): this case uses `types.nullable(...)` while the other
      // suggestions use Union + withNull — verify the two are equivalent.
      case Some(types) if types.nullable(CommonProperties.Type.String) =>
        Some(name => Field(name, Type.String, Mode.Nullable))
      case _ => None
    }

  /** Boolean type (optionally unioned with null) => BQ Boolean. */
  val booleanSuggestion: Suggestion = (schema, required) =>
    schema.`type` match {
      case Some(CommonProperties.Type.Boolean) =>
        Some(name => Field(name, Type.Boolean, Mode.required(required)))
      case Some(CommonProperties.Type.Union(types)) if withNull(types, CommonProperties.Type.Boolean) =>
        Some(name => Field(name, Type.Boolean, Mode.Nullable))
      case _ => None
    }

  /** Integer type (optionally unioned with null) => BQ Integer. */
  val integerSuggestion: Suggestion = (schema, required) =>
    schema.`type` match {
      case Some(CommonProperties.Type.Integer) =>
        Some(name => Field(name, Type.Integer, Mode.required(required)))
      case Some(CommonProperties.Type.Union(types)) if withNull(types, CommonProperties.Type.Integer) =>
        Some(name => Field(name, Type.Integer, Mode.Nullable))
      case _ => None
    }

  /**
   * Number type => BQ Float.  A union of number+integer also maps to Float
   * (with or without null), since Float can represent both.
   */
  val floatSuggestion: Suggestion = (schema, required) =>
    schema.`type` match {
      case Some(CommonProperties.Type.Number) =>
        Some(name => Field(name, Type.Float, Mode.required(required)))
      case Some(CommonProperties.Type.Union(types)) if onlyNumeric(types, true) =>
        Some(name => Field(name, Type.Float, Mode.Nullable))
      case Some(CommonProperties.Type.Union(types)) if onlyNumeric(types, false) =>
        Some(name => Field(name, Type.Float, Mode.required(required)))
      case Some(CommonProperties.Type.Union(types)) if withNull(types, CommonProperties.Type.Number) =>
        Some(name => Field(name, Type.Float, Mode.Nullable))
      case _ => None
    }

  /** Schemas defined purely by an enum — delegate to [[fromEnum]]. */
  val complexEnumSuggestion: Suggestion = (schema, required) =>
    schema.enum match {
      case Some(CommonProperties.Enum(values)) =>
        Some(fromEnum(values, required))
      case _ => None
    }

  // `date-time` format usually means zoned format, which corresponds to BQ Timestamp
  val timestampSuggestion: Suggestion = (schema, required) =>
    (schema.`type`, schema.format) match {
      case (Some(CommonProperties.Type.String), Some(StringProperty.Format.DateFormat)) =>
        Some(name => Field(name, Type.Date, Mode.required(required)))
      case (Some(CommonProperties.Type.Union(types)), Some(StringProperty.Format.DateFormat)) if withNull(types, CommonProperties.Type.String) =>
        Some(name => Field(name, Type.Date, Mode.Nullable))
      case (Some(CommonProperties.Type.String), Some(StringProperty.Format.DateTimeFormat)) =>
        Some(name => Field(name, Type.Timestamp, Mode.required(required)))
      case (Some(CommonProperties.Type.Union(types)), Some(StringProperty.Format.DateTimeFormat)) if withNull(types, CommonProperties.Type.String) =>
        Some(name => Field(name, Type.Timestamp, Mode.Nullable))
      case _ => None
    }

  /** Last-resort suggestion: fall back to a String column. */
  def finalSuggestion(schema: Schema, required: Boolean): String => Field =
    schema.`type` match {
      case Some(jsonType) if jsonType.nullable =>
        name => Field(name, Type.String, Mode.Nullable)
      case _ =>
        name => Field(name, Type.String, Mode.required(required))
    }

  val suggestions: List[Suggestion] = List(
    timestampSuggestion,
    booleanSuggestion,
    stringSuggestion,
    integerSuggestion,
    floatSuggestion,
    complexEnumSuggestion
  )

  /**
   * Picks the narrowest BQ type covering all enum values: Integer if every
   * value is an integral number (or null), Float if all numeric, otherwise
   * String.  A null among the values forces Nullable mode even when the
   * property is required.
   */
  private[iglu] def fromEnum(enums: List[Json], required: Boolean): String => Field = {
    def isString(json: Json) = json.isString || json.isNull
    def isInteger(json: Json) = json.asNumber.exists(_.toBigInt.isDefined) || json.isNull
    def isNumeric(json: Json) = json.isNumber || json.isNull
    val noNull: Boolean = !enums.contains(Json.Null)
    if (enums.forall(isString)) {
      name => Field(name, Type.String, Mode.required(required && noNull))
    } else if (enums.forall(isInteger)) {
      name => Field(name, Type.Integer, Mode.required(required && noNull))
    } else if (enums.forall(isNumeric)) {
      name => Field(name, Type.Float, Mode.required(required && noNull))
    } else {
      name => Field(name, Type.String, Mode.required(required && noNull))
    }
  }

  // NOTE(review): this also accepts a union of JUST `t` (no Null member),
  // which still yields Mode.Nullable at the call sites — confirm intended.
  private def withNull(types: Set[CommonProperties.Type], t: CommonProperties.Type): Boolean =
    types == Set(t, CommonProperties.Type.Null) || types == Set(t)

  /** True when the union is exactly {Number, Integer} (+ Null iff allowNull). */
  private def onlyNumeric(types: Set[CommonProperties.Type], allowNull: Boolean): Boolean =
    if (allowNull) types == Set(CommonProperties.Type.Number, CommonProperties.Type.Integer, CommonProperties.Type.Null)
    else types == Set(CommonProperties.Type.Number, CommonProperties.Type.Integer)
}
| snowplow/schema-ddl | modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/bigquery/Suggestion.scala | Scala | apache-2.0 | 5,760 |
/*
* Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.rewrites
import quasar._
import quasar.api.resource.ResourcePath
import quasar.fp._
import quasar.contrib.iota._
import quasar.contrib.iota.SubInject
import quasar.qscript._
import matryoshka.data.Fix
import pathy.Path._
import scalaz._, Scalaz._
/**
 * Specs for the QScript normalization rewrites: static-projection
 * elimination (projecting a key out of a just-built map), normalization
 * inside Union branches, and elision of no-op Maps.  Each test compares the
 * output of [[normalizeExpr]] on an "educated" (pre-rewrite) plan against
 * the expected normalized plan.
 */
object NormalizeSpec extends quasar.Qspec {
  import IdStatus.ExcludeId

  type QST[A] = QScriptTotal[Fix, A]
  type QSNorm[A] = QScriptNormalized[Fix, A]

  /** Runs the QScript normalization followed by FreeMap normalization. */
  def normalizeExpr(expr: Fix[QSNorm]): Fix[QSNorm] =
    NormalizeQScriptFreeMap(NormalizeQScript[Fix](expr))

  implicit def normalizedToTotal: Injectable[QSNorm, QST] =
    SubInject[QSNorm, QST]

  // DSL helpers for building QScript trees (fix./free./recFunc./func.).
  val qsdsl = construction.mkDefaults[Fix, QSNorm]

  "rewriter" should {
    import qsdsl._

    // select b[*] + c[*] from intArrays.data
    "normalize static projections in shift coalescing" in {
      // Two nested LeftShifts, each result wrapped in maps keyed "0"/"1",
      // then projected back out and summed by outer Maps.
      val educated =
        fix.Map(
          fix.Map(
            fix.LeftShift(
              fix.LeftShift(
                fix.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("intArrays")), ExcludeId),
                recFunc.ProjectKeyS(recFunc.Hole, "b"),
                ExcludeId,
                ShiftType.Array,
                OnUndefined.Emit,
                func.ConcatMaps(
                  func.MakeMapS("original", func.LeftSide),
                  func.MakeMapS("0", func.RightSide))),
              recFunc.ProjectKeyS(recFunc.ProjectKeyS(recFunc.Hole, "original"), "c"),
              ExcludeId,
              ShiftType.Array,
              OnUndefined.Emit,
              func.ConcatMaps(
                func.LeftSide,
                func.MakeMapS("1", func.RightSide))),
            recFunc.ConcatMaps(
              recFunc.MakeMapS("0", recFunc.ProjectKeyS(recFunc.Hole, "0")),
              recFunc.MakeMapS("1", recFunc.ProjectKeyS(recFunc.Hole, "1")))),
          recFunc.Add(
            recFunc.ProjectKeyS(recFunc.Hole, "0"),
            recFunc.ProjectKeyS(recFunc.Hole, "1")))

      // The outer Maps are folded into the second shift's repair function.
      val normalized =
        fix.LeftShift(
          fix.LeftShift(
            fix.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("intArrays")), ExcludeId),
            recFunc.ProjectKeyS(recFunc.Hole, "b"),
            ExcludeId,
            ShiftType.Array,
            OnUndefined.Emit,
            func.ConcatMaps(
              func.MakeMapS("original", func.LeftSide),
              func.MakeMapS("0", func.RightSide))),
          recFunc.ProjectKeyS(recFunc.ProjectKeyS(recFunc.Hole, "original"), "c"),
          ExcludeId,
          ShiftType.Array,
          OnUndefined.Emit,
          func.Add(
            func.ProjectKeyS(func.LeftSide, "0"),
            func.RightSide))

      normalizeExpr(educated) must equal(normalized)
    }

    // select (select a, b from zips).a + (select a, b from zips).b
    "normalize static projections in a contrived example" in {
      // Build a map of {a, b} then immediately project both keys back out:
      // the intermediate map construction should be eliminated.
      val educated =
        fix.Map(
          fix.Map(
            fix.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips")), ExcludeId),
            recFunc.ConcatMaps(
              recFunc.MakeMapS("a", recFunc.ProjectKeyS(recFunc.Hole, "a")),
              recFunc.MakeMapS("b", recFunc.ProjectKeyS(recFunc.Hole, "b")))),
          recFunc.Add(
            recFunc.ProjectKeyS(recFunc.Hole, "a"),
            recFunc.ProjectKeyS(recFunc.Hole, "b")))

      val normalized =
        fix.Map(
          fix.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips")), ExcludeId),
          recFunc.Add(
            recFunc.ProjectKeyS(recFunc.Hole, "a"),
            recFunc.ProjectKeyS(recFunc.Hole, "b")))

      normalizeExpr(educated) must equal(normalized)
    }

    "normalize within a Union" in {
      // Same map-then-project pattern in each Union branch; both branches
      // must be normalized independently.
      val educated =
        fix.Union(
          fix.Unreferenced,
          free.Map(
            free.Map(
              free.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips1")), ExcludeId),
              recFunc.ConcatMaps(
                recFunc.MakeMapS("a", recFunc.ProjectKeyS(recFunc.Hole, "a")),
                recFunc.MakeMapS("b", recFunc.ProjectKeyS(recFunc.Hole, "b")))),
            recFunc.Add(
              recFunc.ProjectKeyS(recFunc.Hole, "a"),
              recFunc.ProjectKeyS(recFunc.Hole, "b"))),
          free.Map(
            free.Map(
              free.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips2")), ExcludeId),
              recFunc.ConcatMaps(
                recFunc.MakeMapS("c", recFunc.ProjectKeyS(recFunc.Hole, "c")),
                recFunc.MakeMapS("d", recFunc.ProjectKeyS(recFunc.Hole, "d")))),
            recFunc.Add(
              recFunc.ProjectKeyS(recFunc.Hole, "c"),
              recFunc.ProjectKeyS(recFunc.Hole, "d"))))

      val normalized =
        fix.Union(
          fix.Unreferenced,
          free.Map(
            free.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips1")), ExcludeId),
            recFunc.Add(
              recFunc.ProjectKeyS(recFunc.Hole, "a"),
              recFunc.ProjectKeyS(recFunc.Hole, "b"))),
          free.Map(
            free.Read[ResourcePath](ResourcePath.leaf(rootDir </> file("zips2")), ExcludeId),
            recFunc.Add(
              recFunc.ProjectKeyS(recFunc.Hole, "c"),
              recFunc.ProjectKeyS(recFunc.Hole, "d"))))

      normalizeExpr(educated) must equal(normalized)
    }

    "elide no-op Map" >> {
      val path = ResourcePath.leaf(rootDir </> file("foo"))

      // Map(src, Hole) is the identity and should disappear entirely.
      "elide outer no-op Map" >> {
        val src =
          fix.Read[ResourcePath](path, ExcludeId)
        normalizeExpr(fix.Map(src, recFunc.Hole)) must equal(src)
      }

      "elide nested no-op Map" >> {
        val src =
          fix.Map(
            fix.Read[ResourcePath](path, ExcludeId),
            recFunc.ProjectKeyS(recFunc.Hole, "bar"))
        val qs =
          fix.Filter(
            fix.Map(src, recFunc.Hole),
            recFunc.ProjectKeyS(recFunc.Hole, "baz"))
        val expected =
          fix.Filter(
            src,
            recFunc.ProjectKeyS(recFunc.Hole, "baz"))
        normalizeExpr(qs) must equal(expected)
      }

      "elide double no-op Map" >> {
        val src =
          fix.Read[ResourcePath](path, ExcludeId)
        normalizeExpr(fix.Map(fix.Map(src, recFunc.Hole), recFunc.Hole)) must equal(src)
      }
    }
  }
}
| slamdata/quasar | qscript/src/test/scala/quasar/qscript/rewrites/NormalizeSpec.scala | Scala | apache-2.0 | 6,901 |
package pump.uno.actor
import akka.actor.{Actor, ActorRef, Props, Terminated}
import pump.uno.Settings
import pump.uno.model.{Fetch, Forum, ForumPage, Topic}
import pump.util.{ActorLogging, Loggable}
import spray.http.HttpCookie
/**
 * Actor that crawls one forum page.  Three-state FSM:
 *
 *  1. `receive`            — wait for a [[Fetch]] command and delegate the
 *                            download to a fetcher child;
 *  2. `waitingForPage`     — wait for the parsed [[ForumPage]], then spawn
 *                            one watched child per sub-forum and per topic;
 *  3. `waitingForChildren` — count [[Terminated]] messages and stop self
 *                            once every child has finished.
 *
 * Stopping self propagates the "done" signal up the crawl tree, since the
 * parent watches this actor the same way.
 */
trait ForumPageActor extends Actor with Loggable {

  /** Application settings; provides the site root URL prefix. */
  def settings: Settings

  /** Factory for the child that fetches and parses this page's HTML. */
  def createForumPageFetcherActor: ActorRef

  /** Factory for a child crawling a sub-forum page. */
  def createForumPageActor: ActorRef

  /** Factory for a child crawling a topic page. */
  def createTopicPageActor: ActorRef

  override def receive: Receive = {
    case message: Fetch =>
      createForumPageFetcherActor ! message
      // Remember the auth cookie so children can be given the same session.
      context.become(waitingForPage(message.auth))
  }

  def waitingForPage(auth: HttpCookie): Receive = {
    case page: ForumPage =>
      val children = page.forums.length + page.topics.length
      if (children == 0) {
        // Nothing to crawl below this page; finish immediately.
        log.debug(s" no children -> stopping right now")
        context.stop(self)
      } else {
        log.debug(s" waiting for $children children to finish")
        context.become(waitingForChildren(children))
      }
      processForums(auth, page.forums)
      processTopics(auth, page.topics)
  }

  /** Spawns and watches one ForumPageActor per sub-forum. */
  private def processForums(auth: HttpCookie, forums: Seq[Forum]) {
    log.debug(s" processing ${forums.length} subforums")
    forums.foreach { forum =>
      val forumPageActor = createForumPageActor
      context.watch(forumPageActor)
      forumPageActor ! Fetch(settings.root + forum.url, auth)
    }
  }

  /** Spawns and watches one TopicPageActor per topic. */
  private def processTopics(auth: HttpCookie, topics: Seq[Topic]) {
    log.debug(s" processing ${topics.length} topics")
    topics.foreach { topic =>
      val topicPageActor = createTopicPageActor
      context.watch(topicPageActor)
      topicPageActor ! Fetch(settings.root + topic.url, auth)
    }
  }

  /** Countdown: one Terminated per watched child; stop self at zero. */
  def waitingForChildren(children: Int): Receive = {
    case Terminated(_) =>
      if (children == 1) context.stop(self)
      else context.become(waitingForChildren(children - 1))
  }
}
/**
 * Production wiring of [[ForumPageActor]]: settings come from the actor
 * system's extension and children are created as real child actors, so the
 * trait can be unit-tested with stubbed factories.
 */
class ForumPageActorImpl extends ForumPageActor with ActorLogging {
  // Lazy so the extension is looked up after the actor context exists.
  override lazy val settings = Settings(context.system)
  override def createForumPageFetcherActor = context.actorOf(Props[ForumPageFetcherActorImpl], "fpf")
  override def createForumPageActor = context.actorOf(Props[ForumPageActorImpl])
  override def createTopicPageActor = context.actorOf(Props[TopicPageActorImpl])
}
| gkonst/pump | src/main/scala/pump/uno/actor/ForumPageActor.scala | Scala | mit | 2,273 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.box._
/**
 * Accounts box AC7400: the user's yes/no answer to "Enter Financial
 * commitments note?".  None when the question has not been answered.
 */
case class AC7400(value: Option[Boolean]) extends CtBoxIdentifier(name = "Enter Financial commitments note?")
  with CtOptionalBoolean
  with Input
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC7400.scala | Scala | apache-2.0 | 828 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v2
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v2.retriever.CT600EBoxRetriever
/**
 * CT600E (v2) box E20a: tangible fixed assets held at the end of the
 * period.  Validation only requires the value, when present, to be zero or
 * a positive integer.
 */
case class E20a(value: Option[Int]) extends CtBoxIdentifier("Tangible fixed assets (Held at the end of the period)") with CtOptionalInteger with Input with ValidatableBox[CT600EBoxRetriever] {
  override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateZeroOrPositiveInteger(this)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600e/v2/E20a.scala | Scala | apache-2.0 | 1,038 |
/*
* Copyright (C) 2017 Vincibean <Andrea Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.scala.impatient.chapter15.exercise2
/**
* Make an example class that shows every possible position of an annotation.
* Use @deprecated as your sample annotation.
*/
/**
 * Companion object of the sample class, demonstrating `@deprecated` on an
 * object and on one of its members.
 */
@deprecated
object Deprecation {

  /** A deprecated constant member. */
  @deprecated
  val isDeprecated: Boolean = true
}
/**
 * Sample class showing every position an annotation may appear in, using
 * `@deprecated`: on the class, on constructor parameters (both a `val`
 * field and a plain parameter), on methods, and on method parameters.
 */
@deprecated
class Deprecation(@deprecated val version: Int, @deprecated name: String) {

  /** A deprecated zero-argument side-effecting method. */
  @deprecated
  def deprecate(): Unit = {
    println("You're deprecate, dude!")
  }

  /** A deprecation carrying an explicit message and since-version. */
  @deprecated(message = "You should not use this anymore", since = "always")
  def deprecateAgain(@deprecated message: String): Unit = {
    println(message)
  }
}
| Vincibean/ScalaForTheImpatient-Solutions | src/main/scala/org/vincibean/scala/impatient/chapter15/exercise2/Deprecation.scala | Scala | gpl-3.0 | 1,306 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testing.interface
import scala.scalajs.js
import scala.scalajs.reflect.Reflect
import sbt.testing.Framework
/**
 * Reflection-based loading of sbt test [[Framework]] implementations on
 * Scala.js, where classes must be looked up through the portable
 * [[Reflect]] API rather than the JVM class loader.
 */
private[interface] object FrameworkLoader {

  /**
   * Instantiates the framework with the given fully-qualified class name,
   * throwing [[InstantiationError]] when it cannot be looked up.
   */
  def loadFramework(frameworkName: String): Framework =
    Reflect.lookupInstantiatableClass(frameworkName) match {
      case Some(clazz) => clazz.newInstance().asInstanceOf[Framework]
      case None        => throw new InstantiationError(frameworkName)
    }

  /**
   * For each group of candidate names, returns the first one that resolves
   * to an instantiable subclass of [[sbt.testing.Framework]], if any.
   */
  def detectFrameworkNames(names: List[List[String]]): List[Option[String]] = {
    def frameworkExists(name: String): Boolean =
      Reflect.lookupInstantiatableClass(name).exists { clazz =>
        classOf[sbt.testing.Framework].isAssignableFrom(clazz.runtimeClass)
      }
    names.map(_.find(frameworkExists))
  }

  /**
   * Tries each candidate name in order, returning the first that both
   * resolves and is a [[Framework]] subclass, already instantiated.
   * Lazily evaluated so no further lookups happen after the first hit.
   */
  def tryLoadFramework(names: List[String]): Option[Framework] = {
    def tryLoad(name: String): Option[Framework] =
      Reflect.lookupInstantiatableClass(name).collect {
        case clazz if classOf[Framework].isAssignableFrom(clazz.runtimeClass) =>
          clazz.newInstance().asInstanceOf[Framework]
      }
    names.iterator.flatMap(name => tryLoad(name).iterator).buffered.headOption
  }
}
| nicolasstucki/scala-js | test-interface/src/main/scala/org/scalajs/testing/interface/FrameworkLoader.scala | Scala | apache-2.0 | 1,496 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package swave.core.tck
import org.reactivestreams.Publisher
import swave.core._
// due to long runtime this test is disabled by default, remove parameter to enable the test
/**
 * Reactive Streams TCK verification of the Publisher produced by
 * `Drain.toPublisher()`.  The unused constructor parameter keeps the TCK
 * runner from picking the class up automatically (see the comment above).
 */
class ToPublisherDrainSpec(dontRun: Any) extends SwavePublisherVerification[Int] {

  implicit val env = StreamEnv()

  // Publisher under test: a stream of `elements` consecutive ints starting
  // at 0, drained into a Reactive Streams Publisher.
  def createPublisher(elements: Long): Publisher[Int] =
    Spout.ints(0).take(elements).drainTo(Drain.toPublisher()).get
}
| sirthias/swave | core-tests/src/test/scala/swave/core/tck/ToPublisherDrainSpec.scala | Scala | mpl-2.0 | 627 |
package com.eigengo.lift.profile
import java.util.UUID
import akka.actor.ActorRef
import com.eigengo.lift.common.{CommonMarshallers, CommonPathDirectives}
import com.eigengo.lift.notification.NotificationProtocol.{AndroidDevice, IOSDevice}
import com.eigengo.lift.profile.UserProfileProcessor._
import com.eigengo.lift.profile.UserProfileProtocol._
import spray.http._
import spray.routing.Directives
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Success}
/**
 * Spray routes for the user-profile service.  Read-only queries go to the
 * `userProfile` view actor; commands go to the `userProfileProcessor`
 * actor.  Actor replies are adapted to HTTP via the `mapRight` /
 * `mapNoneToEmpty` helpers from CommonMarshallers.
 */
trait ProfileService extends Directives with CommonMarshallers with CommonPathDirectives {
  import akka.pattern.ask
  import com.eigengo.lift.common.Timeouts.defaults._

  def userProfileRoute(userProfile: ActorRef, userProfileProcessor: ActorRef)(implicit ec: ExecutionContext) =
    // POST /user – register a new account; PUT /user – log in.
    // Both reply with the user's UUID on success.
    path("user") {
      post {
        handleWith { register: UserRegister ⇒
          (userProfileProcessor ? register).mapRight[UUID]
        }
      } ~
      put {
        handleWith { login: UserLogin ⇒
          (userProfileProcessor ? login).mapRight[UUID]
        }
      }
    } ~
    // GET /user/:id – fetch the public profile; POST /user/:id – update it.
    path("user" / UserIdValue) { userId ⇒
      get {
        complete {
          (userProfile ? UserGetPublicProfile(userId)).mapNoneToEmpty[PublicProfile]
        }
      } ~
      post {
        handleWith { publicProfile: PublicProfile ⇒
          (userProfileProcessor ? UserSetPublicProfile(userId, publicProfile)).mapRight[Unit]
        }
      }
    } ~
    // GET /user/:id/check – 200 when the account exists, 404 otherwise.
    path("user" / UserIdValue / "check") { userId ⇒
      get {
        complete {
          (userProfileProcessor ? UserCheckAccount(userId)).mapTo[Boolean].map { x ⇒
            if (x) HttpResponse(StatusCodes.OK) else HttpResponse(StatusCodes.NotFound)
          }
        }
      }
    } ~
    // GET /user/:id/image – profile image as image/png (empty body when
    // none is set); POST – store the raw request bytes as the new image.
    path("user" / UserIdValue / "image") { userId ⇒
      get {
        complete {
          (userProfile ? UserGetProfileImage(userId)).mapTo[Option[Array[Byte]]].map { x ⇒
            HttpResponse(entity = HttpEntity(contentType = ContentType(MediaTypes.`image/png`), bytes = x.getOrElse(Array.empty)))
          }
        }
      } ~
      post {
        // Raw-entity upload, so we drop to the low-level ctx API here.
        ctx ⇒
          val image = ctx.request.entity.data.toByteArray
          (userProfileProcessor ? UserSetProfileImage(userId, image)).onComplete {
            case Success(_) ⇒ ctx.complete(HttpResponse(StatusCodes.OK))
            case Failure(_) ⇒ ctx.complete(HttpResponse(StatusCodes.InternalServerError))
          }
      }
    } ~
    // POST /user/:id/device/{ios,android} – register a push-notification
    // device token for the given platform.
    path("user" / UserIdValue / "device" / "ios") { userId ⇒
      post {
        handleWith { device: IOSDevice ⇒
          (userProfileProcessor ? UserSetDevice(userId, device)).mapRight[Unit]
        }
      }
    } ~
    path("user" / UserIdValue / "device" / "android") { userId ⇒
      post {
        handleWith { device: AndroidDevice ⇒
          (userProfileProcessor ? UserSetDevice(userId, device)).mapRight[Unit]
        }
      }
    }
}
| lachatak/lift | server/profile/src/main/scala/com/eigengo/lift/profile/ProfileService.scala | Scala | apache-2.0 | 2,890 |
package sangria.execution.batch
import sangria.ast.AstLocation
import sangria.execution.{ExecutionError, Executor, QueryAnalysisError, WithViolations}
import sangria.ast.SourceMapper
import sangria.validation.{AstNodeViolation, Violation}
/** Generic batch-execution failure reported during query analysis. */
case class BatchExecutionError(message: String, eh: Executor.ExceptionHandler)
  extends ExecutionError(message, eh)
  with QueryAnalysisError
/**
 * An inferred variable is used with two incompatible types within the same
 * operation, so a single definition cannot be synthesized for it.
 */
case class VariableDefinitionInferenceViolation(
    operationName: String,
    variableName: String,
    type1: String,
    type2: String,
    sourceMapper: Option[SourceMapper],
    locations: List[AstLocation])
  extends AstNodeViolation {
  lazy val simpleErrorMessage =
    s"Inferred variable '$$$variableName' in operation '$operationName' is used with two conflicting types: '$type1' and '$type2'."
}
/** A variable is referenced but never defined in the named operation. */
case class UndefinedVariableDefinitionViolation(
    operationName: String,
    variableName: String,
    sourceMapper: Option[SourceMapper],
    locations: List[AstLocation])
  extends AstNodeViolation {
  lazy val simpleErrorMessage =
    s"Variable '$$$variableName' is not defined in the operation '$operationName'."
}
/**
 * Operations in the batch depend on each other in a cycle; `path` lists the
 * chain of operation names leading back to `operationName`.
 */
case class CircularOperationDependencyViolation(
    operationName: String,
    path: Vector[String],
    sourceMapper: Option[SourceMapper],
    locations: List[AstLocation])
  extends AstNodeViolation {
  lazy val simpleErrorMessage =
    s"Operation '$operationName' has a circular dependency at path '${path.mkString(" -> ")} -> $operationName'."
}
/** Aggregates all violations found while analysing a batch query. */
case class BatchExecutionViolationError(
    violations: Vector[Violation],
    eh: Executor.ExceptionHandler)
  extends ExecutionError(
    s"Invalid batch query. Violations:\\n\\n${violations.map(_.errorMessage).mkString("\\n\\n")}",
    eh)
  with QueryAnalysisError
  with WithViolations
| sangria-graphql/sangria | modules/core/src/main/scala/sangria/execution/batch/batchViolations.scala | Scala | apache-2.0 | 1,781 |
/*
* MakeList.scala
* An element representing making a list of a random number of random items.
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Oct 17, 2011
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.library.compound
import com.cra.figaro.algorithm.ValuesMaker
import com.cra.figaro.algorithm.lazyfactored.{ ValueSet, LazyValues, Regular }
import com.cra.figaro.algorithm.factored.factors._
import com.cra.figaro.language._
import com.cra.figaro.util._
import scala.collection.mutable.Map
import com.cra.figaro.library.collection.VariableSizeArray
import com.cra.figaro.library.collection.FixedSizeArrayElement
import com.cra.figaro.library.collection.FixedSizeArray
import com.cra.figaro.library.collection.MakeArray
/**
* An element representing making a list of a random number of random items.
* The first argument is an element representing the number of items.
* The second argument is an expression that generates an element representing an item.
* MakeList is designed to store all the item elements and not change them as the number of elements changes.
*
* @param numItems The element representing the number of items in the list
* @param itemMaker A function that creates an element representing a single item in the list
* @deprecated("MakeList is deprecated. Please use the collections library for future support of MakeList capabilities", "3.2.1")
*/
@deprecated("MakeList is deprecated. Please use the collections library for future support of MakeList capabilities", "3.2.1")
class MakeList[T](name: Name[List[T]], vsa: FixedSizeArrayElement[T], collection: ElementCollection)
  extends Apply1[List[T], List[T]](name, vsa.foldLeft(List[T]())((c: List[T], n: T) => c :+ n), (l: List[T]) => l, collection) {

  // Element representing the current number of items, delegated to the
  // backing MakeArray.  NOTE(review): the asInstanceOf assumes vsa.fsa is
  // always a MakeArray — confirm this invariant holds for all constructors.
  val numItems = vsa.fsa.asInstanceOf[MakeArray[T]].numItems

  // The item elements of the currently-generated array, as a Stream.
  def items = vsa.fsa.value.generate(vsa.fsa.value.indices.toList).map(_._2).toStream

  // The i-th item element; `match` binds looser than `<`, so this matches
  // on the Boolean (i < size) and throws for out-of-range indices.
  def apply(i: Int) = i < vsa.fsa.value.size match {
    case true => vsa.fsa.value(i)
    case _ => throw new IllegalArgumentException("Invalid indices to MakeList")
  }
}
object MakeList {
  /**
   * Create a MakeList element using numItems to determine the number of items
   * and itemMaker to create each item in the list.
   */
  @deprecated("MakeList is deprecated. Please use the collections library for future support of MakeList capabilities", "3.2.1")
  def apply[T](numItems: Element[Int], itemMaker: () => Element[T])(implicit name: Name[List[T]], collection: ElementCollection) = {
    // Backed by a VariableSizeArray; the (ignored) index argument matches
    // the collection API's item factory signature.
    val vsa = VariableSizeArray(numItems, (i: Int) => itemMaker())("", collection)
    new MakeList(name, vsa, collection)
  }
}
| jyuhuan/figaro | Figaro/src/main/scala/com/cra/figaro/library/compound/MakeList.scala | Scala | bsd-3-clause | 2,857 |
package com.gu.cas
/**
 * Typed view over the Typesafe Config.  The key is read eagerly at
 * construction, so a missing `emergency.subscriber.auth.prefix` entry
 * fails fast when the Config is created rather than on first use.
 */
case class Config(config: com.typesafe.config.Config) {
  val emergencySubscriberAuthPrefix = config.getString("emergency.subscriber.auth.prefix")
}
| guardian/content-authorisation-common | src/main/scala/com/gu/cas/Config.scala | Scala | apache-2.0 | 169 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import com.google.common.io.Files
import java.io.File
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.util.ADAMFunSuite
import org.bdgenomics.formats.avro.AlignmentRecord
class Adam2FastqSuite extends ADAMFunSuite {

  /**
   * Reads every line of a text file into memory, closing the underlying Source
   * when done. The previous code called scala.io.Source.fromFile(...) four
   * times without ever closing the handles, leaking file descriptors.
   */
  private def readAllLines(path: String): Seq[String] = {
    val source = scala.io.Source.fromFile(new File(path))
    try source.getLines().toList
    finally source.close()
  }

  sparkTest("convert SAM to paired FASTQ") {
    val readsFilepath = resourcePath("bqsr1.sam")

    // The following fastq files were generated by Picard's SamToFastq
    // First generate mapped on SAM file (sorted by readName to make the comparison easier)
    // samtools view -H adam-core/src/test/resources/bqsr1.sam > adam-core/src/test/resources/bqsr1-readnamesorted.sam
    // samtools view -F 4 adam-core/src/test/resources/bqsr1.sam | sort -k1 >> adam-core/src/test/resources/bqsr1-readnamesorted.sam
    // java -jar picard.jar
    //  SamToFastq
    //  I=adam-core/src/test/resources/bqsr1-readnamesorted.sam
    //  FASTQ=adam-core/src/test/resources/bqsr1-r1.fq
    //  SECOND_END_FASTQ=adam-core/src/test/resources/bqsr1-r2.fq
    //  VALIDATION_STRINGENCY=SILENT
    // VALIDATION_STRINGENCY=SILENT is necessary since they are unpaired reads and this matches the ADAM default
    val fastq1Path = resourcePath("bqsr1-r1.fq")
    val fastq2Path = resourcePath("bqsr1-r2.fq")

    val outputDir = Files.createTempDir()
    val outputFastqR1File = outputDir.getAbsolutePath + "/bqsr1-r1.fq"
    val outputFastqR2File = outputDir.getAbsolutePath + "/bqsr1-r2.fq"

    // Only looking at mapped reads
    // This is because Picard and ADAM disagree on setting negative strand on unmapped reads
    // Picard allows unmapped reads to set the negative strand flag and therefore reverse-complemented on output
    val reads: RDD[AlignmentRecord] =
      sc
        .loadAlignments(readsFilepath)
        .rdd
        .filter(r => r.getReadMapped != null && r.getReadMapped)

    reads.saveAsFastq(outputFastqR1File, Some(outputFastqR2File), sort = true)

    val goldR1Reads = readAllLines(fastq1Path)
    val goldR2Reads = readAllLines(fastq2Path)

    // saveAsFastq writes Hadoop-style output directories; the single partition
    // file holds all of the data.
    val outputR1Reads = readAllLines(outputFastqR1File + "/part-00000")
    val outputR2Reads = readAllLines(outputFastqR2File + "/part-00000")

    assert(outputR1Reads.length === goldR1Reads.length)
    assert(outputR2Reads.length === goldR2Reads.length)

    // Compare line-by-line against the Picard-generated gold files.
    outputR1Reads.zip(goldR1Reads).foreach(kv => assert(kv._1 === kv._2))
    outputR2Reads.zip(goldR2Reads).foreach(kv => assert(kv._1 === kv._2))
  }
}
| erictu/adam | adam-cli/src/test/scala/org/bdgenomics/adam/cli/ADAM2FastqSuite.scala | Scala | apache-2.0 | 3,448 |
import scala.reflect.runtime.universe._
// Regression test (SI-7359): forcing `members` on a type that participates in
// a cycle must complete without looping; `Cyclic` is declared in the companion
// source compiled alongside this file.
object Test extends App {
  typeOf[Cyclic].members
  println("ok")
}
package aecor.distributedprocessing
import aecor.distributedprocessing.DistributedProcessing._
import aecor.distributedprocessing.DistributedProcessingWorker.KeepRunning
import aecor.distributedprocessing.serialization.Message
import cats.effect.syntax.effect._
import akka.actor.{ Actor, ActorLogging, Props, Status }
import akka.pattern._
import cats.effect.Effect
import cats.implicits._
private[aecor] object DistributedProcessingWorker {
  /** Props for a worker owning the process produced by `processWithId` for one partition. */
  def props[F[_]: Effect](processWithId: Int => Process[F], processName: String): Props =
    Props(new DistributedProcessingWorker[F](processWithId, processName))

  /** Periodic heartbeat from the coordinator instructing the worker to keep its process alive. */
  final case class KeepRunning(workerId: Int) extends Message
}
/**
 * Actor hosting a single `Process[F]` instance. Any start failure, run failure
 * or unexpected termination is escalated by throwing, so the supervision
 * strategy can restart the worker (and thereby the process).
 */
private[aecor] final class DistributedProcessingWorker[F[_]: Effect](
  processFor: Int => Process[F],
  processName: String
) extends Actor
  with ActorLogging {
  import context.dispatcher

  // Internal protocol: completion of the start effect / of the running process.
  case class ProcessStarted(process: RunningProcess[F])
  case object ProcessTerminated

  // Termination action of the currently running process, if one was started.
  var killSwitch: Option[F[Unit]] = None

  // Make sure the hosted process is shut down when the actor stops.
  override def postStop: Unit =
    killSwitch.foreach(_.toIO.unsafeRunSync())

  def receive: Receive = {
    case KeepRunning(workerId) =>
      log.info("[{}] Starting process {}", workerId, processName)
      // Kick off the process; the resulting RunningProcess is piped back to self.
      processFor(workerId).run
        .map(ProcessStarted)
        .toIO
        .unsafeToFuture() pipeTo self
      context.become {
        case ProcessStarted(RunningProcess(watchTermination, terminate)) =>
          log.info("[{}] Process started {}", workerId, processName)
          killSwitch = Some(terminate)
          // Watch for natural termination of the process.
          watchTermination.toIO.map(_ => ProcessTerminated).unsafeToFuture() pipeTo self
          context.become {
            case Status.Failure(e) =>
              log.error(e, "Process failed {}", processName)
              throw e
            case ProcessTerminated =>
              // A distributed process is expected to run forever; terminating is an error.
              log.error("Process terminated {}", processName)
              throw new IllegalStateException(s"Process terminated $processName")
          }
        case Status.Failure(e) =>
          log.error(e, "Process failed to start {}", processName)
          throw e
        case KeepRunning(_) => ()
      }
  }
}
| notxcain/aecor | modules/distributed-processing/src/main/scala/aecor/distributedprocessing/DistributedProcessingWorker.scala | Scala | mit | 2,123 |
// Fixtures for the t3425b reflection test: P and Q both declare `reflected`,
// and PQ satisfies the structural refinement `P with Q` used by the generated
// test (Test_2.scala, produced by Gen below).
trait P { def reflected: Boolean }
trait Q { def reflected: Boolean }
class PQ(val reflected: Boolean) extends P with Q { }

trait A
trait B
trait C { val y: P }

class ABC extends A with B with C {
  // Detects whether the current call reached us through the reflection
  // machinery by scanning the stack frames above `main` for ".reflect.".
  private def reflected = (
    Thread.currentThread.getStackTrace
    takeWhile (_.getMethodName != "main")
    exists (_.toString contains ".reflect.")
  )
  // NOTE: lazy, so `reflected` is sampled at first access of `y`, not at construction.
  lazy val y: PQ = new PQ(reflected)
}
/*** The source used to generate the second file
Not otherwise used in the test except that compiling
it helps make sure it still compiles.
****/
// Generator for the second test file: emits a Test object exercising every
// pairing of outer trait x refinement element type. The exact emitted strings
// are the behavior of this generator, so they must not be altered.
object Gen {
  // An outer type refined with a member `y` of the given element type.
  case class Tp(outer: String, elem: String) {
    override def toString = s"$outer { val y: $elem }"
  }
  // A compound of two refinements; `expr` casts a fresh ABC to that compound.
  case class Pair(tp1: Tp, tp2: Tp) {
    def expr = s"((new ABC): $tp)"
    def tp = s"($tp1) with ($tp2)"
  }

  val traits = Vector("Any", "A", "B", "C") map ("%6s" format _)
  val types = Vector("P", "Q", "R forSome { type R <: P with Q }")
  val allTypes = for (c <- traits ; tp <- types) yield Tp(c, tp)
  // Cartesian product: every ordered pairing of refinement types.
  val pairs = allTypes flatMap (t1 => allTypes map (t2 => Pair(t1, t2)))
  val indices = pairs.indices

  // Type aliases R1_i (the compound) and R2_i (the compound further refined).
  def aliases(idx: Int) = {
    val p = pairs(idx)
    import p._
    List(
      s"type R1_$idx = $tp",
      s"type R2_$idx = R1_$idx { val y: (${tp1.elem}) with (${tp2.elem}) }"
    )
  }

  // One generated method per pair, prefixed with `pre` and the pair index.
  def mkMethodContent(pre: String)(f: Int => String) =
    indices map (i => s"def $pre$i${f(i)}") mkString "\\n "

  def content = List(
    indices flatMap aliases mkString "\\n ",
    mkMethodContent("f")(i => s" = { val x = ${pairs(i).expr} ; x.y.reflected -> whatis(x).toString }"),
    mkMethodContent("g")(i => s"""(x: R1_$i) = x.y"""),
    mkMethodContent("h")(i => s"""(x: R2_$i) = x.y""")
  ) mkString "\\n "

  def fCalls = indices map ("f" + _) mkString ("\\n ", ",\\n ", "\\n ")

  def main(args: Array[String]): Unit = {
    // One cannot attain proper appreciation for the inadequacies of
    // string interpolation without becoming one with the newline.
    val nl = "\\\\n"
    println(s"""
|import scala.reflect.runtime.universe._
|import scala.language._
|
|object Test {
| def whatis[T: TypeTag](x: T) = typeOf[T]
| def sshow(label: String, xs: Iterable[Any]) {
| println("==== " + label + " ====$nl")
| xs.toList.map("" + _).sorted foreach println
| println("$nl")
| }
|
| $content
| lazy val fcalls = List($fCalls)
|
| def main(args: Array[String]) {
| sshow("Direct Calls", fcalls collect { case (false, n) => n })
| sshow("Reflective Calls", fcalls collect { case (true, n) => n })
| // For a good time try printing this - have to fix bugs in
| // reflection before that's going to be a good idea
| // println(typeOf[Test.type].typeSymbol.asClass.info)
| }
|}
""".stripMargin.trim
    )
  }
}
| scala/scala | test/files/run/t3425b/Base_1.scala | Scala | apache-2.0 | 2,875 |
package support
import org.scalatest.exceptions.{TestFailedException, TestPendingException}
// Thrown by exercises to pause the run with an explanatory message.
class PauseException(val message: String) extends RuntimeException

// Base carrier for a failure enriched with the suite source file, the test
// context, and the source line numbers involved.
class MyException(val fileName: Option[String], val ctx: TestContext, val errors: Option[List[Int]], val message: Option[String], val cause: Throwable) extends Exception(message.getOrElse(""), cause) {}
// A pause requested by the exercise author.
class MyTestPauseException(fileName: Option[String], ctx: TestContext, errors: Option[List[Int]], message: Option[String], cause: Throwable) extends MyException(fileName, ctx, errors, message, cause) {}
// A test left `pending` (missing value to fill in).
class MyTestPendingException(fileName: Option[String], ctx: TestContext, errors: Option[List[Int]], message: Option[String], cause: Throwable) extends MyException(fileName, ctx, errors, message, cause) {}
// A `???` placeholder still to be implemented.
class MyNotImplementedException(fileName: Option[String], ctx: TestContext, errors: Option[List[Int]], message: Option[String], cause: Throwable) extends MyException(fileName, ctx, errors, message, cause) {}
// A regular assertion failure.
class MyTestFailedException(fileName: Option[String], ctx: TestContext, errors: Option[List[Int]], message: Option[String], cause: Throwable) extends MyException(fileName, ctx, errors, message, cause) {}
object MyException {

  /** Wraps a PauseException, pointing at the stack frame that triggered the pause. */
  def pause(suite: HandsOnSuite, ctx: TestContext, e: PauseException): MyTestPauseException = {
    val frame = e.getStackTrace()(2)
    new MyTestPauseException(Some(getLocation(suite, frame)), ctx, None, Some(e.message), e)
  }

  /** Wraps ScalaTest's TestPendingException with the offending source location. */
  def pending(suite: HandsOnSuite, ctx: TestContext, e: TestPendingException): MyTestPendingException = {
    val frame = e.getStackTrace()(2)
    val lines = List(frame.getLineNumber)
    new MyTestPendingException(Some(getLocation(suite, frame)), ctx, Some(lines), Some(Formatter.missingValue), e)
  }

  /** Wraps a NotImplementedError, collecting the suite frames among the top of the trace. */
  def notImplemented(suite: HandsOnSuite, ctx: TestContext, e: NotImplementedError): MyNotImplementedException = {
    val frames = e.getStackTrace.take(7).toList.filter(isSuiteClass(suite, _))
    val lines = frames.map(_.getLineNumber).distinct
    new MyNotImplementedException(frames.headOption.map(getLocation(suite, _)), ctx, nonEmpty(lines), Some(Formatter.missingImplementation), e)
  }

  /** Wraps ScalaTest's TestFailedException, reusing its own failure location data. */
  def failed(suite: HandsOnSuite, ctx: TestContext, e: TestFailedException): MyTestFailedException = {
    val location = e.failedCodeFileNameAndLineNumberString.map(getPackage(suite, _))
    new MyTestFailedException(location, ctx, Some(e.failedCodeLineNumber.toList), Option(e.getMessage), e)
  }

  /** Fallback for any other Throwable raised from the suite under test. */
  def unknown(suite: HandsOnSuite, ctx: TestContext, e: Throwable): MyException = {
    val frames = e.getStackTrace.toList.filter(isSuiteClass(suite, _))
    val lines = frames.map(_.getLineNumber).distinct
    new MyException(frames.headOption.map(getLocation(suite, _)), ctx, nonEmpty(lines), Some(e.toString), e)
  }

  // Rebuilds the project-relative path of the suite source file.
  private def getPackage(suite: HandsOnSuite, name: String): String =
    List("src", "test", "scala", suite.getClass.getPackage.getName, name).mkString(java.io.File.separator)

  // "path/File.scala:42"-style location for a stack frame.
  private def getLocation(suite: HandsOnSuite, st: StackTraceElement): String =
    getPackage(suite, st.getFileName + ":" + st.getLineNumber)

  // True when the frame belongs to the suite class under test.
  private def isSuiteClass(suite: HandsOnSuite, e: StackTraceElement): Boolean =
    e.getClassName.contains(suite.getClass.getName)

  // Some(list) when non-empty, None otherwise.
  private def nonEmpty[T](l: List[T]): Option[List[T]] =
    if (l.isEmpty) None else Some(l)
}
| loicknuchel/scala-class | src/main/scala/support/CustomException.scala | Scala | unlicense | 3,478 |
package org.scalaide.util.internal.eclipse
import org.eclipse.jface.text.Document
import org.eclipse.jface.text.TextSelection
import org.eclipse.text.edits.MultiTextEdit
import org.eclipse.text.edits.ReplaceEdit
import org.junit.ComparisonFailure
import org.junit.Test
import org.junit.Ignore
/**
 * Tests that a text selection (cursor position or range, marked with ^) is
 * relocated correctly when a MultiTextEdit of additions/removals (regions
 * marked with [..]) is applied to a document.
 */
class TextSelectionTest {

  // DSL: `input becomes expectedOutput` pairs a test source with its expectation.
  final implicit class OhILikeThisDslSoMuch(input: String) {
    def becomes(expectedOutput: String) = input -> expectedOutput
  }
  // DSL: `(input becomes output) after changes` actually runs the check.
  final implicit class IWannaHaveMoreOfIt(testData: (String, String)) {
    def after(changes: Seq[String]) = test(testData._1, testData._2, changes)
  }

  /**
   * Test if the text selection is correct when `changes` are made to `input`,
   * which then result to `expectedOutput`.
   *
   * Features:
   * - Regions that should be added or removed need to be surrounded by []
   * - For each region in the test string a value need to exist in `changes`. In
   *   case a region needs to be removed, the value needs to be empty. In case
   *   a region should be added or replaced, the value needs to be non empty.
   * - The cursor position is determined by a ^. If a region should be selected,
   *   a second ^ needs to be placed. The region between the two ^ then
   *   determines the selection.
   * - If trailing whitespace needs to exist in the test string, a trailing $
   *   can be placed after the whitespace.
   */
  final def test(input: String, expectedOutput: String, changes: Seq[String]): Unit = {
    val carets = input.count(_ == '^')
    require(carets == 1 || carets == 2, "No selection specified.")

    val selStart = input.indexOf('^')
    val selEnd = {
      val e = input.indexOf('^', selStart+1)
      if (e > selStart) e else selStart
    }

    // Maps each [..] region to its (start, end) offsets in the marker-free
    // source, compensating for the marker characters seen so far.
    def findBraces(pos: Int, m: Map[Int, Int]): Map[Int, Int] = {
      val open = input.indexOf("[", pos)
      if (open < 0)
        m
      else {
        val close = input.indexOf(']', open+1)
        val s = m.size*2
        // ^ = selStart/selEnd, [ = open, ] = close
        // case 1: ^ ^ [ ], ^ [ ]
        if (selStart < open && selEnd < open)
          findBraces(close+1, m + ((open-s-carets, close-s-carets-1)))
        // case 2: ^ [ ^ ]
        else if (selStart < open && selEnd < close)
          findBraces(close+1, m + ((open-s-1, close-s-3)))
        // case 3: ^ [ ] ^
        else if (selStart < open && selEnd > close)
          findBraces(close+1, m + ((open-s-1, close-s-2)))
        // case 4: [^ ^], [ ^ ]
        else if (selStart < close && selEnd < close)
          findBraces(close+1, m + ((open-s, close-s-carets-1)))
        // case 5: [ ^ ] ^
        else if (selStart < close && selEnd > close)
          findBraces(close+1, m + ((open-s, close-s-2)))
        // case 6: [ ] ^ ^, [ ] ^
        else// if (selStart > close && selEnd > close)
          findBraces(close+1, m + ((open-s, close-s-1)))
      }
    }

    // Translates the ^ markers into a TextSelection (offset + length).
    def findSelection(source: String): TextSelection = {
      val open = source.indexOf('^')
      val close = source.indexOf('^', open+1)
      new TextSelection(open, if (close >= open) close-open-1 else 0)
    }

    require(input.count(_ == '[') == input.count(_ == ']'), "Invalid range area found.")
    val braces = findBraces(0, Map())
    val sourceWithoutBraces = input.replaceAll("\\\\[|\\\\]", "")
    require(braces.size == changes.size, "The number of changes need to be equal to the number of the regions.")

    // Build one ReplaceEdit per region; an empty change removes the region.
    val edit = new MultiTextEdit
    changes zip braces foreach {
      case (change, (start, end)) =>
        edit.addChild(new ReplaceEdit(start, end-start, change))
    }

    val sel = findSelection(sourceWithoutBraces)
    val sourceWithoutCursor = sourceWithoutBraces.replaceAll("\\\\^", "")
    val doc = new Document(sourceWithoutCursor)
    // Apply the edits and re-insert ^ markers at the resulting selection.
    val s = TextEditUtils.applyMultiTextEdit(doc, sel, edit)
    doc.replace(s.getOffset(), 0, "^")
    if (s.getLength() > 0)
      doc.replace(s.getOffset()+s.getLength()+1, 0, "^")

    val expected = expectedOutput.replaceAll("\\\\$", "")
    val actual = doc.get()

    if (expected != actual) {
      throw new ComparisonFailure("", expected, actual)
    }
  }

  @Test
  def remove_before_cursor_position() = """|
    |class X {
    | [def g = 0]
    | def f = 0^
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | def f = 0^
    |}
    |""".stripMargin after Seq("")

  @Test
  def multiple_remove_before_cursor_position() = """|
    |[class S]
    |class X {
    | [def g = 0]
    | def f = 0^
    |}
    |""".stripMargin becomes """|
    |
    |class X {
    | $
    | def f = 0^
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def add_before_cursor_position() = """|
    |class X {
    | []
    | def f = 0^
    |}
    |""".stripMargin becomes """|
    |class X {
    | def g = 0
    | def f = 0^
    |}
    |""".stripMargin after Seq("def g = 0")

  @Test
  def multiple_add_before_cursor_position() = """|
    |[]
    |class X {
    | []
    | def f = 0^
    |}
    |""".stripMargin becomes """|
    |class S
    |class X {
    | def g = 0
    | def f = 0^
    |}
    |""".stripMargin after Seq("class S", "def g = 0")

  @Test
  def remove_after_cursor_position() = """|
    |class X {
    | def f = 0^
    | [def g = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | $
    |}
    |""".stripMargin after Seq("")

  @Test
  def multiple_remove_after_cursor_position() = """|
    |class X {
    | def f = 0^
    | [def g = 0]
    |}
    |[class S]
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | $
    |}
    |
    |""".stripMargin after Seq("", "")

  @Test
  def add_after_cursor_position() = """|
    |class X {
    | def f = 0^
    | []
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | def g = 0
    |}
    |""".stripMargin after Seq("def g = 0")

  @Test
  def multiple_add_after_cursor_position() = """|
    |class X {
    | def f = 0^
    | []
    |}
    |[]
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | def g = 0
    |}
    |class S
    |""".stripMargin after Seq("def g = 0", "class S")

  @Test
  def remove_before_and_after_cursor_position() = """|
    |class X {
    | [def g = 0]
    | def f = 0^
    | [def x = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | def f = 0^
    | $
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def multiple_remove_before_and_after_cursor_position() = """|
    |[class S]
    |class X {
    | [def g = 0]
    | def f = 0^
    | [def x = 0]
    |}
    |[class M]
    |""".stripMargin becomes """|
    |
    |class X {
    | $
    | def f = 0^
    | $
    |}
    |
    |""".stripMargin after Seq("", "", "", "")

  @Test
  def add_before_and_after_cursor_position() = """|
    |class X {
    | []
    | def f = 0^
    | []
    |}
    |""".stripMargin becomes """|
    |class X {
    | def g = 0
    | def f = 0^
    | def x = 0
    |}
    |""".stripMargin after Seq("def g = 0", "def x = 0")

  @Test
  def multiple_add_before_and_after_cursor_position() = """|
    |[]
    |class X {
    | []
    | def f = 0^
    | []
    |}
    |[]
    |""".stripMargin becomes """|
    |class S
    |class X {
    | def g = 0
    | def f = 0^
    | def x = 0
    |}
    |class M
    |""".stripMargin after Seq("class S", "def g = 0", "def x = 0", "class M")

  @Test
  def remove_and_add_before_and_after_cursor_position() = """|
    |[class S]
    |class X {
    | []
    | def f = 0^
    | []
    |}
    |[class M]
    |""".stripMargin becomes """|
    |
    |class X {
    | def g = 0
    | def f = 0^
    | def x = 0
    |}
    |
    |""".stripMargin after Seq("", "def g = 0", "def x = 0", "")

  @Test
  def remove_after_cursor_with_cursor_at_beginning_of_range() = """|
    |class X {
    | def f = 0^[ ]
    | [def g = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | $
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def remove_after_cursor_with_cursor_at_end_of_range() = """|
    |class X {
    | def f = 0[ ]^
    | [def g = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | $
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def remove_before_and_after_cursor_with_cursor_at_beginning_of_range() = """|
    |class X {
    | [def g = 0]
    | def f = 0^[ ]
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | def f = 0^
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def remove_before_and_after_with_cursor_at_end_of_range() = """|
    |class X {
    | [def g = 0]
    | def f = 0[ ]^
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | def f = 0^
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def add_after_cursor_with_cursor_at_beginning_of_range() = """|
    |class X {
    | def f = 0^[]
    | []
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0+1^
    | def g = 0
    |}
    |""".stripMargin after Seq("+1", "def g = 0")

  @Test
  def add_before_and_after_cursor_with_cursor_at_beginning_of_range() = """|
    |class X {
    | []
    | def f = 0^[]
    |}
    |""".stripMargin becomes """|
    |class X {
    | def g = 0
    | def f = 0+1^
    |}
    |""".stripMargin after Seq("def g = 0", "+1")

  @Test
  def remove_after_cursor_with_cursor_inside_of_range() = """|
    |class X {
    | def f = 0[ ^ ]
    | [def g = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | def f = 0^
    | $
    |}
    |""".stripMargin after Seq("", "")

  @Test
  def remove_before_cursor_with_cursor_inside_of_range() = """|
    |class X {
    | [def g = 0]
    | def f = 0[ ^ ]
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | def f = 0^
    |}
    |""".stripMargin after Seq("", "")

  // tests for selections

  @Test
  def remove_with_selection_case_1() = """|
    |class X {
    | ^def g = 0^
    | [def f = 0]
    |}
    |""".stripMargin becomes """|
    |class X {
    | ^def g = 0^
    | $
    |}
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_2() = """|
    |^class X {
    | [def g^ = 0]
    | def f = 0
    |}
    |""".stripMargin becomes """|
    |^class X {
    | $^
    | def f = 0
    |}
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_3() = """|
    |class M
    |^class X {
    | [def g = 0]
    | def f = 0
    |}^
    |class S
    |""".stripMargin becomes """|
    |class M
    |^class X {
    | $
    | def f = 0
    |}^
    |class S
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_3_at_end_of_selection() = """|
    |class X {
    | ^def f = 0[ ]^
    |}
    |""".stripMargin becomes """|
    |class X {
    | ^def f = 0^
    |}
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_4() = """|
    |class X {
    | [def ^g =^ 0]
    | def f = 0
    |}
    |""".stripMargin becomes """|
    |class X {
    | $^
    | def f = 0
    |}
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_5() = """|
    |class X {
    | [def ^g = 0]
    | def f = 0^
    |}
    |""".stripMargin becomes """|
    |class X {
    | $^
    | def f = 0^
    |}
    |""".stripMargin after Seq("")

  @Test
  def remove_with_selection_case_6() = """|
    |class X {
    | [def g = 0]
    | ^def f = 0^
    |}
    |""".stripMargin becomes """|
    |class X {
    | $
    | ^def f = 0^
    |}
    |""".stripMargin after Seq("")

  @Test
  def add_with_selection_case_1() = """|
    |class X {
    | ^def g = 0^
    | []
    |}
    |""".stripMargin becomes """|
    |class X {
    | ^def g = 0^
    | def f = 0
    |}
    |""".stripMargin after Seq("def f = 0")

  @Test
  def add_with_selection_case_3() = """|
    |class M
    |^class X {
    | []
    | def f = 0
    |}^
    |class S
    |""".stripMargin becomes """|
    |class M
    |^class X {
    | def g = 0
    | def f = 0
    |}^
    |class S
    |""".stripMargin after Seq("def g = 0")

  @Test
  def add_with_selection_case_6() = """|
    |class X {
    | []
    | ^def f = 0^
    |}
    |""".stripMargin becomes """|
    |class X {
    | def g = 0
    | ^def f = 0^
    |}
    |""".stripMargin after Seq("def g = 0")
}
| romanowski/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/util/internal/eclipse/TextSelectionTest.scala | Scala | bsd-3-clause | 12,311 |
package com.imaginea.activegrid.core.models
import com.imaginea.activegrid.core.utils.ActiveGridUtils
import com.typesafe.scalalogging.Logger
import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory
/**
* Created by shareefn on 31/10/16.
*/
/**
 * Persistable representation of a cloud load balancer.
 *
 * @param id                Neo4j node id; None until the entity has been saved
 * @param name              load balancer name
 * @param vpcId             VPC the balancer belongs to, if any
 * @param region            region identifier, if known
 * @param instanceIds       ids of the instances registered with the balancer
 * @param availabilityZones zones the balancer spans
 */
case class LoadBalancer(override val id: Option[Long],
                        name: String,
                        vpcId: Option[String],
                        region: Option[String],
                        instanceIds: List[String],
                        availabilityZones: List[String]) extends BaseEntity
object LoadBalancer {
  val logger = Logger(LoggerFactory.getLogger(getClass.getName))
  val label = "LoadBalancer"

  /** Rehydrates a LoadBalancer from the Neo4j node with the given id, if it exists. */
  def fromNeo4jGraph(id: Long): Option[LoadBalancer] =
    Neo4jRepository.findNodeById(id).map { node =>
      val props = Neo4jRepository.getProperties(node, "name", "vpcId", "region", "instanceIds", "availabilityZones")
      LoadBalancer(
        Some(node.getId),
        props("name").toString,
        ActiveGridUtils.getValueFromMapAs[String](props, "vpcId"),
        ActiveGridUtils.getValueFromMapAs[String](props, "region"),
        props("instanceIds").asInstanceOf[Array[String]].toList,
        props("availabilityZones").asInstanceOf[Array[String]].toList
      )
    }

  /** Neo4j persistence operations for LoadBalancer values. */
  implicit class RichLoadBalancer(loadBalancer: LoadBalancer) extends Neo4jRep[LoadBalancer] {

    /** Persists the entity (list fields are stored as string arrays) and returns the node. */
    override def toNeo4jGraph(entity: LoadBalancer): Node = {
      logger.debug(s"toGraph for LoadBalancer $entity")
      val props = Map(
        "name" -> entity.name,
        "vpcId" -> entity.vpcId,
        "region" -> entity.region,
        "instanceIds" -> entity.instanceIds.toArray,
        "availabilityZones" -> entity.availabilityZones.toArray
      )
      val node = Neo4jRepository.saveEntity[LoadBalancer](label, entity.id, props)
      logger.debug(s"node - $node")
      node
    }

    override def fromNeo4jGraph(id: Long): Option[LoadBalancer] =
      LoadBalancer.fromNeo4jGraph(id)
  }
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/LoadBalancer.scala | Scala | apache-2.0 | 2,115 |
package vmart.parquet
// Generated on: 2017:06:06-15:57:42,199
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{MetadataBuilder, _}
/**
 * One-shot Spark job: loads the VMart date_dimension CSV export with an
 * explicit schema and rewrites it as Parquet.
 */
object Date_dimension_CSV2Parquet_StructField {
  def main(args: Array[String]): Unit = {
    // Explicit schema for the pipe-delimited date_dimension export.
    val Date_dimension = StructType(
      StructField(name = "date_key", dataType = IntegerType, nullable = false, metadata = new MetadataBuilder().putString("description", "date_key").build())
        :: StructField(name = "date", dataType = DateType, nullable = true, metadata = new MetadataBuilder().putString("description", "date").build())
        :: StructField(name = "full_date_description", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "full_date_description").build())
        :: StructField(name = "day_of_week", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "day_of_week").build())
        :: StructField(name = "day_number_in_calendar_month", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "day_number_in_calendar_month").build())
        :: StructField(name = "day_number_in_calendar_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "day_number_in_calendar_year").build())
        :: StructField(name = "day_number_in_fiscal_month", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "day_number_in_fiscal_month").build())
        :: StructField(name = "day_number_in_fiscal_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "day_number_in_fiscal_year").build())
        :: StructField(name = "last_day_in_week_indicator", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "last_day_in_week_indicator").build())
        :: StructField(name = "last_day_in_month_indicator", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "last_day_in_month_indicator").build())
        :: StructField(name = "calendar_week_number_in_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_week_number_in_year").build())
        :: StructField(name = "calendar_month_name", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_month_name").build())
        :: StructField(name = "calendar_month_number_in_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_month_number_in_year").build())
        :: StructField(name = "calendar_year_month", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_year_month").build())
        :: StructField(name = "calendar_quarter", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_quarter").build())
        :: StructField(name = "calendar_year_quarter", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_year_quarter").build())
        :: StructField(name = "calendar_half_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_half_year").build())
        :: StructField(name = "calendar_year", dataType = IntegerType, nullable = true, metadata = new MetadataBuilder().putString("description", "calendar_year").build())
        :: StructField(name = "holiday_indicator", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "holiday_indicator").build())
        :: StructField(name = "weekday_indicator", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "weekday_indicator").build())
        :: StructField(name = "selling_season", dataType = StringType, nullable = true, metadata = new MetadataBuilder().putString("description", "selling_season").build())
        :: Nil
    )

    val sparkSession = SparkSession
      .builder()
      .master("local")
      .appName("Date_dimension_CSV2Parquet_StructField")
      .getOrCreate()

    // Fix: the session was previously never stopped, leaking the local Spark
    // context if the job failed part-way. Ensure cleanup via try/finally.
    try {
      val df = sparkSession.read
        .format("com.databricks.spark.csv")
        .option("header", "false") //read the headers
        //      .option("mode", "FAILFAST")
        .option("mode", "DROPMALFORMED")
        .option("delimiter", "|")
        .option("inferSchema", "false")
        .option("charset", "UTF-8")
        .option("dateFormat", "MM/dd/yyyy")
        .schema(Date_dimension)
        .load("/tmp/ros/date_dimension.csv.gz")

      df.write.mode("overwrite").parquet("/tmp/vertica/data/parquet/date_dimension.parquet")
    } finally {
      sparkSession.stop()
    }
  }
}
| pborne/VerticaOnMapR | scala/Date_dimension_CSV2Parquet_StructField.scala | Scala | apache-2.0 | 4,812 |
package org.cloudfun.util
import _root_.java.text.DateFormat
import _root_.java.util.Date
import _root_.java.util.logging.{LogRecord, Formatter}
/**
* Formats log messages in a sane-to-read way.
*/
/**
 * Formats log messages in a sane-to-read way:
 * one "<timestamp> [<level>] <message>" line per record, followed by the
 * throwable and its stack trace when one is attached.
 */
object OneLineLogFormatter extends Formatter {

  // java.util.Date.toGMTString has been deprecated since Java 1.1; format the
  // timestamp explicitly instead, reproducing the same "d MMM yyyy HH:mm:ss GMT"
  // output in UTC.
  private val timestampFormat =
    java.time.format.DateTimeFormatter
      .ofPattern("d MMM yyyy HH:mm:ss 'GMT'", java.util.Locale.US)
      .withZone(java.time.ZoneOffset.UTC)

  def format(record: LogRecord): String = {
    val sb = new StringBuilder()
    sb.append(timestampFormat.format(java.time.Instant.ofEpochMilli(record.getMillis)))
      .append(" [")
      .append(record.getLevel.toString)
      .append("] ")
      .append(record.getMessage)
      .append("\n")

    // Append the attached throwable (if any) and its stack trace, indented.
    if (record.getThrown != null) {
      sb.append("\n  ")
        .append(record.getThrown)
        .append("\n  ")
        .append(record.getThrown.getStackTrace.mkString("\n  "))
    }

    sb.toString
  }
}
| zzorn/cloudfun | src/main/scala/org/cloudfun/util/OneLineLogFormatter.scala | Scala | lgpl-3.0 | 816 |
package com.cloudray.scalapress.section
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import javax.servlet.http.HttpServletRequest
import com.cloudray.scalapress.item.{ItemType, Item}
import com.cloudray.scalapress.framework.{ScalapressRequest, ScalapressContext}
/** @author Stephen Samuel */
/**
 * Tests for SectionRenderer: visibility filtering, inclusion of item-type
 * sections, and ordering by position. OneInstancePerTest re-creates the
 * mock/fixture fields below for every test.
 */
class SectionRendererTest extends FunSuite with MockitoSugar with OneInstancePerTest {

  val req = mock[HttpServletRequest]
  val context = mock[ScalapressContext]
  val sreq = ScalapressRequest(req, context)

  // Two visible sections with distinct render output and positions.
  val section1 = new StringSection("kirk")
  section1.visible = true
  section1.position = 99

  val section2 = new StringSection("kaaaan")
  section2.visible = true
  section2.position = 14

  test("renderer only includes visible sections") {
    section2.visible = false
    val rendered = SectionRenderer._render(Seq(section1, section2), sreq)
    assert(rendered.contains("kirk"))
    assert(!rendered.contains("kaaaan"))
  }

  test("rendered for objects includes sections from item type") {
    val obj = new Item
    obj.sections.add(section1)
    obj.itemType = new ItemType
    obj.itemType.sections.add(section2)
    val output = SectionRenderer.render(obj, sreq)
    assert(output.contains("kirk"))
    assert(output.contains("kaaaan"))
  }

  test("renderer sorts by section position") {
    val rendered = SectionRenderer._render(Seq(section2, section1), sreq)
    // section2 (position 14) must render before section1 (position 99).
    assert("(?s).*kaaaan.*kirk.*".r.findFirstIn(rendered).isDefined)
  }
}
/** Test stub: a Section that always renders the given fixed string. */
class StringSection(val string: String) extends Section {
  def render(request: ScalapressRequest): Option[String] = Some(string)
  def desc: String = "StringSection"
}
package metabrowse
import monaco.languages.SymbolKind
import metabrowse.{schema => d}
import scala.meta.internal.semanticdb.SymbolInformation
/** "Go to symbol" eligible definition */
/**
 * "Go to symbol" eligible definition.
 *
 * @param info       SemanticDB symbol information for the definition
 * @param kind       Monaco symbol kind used for editor presentation
 * @param definition position of the definition within its document
 */
case class DocumentSymbol(
    info: SymbolInformation,
    kind: SymbolKind,
    definition: d.Position
)
| scalameta/metadoc | metabrowse-js/src/main/scala/metabrowse/DocumentSymbol.scala | Scala | apache-2.0 | 293 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.mockito.Mockito._
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.validation.{Frs102TestBoxRetriever, ValidateAssetsEqualSharesSpec}
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
/**
 * Spec for box AC81, exercising the shared "assets equal shares" validation.
 * AC69 supplies the value AC81 is compared against, so the two hooks below
 * stub the retriever's AC69 with a concrete value (100) or with None.
 */
class AC81Spec extends ValidateAssetsEqualSharesSpec[Frs102AccountsBoxRetriever with FilingAttributesBoxValueRetriever] {

  override def addOtherBoxValue100Mock(mockRetriever: Frs102AccountsBoxRetriever with FilingAttributesBoxValueRetriever) =
    when(mockRetriever.ac69()).thenReturn(AC69(Some(100)))

  override def addOtherBoxValueNoneMock(mockRetriever: Frs102AccountsBoxRetriever with FilingAttributesBoxValueRetriever) =
    when(mockRetriever.ac69()).thenReturn(AC69(None))

  testAssetsEqualToSharesValidation("AC81", AC81.apply)

  override def createMock(): Frs102AccountsBoxRetriever with FilingAttributesBoxValueRetriever = mock[Frs102TestBoxRetriever]
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC81Spec.scala | Scala | apache-2.0 | 1,594 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.orm
import org.beangle.commons.collection.Collections
import org.beangle.commons.lang.Strings
import org.beangle.data.jdbc.meta.{Column, Table}
import org.beangle.data.model.meta._
import scala.collection.mutable
/** Anything that exposes the database column(s) it is mapped to. */
trait ColumnHolder {
  /** The columns backing this holder. */
  def columns: Iterable[Column]
}
/** [[ColumnHolder]] backed by exactly one (non-null) column. */
class SimpleColumn(column: Column) extends ColumnHolder {
  require(null != column)

  /** The single wrapped column, as a one-element collection. */
  def columns: Iterable[Column] = column :: Nil
}
/** Base of all ORM-level type mappings; copyable so a mapping can be cloned per use. */
trait OrmType extends Cloneable with Type {
  /** Returns an independent copy of this type mapping. */
  def copy(): OrmType
}
/**
 * Structured ORM type: a named bag of property mappings.
 *
 * Property lookup supports dotted paths (e.g. "address.city"), descending
 * through embedded/singular properties one segment at a time.
 */
trait OrmStructType extends OrmType with StructType {

  /** Property mappings of this type, keyed by property name. */
  var properties: mutable.Map[String, OrmProperty] = Collections.newMap[String, OrmProperty]

  /**
   * Resolves the mapping for `name`, supporting embedded (dotted) paths.
   * Throws if any path segment is missing.
   *
   * @param name property name, possibly dotted
   */
  override def property(name: String): OrmProperty = {
    val dot = name.indexOf('.')
    if (dot < 0) properties(name)
    else nestedType(name, dot).property(name.substring(dot + 1))
  }

  /** Option-returning variant of [[property]] for the final segment. */
  override def getProperty(name: String): Option[OrmProperty] = {
    val dot = name.indexOf('.')
    if (dot < 0) properties.get(name)
    else nestedType(name, dot).getProperty(name.substring(dot + 1))
  }

  // Descends one path segment: the head property must be singular and its
  // type must itself be structured.
  private def nestedType(name: String, dot: Int): OrmStructType = {
    val head = properties(name.substring(0, dot)).asInstanceOf[OrmSingularProperty]
    head.propertyType.asInstanceOf[OrmStructType]
  }

  /** Registers (or replaces) a property mapping under its own name. */
  def addProperty(property: OrmProperty): Unit = {
    properties.put(property.name, property)
  }
}
/**
 * Mapping of an entity class to a database table, carrying cache, laziness,
 * id-generation and inheritance configuration.
 */
final class OrmEntityType(val entityName: String, var clazz: Class[_], var table: Table) extends OrmStructType with EntityType {
  // Cache concurrency usage; blank/null means the entity is not cached (see cacheable).
  var cacheUsage: String = _
  var cacheRegion: String = _
  var cacheAll: Boolean = _
  var isLazy: Boolean = true
  var proxy: String = _
  var isAbstract: Boolean = _
  // -1 means "not specified".
  var optimisticLockStyle: Int = -1
  var idGenerator: IdGenerator = _
  var module: Option[String] = None

  /** An entity is cacheable once a non-blank cache usage has been configured. */
  def cacheable: Boolean = {
    Strings.isNotBlank(cacheUsage)
  }

  /** Configures cache region and usage; returns this mapping for chaining. */
  def cache(region: String, usage: String): this.type = {
    this.cacheRegion = region
    this.cacheUsage = usage
    this
  }

  /** The identifier property; every entity mapping is expected to declare "id". */
  override def id: OrmProperty = {
    properties("id")
  }

  // Entity mappings are shared, not cloned: copy() intentionally returns this.
  def copy(): this.type = {
    this
  }

  /** Merges inherited properties and materializes their columns on this table. */
  def addProperties(added: collection.Map[String, OrmProperty]): Unit = {
    if (added.nonEmpty) {
      properties ++= added
      inheriteColumns(this.table, added)
    }
  }

  // Recursively adds the columns of inherited singular properties to `table`,
  // flattening embedded types into their component columns.
  private def inheriteColumns(table: Table, inheris: collection.Map[String, OrmProperty]): Unit = {
    inheris.values foreach {
      case spm: OrmSingularProperty =>
        spm.propertyType match {
          case etm: OrmEmbeddableType => inheriteColumns(table, etm.properties)
          case _ => spm.columns foreach table.add
        }
      case _ => // non-singular (collection) properties carry no columns here
    }
  }
}
/** Scalar ORM type backed by a single column; copies clone the column too. */
final class OrmBasicType(clazz: Class[_], var column: Column) extends BasicType(clazz)
  with OrmType with Cloneable with ColumnHolder {

  /** Shallow-clones this type, then deep-copies the backing column. */
  def copy(): OrmBasicType = {
    val duplicate = super.clone().asInstanceOf[OrmBasicType]
    duplicate.column = column.clone()
    duplicate
  }

  /** The single backing column, as a one-element collection. */
  override def columns: Iterable[Column] = column :: Nil
}
/** Embedded (component) ORM type; copies deep-copy every property mapping. */
final class OrmEmbeddableType(var clazz: Class[_]) extends EmbeddableType with OrmStructType {

  /** Name of the back-reference property pointing at the owning entity, if any. */
  var parentName: Option[String] = None

  /** Shallow-clones this type, then rebuilds the property map with copied entries. */
  def copy(): OrmEmbeddableType = {
    val duplicate = super.clone().asInstanceOf[OrmEmbeddableType]
    val copied = Collections.newMap[String, OrmProperty]
    properties foreach { case (name, p) => copied.put(name, p.copy()) }
    duplicate.properties = copied
    duplicate
  }
}
/** Definition of a custom type: implementation class name plus configuration parameters. */
class TypeDef(val clazz: String, val params: Map[String, String])
/**
 * Cache settings for a collection-valued property (`clazz`.`property`).
 *
 * Both cache fields remain null until [[cache]] is called, either directly
 * or via the four-argument convenience constructor.
 */
final class Collection(val clazz: Class[_], val property: String) {

  var cacheRegion: String = _
  var cacheUsage: String = _

  /** Convenience constructor that configures the cache immediately. */
  def this(clazz: Class[_], property: String, region: String, usage: String) = {
    this(clazz, property)
    cache(region, usage)
  }

  /** Records the cache region/usage; returns this collection for chaining. */
  def cache(region: String, usage: String): this.type = {
    cacheRegion = region
    cacheUsage = usage
    this
  }
}
/** Well-known identifier generator strategy names. */
object IdGenerator {
  val Date = "date"
  val DateTime = "datetime"
  val AutoIncrement = "auto_increment"
  val SeqPerTable = "seq_per_table"
  val Code = "code"
  val Assigned = "assigned"
  val Uuid = "uuid"
  val Sequence = "sequence"
  val Identity = "identity"
  val Native = "native"
}
/** Identifier generator configuration: strategy name plus free-form parameters. */
final class IdGenerator(var name: String) {
  // Extra configuration handed to the underlying generator implementation.
  val params: mutable.Map[String, String] = Collections.newMap[String, String]
  // Marker value identifying an unsaved instance, if configured.
  var nullValue: Option[String] = None

  /** Records `value` as the unsaved-id marker; returns this generator for chaining. */
  def unsaved(value: String): this.type = {
    nullValue = Some(value)
    this
  }
}
| beangle/data | orm/src/main/scala/org/beangle/data/orm/OrmType.scala | Scala | lgpl-3.0 | 5,294 |
package polytope
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.Range
import scala.collection.immutable.Vector
import scala.reflect.runtime.universe._
import scala.reflect.ClassTag
/**
 * Collection of algorithms for generating Schubert polynomials.
 *
 * NOTE(review): `Permutation`, `Polynomial` and `Term` are aliases declared
 * in the `polytope` package (not visible in this file). From usage here a
 * `Term` is a bit-packed `Long` monomial (see the `0L` literals) and a
 * `Polynomial` is an `ArrayBuffer[Term]` — confirm against the package object.
 */
object SchubertFactory {

  /**
   * Returns a permutation read from the standard input.
   *
   * Prompts for the number of symbols n and then for the image of each of
   * 1..n. No validation is performed on the entered values.
   */
  def readPermutation(): Permutation = {
    println("Schubert Polynomial Calculator")
    println("Please enter the number of symbols: ")
    val n = scala.io.StdIn.readInt
    return Array.tabulate(n){i => {
      printf("%d => ", i+1)
      scala.io.StdIn.readInt
    }}
  }

  /**
   * Returns the Schubert polynomial associated with a given permutation.
   *
   * Makes no attempt to check that the Permutation is actually a Permutation
   * i.e., that it contains every number 1..n exactly once.
   *
   * @param perm The permutation.
   * @return The Schubert polynomial of `perm`.
   */
  def schubertPolynomial(perm: Permutation): Polynomial = {
    // Identity permutation: the polynomial is the single empty monomial (0L).
    if (isIdentity(perm)) return ArrayBuffer[Term](0L)
    val leadFactor: Term = 0L
    return schubertAlgorithm(leadFactor, 0, perm.length-1, perm)
  }

  /**
   * Algorithm adapted from the C source code of the Symmetrica
   * library which is freely available from
   * [[http://www.algorithm.uni-bayreuth.de/en/research/SYMMETRICA/]]
   *
   * Recursively expands the polynomial, threading the monomial accumulated so
   * far (`leadFactor`) through the recursion.
   * NOTE(review): `incExp`, `changeExp`, `addInPlace` and `isIdentity` are
   * helpers defined elsewhere in the package; the commented-out `require`s
   * below suggest the packed representation supports at most 16 variables
   * and exponent values — confirm before generalizing.
   */
  def schubertAlgorithm(leadFactor: Term, index: Int, exponent: Int,
                        perm: Permutation): Polynomial = {
    // The code actually runs slower when you only define result when it's needed
    val result = ArrayBuffer[Term]()

    // Set limits of the optimized code
    /*
    require(index < 16)
    require(exponent < 16)
    require(perm.length < 16)
    */

    if (perm.length == 2) {
      // Base case with two symbols: (2,1) contributes the monomial with the
      // exponent at `index` bumped (presumably via incExp); (1,2) contributes
      // the accumulated monomial unchanged.
      if (perm(0) == 2) {
        result.append(incExp(leadFactor, index))
      } else {
        result.append(leadFactor)
      }
    } else if (perm(0) == perm.length) {
      // Leading maximal entry: fix the exponent at `index` and recurse on the
      // tail permutation with the next variable.
      val newPerm: Permutation = perm.drop(1)
      val newLeadFactor: Term = changeExp(leadFactor, index, exponent)
      return schubertAlgorithm(newLeadFactor, index+1, newPerm.length - 1,
                               newPerm)
    } else {
      // General case: for each candidate position, swap perm(0) with the
      // smallest-so-far larger entry and accumulate the sub-polynomials.
      var max: Int = perm.length + 1
      var i: Int = 1
      while (i < perm.length) {
        if (perm(i) < max && perm(i) > perm(0)) {
          max = perm(i)
          val newPerm: Permutation =
            perm.updated(0, perm(i)).updated(i, perm(0))
          addInPlace(result,
                     schubertAlgorithm(leadFactor, index, exponent-1, newPerm))
        }
        i += 1
      }
    }
    return result
  }
}
| expz/polytope | src/main/scala/SchubertFactory.scala | Scala | gpl-3.0 | 2,807 |
package com.datastax.spark.connector
import scala.collection.immutable
import scala.concurrent.duration._
import akka.util.Timeout
/** Shared constants and implicit defaults for the connector test kit. */
package object testkit {

  /** Loopback host used by embedded test services. */
  val DefaultHost = "127.0.0.1"

  // Implicit definitions must carry an explicit result type: an inferred type
  // here is fragile for implicit resolution and is rejected under Scala 2.13's
  // -Xsource:3 and Scala 3.
  implicit val DefaultTimeout: Timeout = Timeout(5.seconds)

  /** Sample words consumed by streaming word-count tests. */
  val data = immutable.Set("words ", "may ", "count ")

  /** Name under which test actors are registered. */
  val actorName = "my-actor"
}
| brkyvz/spark-cassandra-connector | spark-cassandra-connector/src/test/scala/com/datastax/spark/connector/testkit/package.scala | Scala | apache-2.0 | 338 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.utils
import org.apache.flink.table.functions.BuiltInFunctionDefinitions
import org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.{JoinRelType, SetOp}
import org.apache.calcite.rex.RexNode
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.tools.RelBuilder
import org.apache.calcite.util.Util
import java.util
import java.util.Collections
/**
 * Util class that rewrite [[SetOp]].
 */
object SetOpRewriteUtil {

  /**
   * Generate equals condition by keys (The index on both sides is the same) to
   * join left relNode and right relNode.
   *
   * Each key yields `l = r OR (l IS NULL AND r IS NULL)`, so NULL keys compare
   * equal — the null-safe semantics SQL set operations require.
   */
  def generateEqualsCondition(
      relBuilder: RelBuilder,
      left: RelNode,
      right: RelNode,
      keys: Seq[Int]): Seq[RexNode] = {
    val rexBuilder = relBuilder.getRexBuilder
    val leftTypes = RelOptUtil.getFieldTypeList(left.getRowType)
    val rightTypes = RelOptUtil.getFieldTypeList(right.getRowType)
    val conditions = keys.map { key =>
      // Right-side input refs are offset by the width of the left row.
      val leftRex = rexBuilder.makeInputRef(leftTypes.get(key), key)
      val rightRex = rexBuilder.makeInputRef(rightTypes.get(key), leftTypes.size + key)
      val equalCond = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, leftRex, rightRex)
      relBuilder.or(
        equalCond,
        relBuilder.and(relBuilder.isNull(leftRex), relBuilder.isNull(rightRex)))
    }
    conditions
  }

  /**
   * Use table function to replicate the row N times. First field is long type,
   * and the rest are the row fields.
   */
  def replicateRows(
      relBuilder: RelBuilder,
      outputRelDataType: RelDataType,
      fields: util.List[Integer]): RelNode = {
    val cluster = relBuilder.getCluster
    // Bridge the built-in REPLICATE_ROWS table function into Calcite.
    val sqlFunction = BridgingSqlFunction.of(
      relBuilder.getCluster,
      BuiltInFunctionDefinitions.INTERNAL_REPLICATE_ROWS)
    relBuilder
      .functionScan(sqlFunction, 0, relBuilder.fields(Util.range(fields.size() + 1)))
      .rename(outputRelDataType.getFieldNames)

    // correlated join
    val corSet = Collections.singleton(cluster.createCorrel())
    // Project away the left (input) side, keeping only the replicated row fields.
    val output = relBuilder
      .join(JoinRelType.INNER, relBuilder.literal(true), corSet)
      .project(relBuilder.fields(Util.range(fields.size() + 1, fields.size() * 2 + 1)))
      .build()
    output
  }
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/utils/SetOpRewriteUtil.scala | Scala | apache-2.0 | 3,272 |
package scuff.fsm
/** Payload-typed variants of the FSM building blocks. */
object typed {

  /** A state that transitions can originate from (leaf or super state). */
  sealed trait Source[-T] extends BaseState[T]
  /** A state that transitions can land on (leaf or final state). */
  sealed trait Target[-T] extends BaseState[T]

  /** Composite state containing sub-states; a source but never a target. */
  class SuperState[T](name: String = "") extends Source[T] {
    override def toString = if (name.length == 0) super.toString else name
  }
  /** Ordinary leaf state; usable as both transition source and target. */
  class State[T](name: String = "") extends Source[T] with Target[T] {
    override def toString = if (name.length == 0) super.toString else name
  }
  /** Terminal state; only a target — the machine accepts no events afterwards. */
  class FinalState[T](name: String = "") extends Target[T] {
    override def toString = if (name.length == 0) super.toString else name
  }

  /** Finite state machine with event payloads of type T; the machine is itself the root super state. */
  trait FSM[T] extends typed.SuperState[T] {

    // Reflectively wires up the state hierarchy: for every BaseState-typed
    // field declared on `superState`, sets the sub-state's `parent` and
    // `assignedName` (via scuff.reflect.Surgeon) and recurses into nested
    // super states. `ignore` prevents revisiting states (and cycles).
    private[this] def assignParenthood(superState: typed.SuperState[T], ignore: collection.mutable.Set[Any]): Unit = {
      val parent = Some(superState)
      val baseStates = superState.getClass.getDeclaredFields.filter(f => classOf[BaseState[T]].isAssignableFrom(f.getType))
      baseStates.foreach { field =>
        field.setAccessible(true)
        val subState = field.get(superState)
        if (!ignore.contains(subState)) {
          val ss = new scuff.reflect.Surgeon(subState)
          ss.parent = parent
          ss.assignedName = field.getName
          ignore += subState
          subState match {
            case ss: typed.SuperState[T] => assignParenthood(ss, ignore)
            case _ => // Ignore
          }
        }
      }
    }

    // True when `checkState` is the current state or any of its ancestors.
    @annotation.tailrec
    private def stateMatch(checkState: BaseState[T], current: Option[BaseState[T]]): Boolean = current match {
      case None => false
      case Some(current) => (current eq checkState) || stateMatch(checkState, current.parent)
    }

    /** Is the machine in `state`, either directly or via one of its sub-states? */
    def is(state: BaseState[T]) = stateMatch(state, currState)
    /** Current state, or None before [[init]] has been called. */
    def current = currState
    /** Has the machine reached a [[FinalState]]? */
    def isFinal = currState match {
      case Some(_: FinalState[_]) => true
      case _ => false
    }

    /** One transition-table row: (source state, event) -> target state. */
    type Transition = ((typed.Source[T], Event), typed.Target[T])
    /** Transition table; (state, event) keys must be unique (checked in [[init]]). */
    protected def transitions: Set[Transition]
    private var currState: Option[typed.Target[T]] = None
    private var transitionTable: Map[(typed.Source[T], Event), typed.Target[T]] = _

    /** Initialize state. DO NOT call this from the constructor. */
    protected def init(state: typed.State[T]) = {
      transitionTable = transitions.toMap
      require(transitionTable.size == transitions.size, "Duplicate State -> Event transition")
      assignParenthood(this, collection.mutable.Set(this))
      currState = Option(state)
    }

    /** Dispatches `evt` (with optional payload) and moves to the resolved target state. */
    def apply(evt: Event, payload: T = null.asInstanceOf[T]) = currState match {
      case None => throw new IllegalStateException("State machine not initialized yet")
      case Some(source: typed.Source[T]) =>
        // The source state's hook may map the incoming event to another event.
        val targetEvent = source.onEvent(evt, payload)
        val target = transition(source, targetEvent)
        if (source ne target) {
          currState = Some(target)
          target.onEvent(targetEvent, payload)
        }
      case _ => throw new IllegalStateException("State machine is finalized")
    }

    // Resolves (state, event) against the table, bubbling up through parent
    // super states until a transition is found or the root is exhausted.
    @annotation.tailrec
    private def transition(state: typed.Source[T], evt: Event): typed.Target[T] = {
      val key = state -> evt
      transitionTable.get(key) match {
        case Some(toState) => toState
        case None => state.parent match {
          case None => throw new IllegalStateException(s"${currState.get} cannot handle $evt")
          case Some(parent) => transition(parent, evt)
        }
      }
    }
  }
}
/** Event marker trait. Implement for every event dispatched via `FSM.apply`. */
trait Event
/**
 * General state representation.
 *
 * NOTE(review): `parent` and `assignedName` are declared as `val`s but are
 * populated reflectively by `typed.FSM.init` (via `scuff.reflect.Surgeon`)
 * when the state hierarchy is wired up — they are not meaningful before
 * `init` has run.
 */
sealed class BaseState[-T] {
  // Owning super state; assigned reflectively during FSM.init.
  private[fsm] final val parent: Option[typed.SuperState[Any]] = None
  // Field name of this state within its parent; assigned reflectively during FSM.init.
  private[this] val assignedName: String = getClass.getSimpleName
  override def toString: String = parent.map(_.toString concat ".").getOrElse("") concat assignedName
  /** Side-effect hook invoked on event dispatch; default is a no-op. */
  protected[fsm] def onEvent(evt: Event) = ()
  /** Payload-aware hook; may return a different event to use for the transition lookup. */
  protected[fsm] def onEvent(evt: Event, payload: T): Event = { onEvent(evt); evt }
}
/** Super state. Is expected to contain other states as `val`s (discovered reflectively by `FSM.init`). */
class SuperState(name: String = "") extends typed.SuperState[Any](name)
/** Leaf state. Should not contain other sub states; usable as both transition source and target. */
class State(name: String = "") extends typed.State[Any](name)
/** Final state. Once entered, further event dispatch throws (see `typed.FSM.apply`). */
class FinalState(name: String = "") extends typed.FinalState[Any](name)
/**
 * A finite state machine trait, which is itself a super state.
 * Events carry no payload (payload type fixed to `Any`); use [[typed.FSM]] for typed payloads.
 */
trait FSM extends typed.FSM[Any]
| nilskp/scuff | src/main/scala/scuff/fsm/FSM.scala | Scala | mit | 4,330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.io.File
import java.net.URL
import java.nio.ByteBuffer
import java.util.Properties
import scala.collection.mutable
import scala.concurrent.duration._
import org.json4s.{DefaultFormats, Extraction}
import org.json4s.JsonAST.{JArray, JObject}
import org.json4s.JsonDSL._
import org.mockito.Mockito.when
import org.scalatest.concurrent.Eventually.{eventually, timeout}
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark._
import org.apache.spark.TestUtils._
import org.apache.spark.resource._
import org.apache.spark.resource.ResourceUtils._
import org.apache.spark.resource.TestResourceIDs._
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.scheduler.TaskDescription
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.LaunchTask
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.util.{SerializableBuffer, Utils}
/**
 * Unit tests for CoarseGrainedExecutorBackend, covering: resource-file
 * parsing and validation, script-based resource discovery, resource-profile
 * handling, taskId-keyed resource tracking, and command-line argument
 * parsing (bind address defaulting).
 */
class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
    with LocalSparkContext with MockitoSugar {

  // NOTE(review): implicit vals should carry an explicit result type
  // (DefaultFormats.type here); inferred implicit types are rejected under
  // Scala 2.13 -Xsource:3 / Scala 3.
  implicit val formats = DefaultFormats

  test("parsing no resources") {
    val conf = new SparkConf
    val resourceProfile = ResourceProfile.getOrCreateDefaultProfile(conf)
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)

    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend( env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, resourceProfile)
    withTempDir { tmpDir =>
      // A JSON object that is not a valid ResourceAllocation must be rejected.
      val testResourceArgs: JObject = ("" -> "")
      val ja = JArray(List(testResourceArgs))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)
      var error = intercept[SparkException] {
        val parsedResources = backend.parseOrFindResources(Some(f1))
      }.getMessage()

      assert(error.contains("Error parsing resources file"),
        s"Calling with no resources didn't error as expected, error: $error")
    }
  }

  test("parsing one resource") {
    val conf = new SparkConf
    conf.set(EXECUTOR_GPU_ID.amountConf, "2")
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)
    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend( env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, ResourceProfile.getOrCreateDefaultProfile(conf))
    withTempDir { tmpDir =>
      val ra = ResourceAllocation(EXECUTOR_GPU_ID, Seq("0", "1"))
      val ja = Extraction.decompose(Seq(ra))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)
      val parsedResources = backend.parseOrFindResources(Some(f1))

      assert(parsedResources.size === 1)
      assert(parsedResources.get(GPU).nonEmpty)
      assert(parsedResources.get(GPU).get.name === GPU)
      assert(parsedResources.get(GPU).get.addresses.sameElements(Array("0", "1")))
    }
  }

  test("parsing multiple resources resource profile") {
    val rpBuilder = new ResourceProfileBuilder
    val ereqs = new ExecutorResourceRequests().resource(GPU, 2)
    ereqs.resource(FPGA, 3)
    val rp = rpBuilder.require(ereqs).build
    testParsingMultipleResources(new SparkConf, rp)
  }

  test("parsing multiple resources") {
    val conf = new SparkConf
    conf.set(EXECUTOR_GPU_ID.amountConf, "2")
    conf.set(EXECUTOR_FPGA_ID.amountConf, "3")
    testParsingMultipleResources(conf, ResourceProfile.getOrCreateDefaultProfile(conf))
  }

  // Shared body for the two "parsing multiple resources" tests above: verifies
  // GPU and FPGA allocations are both read back from the resources file.
  def testParsingMultipleResources(conf: SparkConf, resourceProfile: ResourceProfile): Unit = {
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)
    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend( env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, resourceProfile)

    withTempDir { tmpDir =>
      val gpuArgs = ResourceAllocation(EXECUTOR_GPU_ID, Seq("0", "1"))
      val fpgaArgs =
        ResourceAllocation(EXECUTOR_FPGA_ID, Seq("f1", "f2", "f3"))
      val ja = Extraction.decompose(Seq(gpuArgs, fpgaArgs))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)
      val parsedResources = backend.parseOrFindResources(Some(f1))

      assert(parsedResources.size === 2)
      assert(parsedResources.get(GPU).nonEmpty)
      assert(parsedResources.get(GPU).get.name === GPU)
      assert(parsedResources.get(GPU).get.addresses.sameElements(Array("0", "1")))
      assert(parsedResources.get(FPGA).nonEmpty)
      assert(parsedResources.get(FPGA).get.name === FPGA)
      assert(parsedResources.get(FPGA).get.addresses.sameElements(Array("f1", "f2", "f3")))
    }
  }

  test("error checking parsing resources and executor and task configs") {
    val conf = new SparkConf
    conf.set(EXECUTOR_GPU_ID.amountConf, "2")
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)
    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend(env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, ResourceProfile.getOrCreateDefaultProfile(conf))

    // not enough gpu's on the executor
    withTempDir { tmpDir =>
      val gpuArgs = ResourceAllocation(EXECUTOR_GPU_ID, Seq("0"))
      val ja = Extraction.decompose(Seq(gpuArgs))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)

      var error = intercept[IllegalArgumentException] {
        val parsedResources = backend.parseOrFindResources(Some(f1))
      }.getMessage()

      assert(error.contains("Resource: gpu, with addresses: 0 is less than what the " +
        "user requested: 2"))
    }

    // missing resource on the executor
    withTempDir { tmpDir =>
      val fpga = ResourceAllocation(EXECUTOR_FPGA_ID, Seq("0"))
      val ja = Extraction.decompose(Seq(fpga))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)

      var error = intercept[SparkException] {
        val parsedResources = backend.parseOrFindResources(Some(f1))
      }.getMessage()

      assert(error.contains("User is expecting to use resource: gpu, but didn't " +
        "specify a discovery script!"))
    }
  }

  test("executor resource found less than required resource profile") {
    val rpBuilder = new ResourceProfileBuilder
    val ereqs = new ExecutorResourceRequests().resource(GPU, 4)
    val treqs = new TaskResourceRequests().resource(GPU, 1)
    val rp = rpBuilder.require(ereqs).require(treqs).build
    testExecutorResourceFoundLessThanRequired(new SparkConf, rp)
  }

  test("executor resource found less than required") {
    val conf = new SparkConf()
    conf.set(EXECUTOR_GPU_ID.amountConf, "4")
    conf.set(TASK_GPU_ID.amountConf, "1")
    testExecutorResourceFoundLessThanRequired(conf, ResourceProfile.getOrCreateDefaultProfile(conf))
  }

  // Shared body for the two "found less than required" tests: an allocation of
  // 2 GPUs must fail validation when 4 were requested.
  private def testExecutorResourceFoundLessThanRequired(
      conf: SparkConf,
      resourceProfile: ResourceProfile) = {
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)
    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend(env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, resourceProfile)

    // executor resources < required
    withTempDir { tmpDir =>
      val gpuArgs = ResourceAllocation(EXECUTOR_GPU_ID, Seq("0", "1"))
      val ja = Extraction.decompose(Seq(gpuArgs))
      val f1 = createTempJsonFile(tmpDir, "resources", ja)

      var error = intercept[IllegalArgumentException] {
        val parsedResources = backend.parseOrFindResources(Some(f1))
      }.getMessage()

      assert(error.contains("Resource: gpu, with addresses: 0,1 is less than what the " +
        "user requested: 4"))
    }
  }

  test("use resource discovery") {
    val conf = new SparkConf
    conf.set(EXECUTOR_FPGA_ID.amountConf, "3")
    assume(!(Utils.isWindows))
    withTempDir { dir =>
      // Discovery script echoes a fixed FPGA allocation as JSON.
      val scriptPath = createTempScriptWithExpectedOutput(dir, "fpgaDiscoverScript",
        """{"name": "fpga","addresses":["f1", "f2", "f3"]}""")
      conf.set(EXECUTOR_FPGA_ID.discoveryScriptConf, scriptPath)

      val serializer = new JavaSerializer(conf)
      val env = createMockEnv(conf, serializer)

      // we don't really use this, just need it to get at the parser function
      val backend = new CoarseGrainedExecutorBackend(env.rpcEnv, "driverurl", "1", "host1", "host1",
        4, Seq.empty[URL], env, None, ResourceProfile.getOrCreateDefaultProfile(conf))

      // No resources file: the backend must fall back to the discovery script.
      val parsedResources = backend.parseOrFindResources(None)

      assert(parsedResources.size === 1)
      assert(parsedResources.get(FPGA).nonEmpty)
      assert(parsedResources.get(FPGA).get.name === FPGA)
      assert(parsedResources.get(FPGA).get.addresses.sameElements(Array("f1", "f2", "f3")))
    }
  }

  test("use resource discovery and allocated file option with resource profile") {
    assume(!(Utils.isWindows))
    withTempDir { dir =>
      val scriptPath = createTempScriptWithExpectedOutput(dir, "fpgaDiscoverScript",
        """{"name": "fpga","addresses":["f1", "f2", "f3"]}""")
      val rpBuilder = new ResourceProfileBuilder
      val ereqs = new ExecutorResourceRequests().resource(FPGA, 3, scriptPath)
      ereqs.resource(GPU, 2)
      val rp = rpBuilder.require(ereqs).build
      allocatedFileAndConfigsResourceDiscoveryTestFpga(dir, new SparkConf, rp)
    }
  }

  test("use resource discovery and allocated file option") {
    assume(!(Utils.isWindows))
    withTempDir { dir =>
      val scriptPath = createTempScriptWithExpectedOutput(dir, "fpgaDiscoverScript",
        """{"name": "fpga","addresses":["f1", "f2", "f3"]}""")
      val conf = new SparkConf
      conf.set(EXECUTOR_FPGA_ID.amountConf, "3")
      conf.set(EXECUTOR_FPGA_ID.discoveryScriptConf, scriptPath)
      conf.set(EXECUTOR_GPU_ID.amountConf, "2")
      val rp = ResourceProfile.getOrCreateDefaultProfile(conf)
      allocatedFileAndConfigsResourceDiscoveryTestFpga(dir, conf, rp)
    }
  }

  // Shared body for the two tests above: GPUs come from the allocations file
  // while FPGAs come from the discovery script; both must be present.
  private def allocatedFileAndConfigsResourceDiscoveryTestFpga(
      dir: File,
      conf: SparkConf,
      resourceProfile: ResourceProfile) = {
    val serializer = new JavaSerializer(conf)
    val env = createMockEnv(conf, serializer)

    // we don't really use this, just need it to get at the parser function
    val backend = new CoarseGrainedExecutorBackend(env.rpcEnv, "driverurl", "1", "host1", "host1",
      4, Seq.empty[URL], env, None, resourceProfile)
    val gpuArgs = ResourceAllocation(EXECUTOR_GPU_ID, Seq("0", "1"))
    val ja = Extraction.decompose(Seq(gpuArgs))
    val f1 = createTempJsonFile(dir, "resources", ja)
    val parsedResources = backend.parseOrFindResources(Some(f1))

    assert(parsedResources.size === 2)
    assert(parsedResources.get(GPU).nonEmpty)
    assert(parsedResources.get(GPU).get.name === GPU)
    assert(parsedResources.get(GPU).get.addresses.sameElements(Array("0", "1")))
    assert(parsedResources.get(FPGA).nonEmpty)
    assert(parsedResources.get(FPGA).get.name === FPGA)
    assert(parsedResources.get(FPGA).get.addresses.sameElements(Array("f1", "f2", "f3")))
  }

  test("track allocated resources by taskId") {
    val conf = new SparkConf
    val securityMgr = new SecurityManager(conf)
    val serializer = new JavaSerializer(conf)
    var backend: CoarseGrainedExecutorBackend = null

    try {
      val rpcEnv = RpcEnv.create("1", "localhost", 0, conf, securityMgr)
      val env = createMockEnv(conf, serializer, Some(rpcEnv))
      backend = new CoarseGrainedExecutorBackend(env.rpcEnv, rpcEnv.address.hostPort, "1",
        "host1", "host1", 4, Seq.empty[URL], env, None,
        resourceProfile = ResourceProfile.getOrCreateDefaultProfile(conf))
      assert(backend.taskResources.isEmpty)

      val taskId = 1000000
      // We don't really verify the data, just pass it around.
      val data = ByteBuffer.wrap(Array[Byte](1, 2, 3, 4))
      val taskDescription = new TaskDescription(taskId, 2, "1", "TASK 1000000", 19,
        1, mutable.Map.empty, mutable.Map.empty, new Properties,
        Map(GPU -> new ResourceInformation(GPU, Array("0", "1"))), data)
      val serializedTaskDescription = TaskDescription.encode(taskDescription)
      backend.executor = mock[Executor]
      backend.rpcEnv.setupEndpoint("Executor 1", backend)

      // Launch a new task shall add an entry to `taskResources` map.
      backend.self.send(LaunchTask(new SerializableBuffer(serializedTaskDescription)))
      eventually(timeout(10.seconds)) {
        assert(backend.taskResources.size == 1)
        val resources = backend.taskResources(taskId)
        assert(resources(GPU).addresses sameElements Array("0", "1"))
      }

      // Update the status of a running task shall not affect `taskResources` map.
      backend.statusUpdate(taskId, TaskState.RUNNING, data)
      assert(backend.taskResources.size == 1)
      val resources = backend.taskResources(taskId)
      assert(resources(GPU).addresses sameElements Array("0", "1"))

      // Update the status of a finished task shall remove the entry from `taskResources` map.
      backend.statusUpdate(taskId, TaskState.FINISHED, data)
      assert(backend.taskResources.isEmpty)
    } finally {
      if (backend != null) {
        backend.rpcEnv.shutdown()
      }
    }
  }

  test("SPARK-24203 when bindAddress is not set, it defaults to hostname") {
    val args1 = Array(
      "--driver-url", "driverurl",
      "--executor-id", "1",
      "--hostname", "host1",
      "--cores", "1",
      "--app-id", "app1")

    val arg = CoarseGrainedExecutorBackend.parseArguments(args1, "")
    assert(arg.bindAddress == "host1")
  }

  test("SPARK-24203 when bindAddress is different, it does not default to hostname") {
    val args1 = Array(
      "--driver-url", "driverurl",
      "--executor-id", "1",
      "--hostname", "host1",
      "--bind-address", "bindaddress1",
      "--cores", "1",
      "--app-id", "app1")

    val arg = CoarseGrainedExecutorBackend.parseArguments(args1, "")
    assert(arg.bindAddress == "bindaddress1")
  }

  // Builds a mock SparkEnv returning the given conf/serializer, and registers
  // it globally via SparkEnv.set so backend code can find it.
  private def createMockEnv(conf: SparkConf, serializer: JavaSerializer,
      rpcEnv: Option[RpcEnv] = None): SparkEnv = {
    val mockEnv = mock[SparkEnv]
    val mockRpcEnv = mock[RpcEnv]
    when(mockEnv.conf).thenReturn(conf)
    when(mockEnv.serializer).thenReturn(serializer)
    when(mockEnv.closureSerializer).thenReturn(serializer)
    when(mockEnv.rpcEnv).thenReturn(rpcEnv.getOrElse(mockRpcEnv))
    SparkEnv.set(mockEnv)
    mockEnv
  }
}
| shuangshuangwang/spark | core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala | Scala | apache-2.0 | 15,469 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package system.basic
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common.JsHelpers
import common.TestHelpers
import common.TestUtils
import common.BaseWsk
import common.WskProps
import common.WskTestHelpers
import spray.json.pimpString
import spray.json.JsString
import common.TestUtils.RunResult
import spray.json.JsObject
/**
 * Basic system tests for the Swift 3 action runtime.
 *
 * Abstract: concrete subclasses supply the `wsk` CLI/REST wrapper to run
 * the tests against.
 */
@RunWith(classOf[JUnitRunner])
abstract class WskBasicSwift3Tests extends TestHelpers with WskTestHelpers with JsHelpers {

  implicit val wskprops: common.WskProps = WskProps()
  val wsk: BaseWsk
  val defaultAction: Some[String] = Some(TestUtils.getTestActionFilename("hello.swift"))
  lazy val currentSwiftDefaultKind = "swift:3"

  behavior of "Swift runtime"

  it should "Ensure that Swift actions can have a non-default entrypoint" in withAssetCleaner(wskprops) {
    (wp, assetHelper) =>
      val name = "niamSwiftAction"
      val file = Some(TestUtils.getTestActionFilename("niam.swift"))

      assetHelper.withCleaner(wsk.action, name) { (action, _) =>
        // Register the action with "niam" (not the default "main") as entry point.
        action.create(name, file, main = Some("niam"))
      }

      withActivation(wsk.activation, wsk.action.invoke(name)) { activation =>
        val response = activation.response
        response.result.get.fields.get("error") shouldBe empty
        response.result.get.fields.get("greetings") should be(Some(JsString("Hello from a non-standard entrypoint.")))
      }
  }

  // Drops everything up to (and including) the first matched delimiter in
  // stdout and parses the remainder as a JSON object.
  // NOTE(review): "\\n" matches a literal backslash-n, not a newline; this
  // looks like an escaping artifact of the file dump — verify the upstream
  // source uses "\n" before relying on it.
  def convertRunResultToJsObject(result: RunResult): JsObject = {
    val stdout = result.stdout
    val firstNewline = stdout.indexOf("\\n")
    stdout.substring(firstNewline + 1).parseJson.asJsObject
  }
}
| duynguyen/incubator-openwhisk | tests/src/test/scala/system/basic/WskBasicSwift3Tests.scala | Scala | apache-2.0 | 2,416 |
/*
* Copyright 2010 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.logging
import java.io._
import java.util.{Calendar, Date, logging => javalog}
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import com.twitter.conversions.storage._
import com.twitter.conversions.time._
import com.twitter.util.{TempFolder, Time}
@RunWith(classOf[JUnitRunner])
class FileHandlerTest extends WordSpec with TempFolder {
  // Opens a buffered reader over `filename` inside the temp folder.
  // NOTE(review): reads with the platform default charset while `writer`
  // below writes UTF-8 — fine for the ASCII fixtures used here.
  def reader(filename: String) = {
    new BufferedReader(new InputStreamReader(new FileInputStream(new File(folderName, filename))))
  }
def writer(filename: String) = {
new OutputStreamWriter(new FileOutputStream(new File(folderName, filename)), "UTF-8")
}
"FileHandler" should {
val record1 = new javalog.LogRecord(Level.INFO, "first post!")
val record2 = new javalog.LogRecord(Level.INFO, "second post")
"honor append setting on logfiles" in {
withTempFolder {
val f = writer("test.log")
f.write("hello!\\n")
f.close
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.Hourly,
append = true,
formatter = BareFormatter
).apply()
handler.publish(record1)
val f2 = reader("test.log")
assert(f2.readLine === "hello!")
}
withTempFolder {
val f = writer("test.log")
f.write("hello!\\n")
f.close
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.Hourly,
append = false,
formatter = BareFormatter
).apply()
handler.publish(record1)
val f2 = reader("test.log")
assert(f2.readLine === "first post!")
}
}
// /* Test is commented out according to http://jira.local.twitter.com/browse/REPLA-618 */
//
// "respond to a sighup to reopen a logfile with sun.misc" in {
// try {
// val signalClass = Class.forName("sun.misc.Signal")
// val sighup = signalClass.getConstructor(classOf[String]).newInstance("HUP").asInstanceOf[Object]
// val raiseMethod = signalClass.getMethod("raise", signalClass)
// withTempFolder {
// val handler = FileHandler(
// filename = folderName + "/new.log",
// rollPolicy = Policy.SigHup,
// append = true,
// formatter = BareFormatter
// ).apply()
// val logFile = new File(folderName, "new.log")
// logFile.renameTo(new File(folderName, "old.log"))
// handler.publish(record1)
// raiseMethod.invoke(null, sighup)
// val newLogFile = new File(folderName, "new.log")
// newLogFile.exists() should eventually(be_==(true))
// handler.publish(record2)
// val oldReader = reader("old.log")
// assert(oldReader.readLine === "first post!")
// val newReader = reader("new.log")
// assert(newReader.readLine === "second post")
// }
// } catch {
// case ex: ClassNotFoundException =>
// }
// }
"roll logs on time" should {
"hourly" in {
withTempFolder {
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.Hourly,
append = true,
formatter = BareFormatter
).apply()
assert(handler.computeNextRollTime(1206769996722L) === Some(1206770400000L))
assert(handler.computeNextRollTime(1206770400000L) === Some(1206774000000L))
assert(handler.computeNextRollTime(1206774000001L) === Some(1206777600000L))
}
}
"weekly" in {
withTempFolder {
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.Weekly(Calendar.SUNDAY),
append = true,
formatter = new Formatter(timezone = Some("GMT-7:00"))
).apply()
assert(handler.computeNextRollTime(1250354734000L) === Some(1250406000000L))
assert(handler.computeNextRollTime(1250404734000L) === Some(1250406000000L))
assert(handler.computeNextRollTime(1250406001000L) === Some(1251010800000L))
assert(handler.computeNextRollTime(1250486000000L) === Some(1251010800000L))
assert(handler.computeNextRollTime(1250496000000L) === Some(1251010800000L))
}
}
}
// verify that at the proper time, the log file rolls and resets.
"roll logs into new files" in {
withTempFolder {
val handler = new FileHandler(folderName + "/test.log", Policy.Hourly, true, -1, BareFormatter, None)
Time.withCurrentTimeFrozen { time =>
handler.publish(record1)
val date = new Date(Time.now.inMilliseconds)
time.advance(1.hour)
handler.publish(record2)
handler.close()
assert(reader("test-" + handler.timeSuffix(date) + ".log").readLine === "first post!")
assert(reader("test.log").readLine === "second post")
}
}
}
"keep no more than N log files around" in {
withTempFolder {
assert(new File(folderName).list().length === 0)
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.Hourly,
append = true,
rotateCount = 2,
formatter = BareFormatter
).apply()
handler.publish(record1)
assert(new File(folderName).list().length === 1)
handler.roll()
handler.publish(record1)
assert(new File(folderName).list().length === 2)
handler.roll()
handler.publish(record1)
assert(new File(folderName).list().length === 2)
handler.close()
}
}
"ignores the target filename despite shorter filenames" in {
// even if the sort order puts the target filename before `rotateCount` other
// files, it should not be removed
withTempFolder {
assert(new File(folderName).list().length === 0)
val namePrefix = "test"
val name = namePrefix + ".log"
val handler = FileHandler(
filename = folderName + "/" + name,
rollPolicy = Policy.Hourly,
append = true,
rotateCount = 1,
formatter = BareFormatter
).apply()
// create a file without the '.log' suffix, which will sort before the target
new File(folderName, namePrefix).createNewFile()
def flush() = {
handler.publish(record1)
handler.roll()
assert(new File(folderName).list().length === 3)
}
// the target, 1 rotated file, and the short file should all remain
(1 to 5).foreach { _ => flush() }
val fileSet = new File(folderName).list().toSet
assert(fileSet.contains(name) === true)
assert(fileSet.contains(namePrefix) === true)
}
}
"correctly handles relative paths" in {
withTempFolder {
// user.dir will be replaced with the temp folder,
// and will be restored when the test is complete
val wdir = System.getProperty("user.dir")
try {
System.setProperty("user.dir", folderName)
val handler = FileHandler(
filename = "test.log", // Note relative path!
rollPolicy = Policy.Hourly,
append = true,
rotateCount = 2,
formatter = BareFormatter
).apply()
handler.publish(record1)
assert(new File(folderName).list().length === 1)
handler.roll()
handler.publish(record1)
assert(new File(folderName).list().length === 2)
handler.roll()
handler.publish(record1)
assert(new File(folderName).list().length === 2)
handler.close()
}
finally {
// restore user.dir to its original configuration
System.setProperty("user.dir", wdir)
}
}
}
"roll log files based on max size" in {
withTempFolder {
// roll the log on the 3rd write.
val maxSize = record1.getMessage.length * 3 - 1
assert(new File(folderName).list().length === 0)
val handler = FileHandler(
filename = folderName + "/test.log",
rollPolicy = Policy.MaxSize(maxSize.bytes),
append = true,
formatter = BareFormatter
).apply()
// move time forward so the rotated logfiles will have distinct names.
Time.withCurrentTimeFrozen { time =>
time.advance(1.second)
handler.publish(record1)
assert(new File(folderName).list().length === 1)
time.advance(1.second)
handler.publish(record1)
assert(new File(folderName).list().length === 1)
time.advance(1.second)
handler.publish(record1)
assert(new File(folderName).list().length === 2)
time.advance(1.second)
handler.publish(record1)
assert(new File(folderName).list().length === 2)
time.advance(1.second)
handler.publish(record1)
assert(new File(folderName).list().length === 3)
}
handler.close()
}
}
}
}
| travisbrown/util | util-logging/src/test/scala/com/twitter/logging/FileHandlerTest.scala | Scala | apache-2.0 | 9,899 |
package net.atos.kjc.fruitshop.checkoutsystem
/** Top-level object identifying the fruit-shop checkout system. */
object Checkout {
  /** Human-readable name of this checkout system. */
  def name: String = "Eco Fruit shop checkout system"
}
| kjcaputa/hmrc-test | src/main/scala/net/atos/kjc/fruitshop/checkoutsystem/Checkout.scala | Scala | apache-2.0 | 113 |
package scala.collection.mutable
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
import org.scalacheck._
// ScalaCheck properties for scala.collection.mutable.PriorityQueue. Each
// construction/mutation path is checked against the internal heap invariant,
// and dequeueAll is compared to sorting the source list (descending for the
// normal ordering, ascending for a reversed queue).
object MutablePriorityQueueTest extends Properties("PriorityQueue") {
  type E = Int // the element type used for most/all of the tests
  // Returns true iff pq's backing array satisfies the binary-heap property.
  def checkInvariant[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]): Boolean = {
    // The ordering invariant in the heap is that parent >= child.
    // A child at index i has a parent at index i/2 in the priority
    // queue's internal array. However, that array is padded with
    // an extra slot in front so that the first real element is at
    // index 1. The vector below is not padded, so subtract 1 from
    // every index.
    import ord._
    val vec = pq.toVector // elements in same order as pq's internal array
    2 until pq.size forall { i => vec(i/2-1) >= vec(i-1) }
  }
  property("newBuilder (in companion)") = forAll { list: List[E] =>
    val builder = PriorityQueue.newBuilder[E]
    for (x <- list) builder += x
    val pq = builder.result()
    checkInvariant(pq) &&
      pq.dequeueAll == list.sorted.reverse
  }
  property("to[PriorityQueue]") = forAll { list: List[E] =>
    val pq = list.to(PriorityQueue)
    checkInvariant(pq) &&
      pq.dequeueAll == list.sorted.reverse
  }
  property("apply (in companion)") = forAll { list: List[E] =>
    val pq = PriorityQueue.apply(list: _*)
    checkInvariant(pq) &&
      pq.dequeueAll == list.sorted.reverse
  }
  property("size, isEmpty") = forAll { list: List[E] =>
    val pq = PriorityQueue(list: _*)
    pq.size == list.size && pq.isEmpty == list.isEmpty
  }
  property("+=") = forAll { (x: E, list: List[E]) =>
    val pq = PriorityQueue(list: _*)
    pq += x
    checkInvariant(pq) &&
      pq.dequeueAll == (x :: list).sorted.reverse
  }
  property("++= on empty") = forAll { list: List[E] =>
    val pq = PriorityQueue.empty[E]
    pq ++= list
    checkInvariant(pq) &&
      pq.dequeueAll == list.sorted.reverse
  }
  property("++=") = forAll { (list1: List[E], list2: List[E]) =>
    val pq = PriorityQueue(list1: _*)
    pq ++= list2
    checkInvariant(pq) &&
      pq.dequeueAll == (list1 ++ list2).sorted.reverse
  }
  // reverse flips the ordering, so the invariant is checked with the reversed
  // Ordering and dequeueAll is expected in ascending order.
  property("reverse") = forAll { list: List[E] =>
    val pq = PriorityQueue(list: _*).reverse
    checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
      pq.dequeueAll == list.sorted
  }
  property("reverse then ++=") = forAll { list: List[E] =>
    val pq = PriorityQueue.empty[E].reverse ++= list
    checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
      pq.dequeueAll == list.sorted
  }
  property("reverse then +=") = forAll { (x: E, list: List[E]) =>
    val pq = PriorityQueue(list: _*).reverse += x
    checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
      pq.dequeueAll == (x +: list).sorted
  }
  // clone must produce a distinct object with the same contents.
  property("clone") = forAll { list: List[E] =>
    val pq = PriorityQueue(list: _*)
    val c = pq.clone()
    (pq ne c) &&
      checkInvariant(c) &&
      c.dequeueAll == pq.dequeueAll
  }
  // dequeue removes exactly the maximum element; only checked on non-empty input.
  property("dequeue") = forAll { list: List[E] =>
    list.nonEmpty ==> {
      val pq = PriorityQueue(list: _*)
      val x = pq.dequeue()
      checkInvariant(pq) &&
        x == list.max && pq.dequeueAll == list.sorted.reverse.tail
    }
  }
}
| scala/scala | test/scalacheck/scala/collection/mutable/MutablePriorityQueue.scala | Scala | apache-2.0 | 3,226 |
package sttp.client3.asynchttpclient.zio
import sttp.capabilities.zio.ZioStreams
import sttp.client3.SttpBackend
import sttp.client3.impl.zio.{ZioServerSentEvents, ZioTestBase}
import sttp.client3.internal._
import sttp.model.sse.ServerSentEvent
import sttp.client3.testing.ConvertToFuture
import sttp.client3.testing.streaming.StreamingTest
import zio.stream.Stream
import zio.{Chunk, Task}
// Runs the shared sttp StreamingTest suite against the async-http-client
// ZIO backend, adapting ZIO streams to the suite's producer/consumer hooks.
class AsyncHttpClientZioStreamingTest extends StreamingTest[Task, ZioStreams] with ZioTestBase {
  override val streams: ZioStreams = ZioStreams
  // Backend is created eagerly by running the ZIO effect on the test runtime.
  override val backend: SttpBackend[Task, ZioStreams] =
    runtime.unsafeRun(AsyncHttpClientZioBackend())
  override implicit val convertToFuture: ConvertToFuture[Task] = convertZioTaskToFuture
  // Builds a byte stream from the given byte-array chunks.
  override def bodyProducer(arrays: Iterable[Array[Byte]]): Stream[Throwable, Byte] =
    Stream.fromChunks(arrays.map(Chunk.fromArray).toSeq: _*)
  // Collects the full stream and decodes it as UTF-8 text.
  override def bodyConsumer(stream: Stream[Throwable, Byte]): Task[String] =
    stream.runCollect.map(bytes => new String(bytes.toArray, Utf8))
  // Parses the byte stream as server-sent events and collects them all.
  override def sseConsumer(stream: Stream[Throwable, Byte]): Task[List[ServerSentEvent]] =
    stream.viaFunction(ZioServerSentEvents.parse).runCollect.map(_.toList)
  override protected def supportsStreamingMultipartParts: Boolean = false
}
| softwaremill/sttp | async-http-client-backend/zio/src/test/scala/sttp/client3/asynchttpclient/zio/AsyncHttpClientZioStreamingTest.scala | Scala | apache-2.0 | 1,273 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.table.planner.expressions.utils.Func1
import org.apache.flink.table.planner.plan.optimize.program.{FlinkBatchProgram, FlinkHepRuleSetProgramBuilder, HEP_RULES_EXECUTION_TYPE}
import org.apache.flink.table.planner.utils.{TableConfigUtils, TableTestBase, TestFilterableTableSource}
import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.rel.rules.FilterProjectTransposeRule
import org.apache.calcite.tools.RuleSets
import org.junit.{Before, Test}
/**
 * Test for [[PushFilterIntoTableSourceScanRule]].
 *
 * Each case verifies the optimized plan for a query against a filterable
 * table source; the source only accepts predicates on the `amount` column,
 * so tests cover full, partial, and no push-down, with and without a
 * computed (virtual) column in the schema.
 */
class PushFilterIntoTableSourceScanRuleTest extends TableTestBase {
  private val util = batchTestUtil()
  @Before
  def setup(): Unit = {
    // Run the default rewrite program, then append a HEP program applying the
    // rule under test together with FilterProjectTransposeRule (so filters
    // sitting above projections can still reach the scan).
    util.buildBatchProgram(FlinkBatchProgram.DEFAULT_REWRITE)
    val calciteConfig = TableConfigUtils.getCalciteConfig(util.tableEnv.getConfig)
    calciteConfig.getBatchProgram.get.addLast(
      "rules",
      FlinkHepRuleSetProgramBuilder.newBuilder
        .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION)
        .setHepMatchOrder(HepMatchOrder.BOTTOM_UP)
        .add(RuleSets.ofList(PushFilterIntoTableSourceScanRule.INSTANCE,
          FilterProjectTransposeRule.INSTANCE))
        .build()
    )
    // name: STRING, id: LONG, amount: INT, price: DOUBLE
    util.tableEnv.registerTableSource("MyTable", TestFilterableTableSource(true))
    // Same source registered via DDL, with an extra computed column.
    val ddl =
      s"""
        |CREATE TABLE VirtualTable (
        | name STRING,
        | id bigint,
        | amount int,
        | virtualField as amount + 1,
        | price double
        |) with (
        | 'connector.type' = 'TestFilterableSource',
        | 'is-bounded' = 'true'
        |)
      """.stripMargin
    util.tableEnv.sqlUpdate(ddl)
  }
  @Test
  def testCanPushDown(): Unit = {
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2")
  }
  @Test
  def testCanPushDownWithVirtualColumn(): Unit = {
    util.verifyPlan("SELECT * FROM VirtualTable WHERE amount > 2")
  }
  @Test
  def testCannotPushDown(): Unit = {
    // TestFilterableTableSource only accept predicates with `amount`
    util.verifyPlan("SELECT * FROM MyTable WHERE price > 10")
  }
  @Test
  def testCannotPushDownWithVirtualColumn(): Unit = {
    // TestFilterableTableSource only accept predicates with `amount`
    util.verifyPlan("SELECT * FROM VirtualTable WHERE price > 10")
  }
  @Test
  def testPartialPushDown(): Unit = {
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2 AND price > 10")
  }
  @Test
  def testPartialPushDownWithVirtualColumn(): Unit = {
    util.verifyPlan("SELECT * FROM VirtualTable WHERE amount > 2 AND price > 10")
  }
  @Test
  def testFullyPushDown(): Unit = {
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2 AND amount < 10")
  }
  @Test
  def testFullyPushDownWithVirtualColumn(): Unit = {
    util.verifyPlan("SELECT * FROM VirtualTable WHERE amount > 2 AND amount < 10")
  }
  @Test
  def testPartialPushDown2(): Unit = {
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2 OR price > 10")
  }
  @Test
  def testPartialPushDown2WithVirtualColumn(): Unit = {
    util.verifyPlan("SELECT * FROM VirtualTable WHERE amount > 2 OR price > 10")
  }
  @Test
  def testCannotPushDown3(): Unit = {
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2 OR amount < 10")
  }
  @Test
  def testCannotPushDown3WithVirtualColumn(): Unit = {
    util.verifyPlan("SELECT * FROM VirtualTable WHERE amount > 2 OR amount < 10")
  }
  @Test
  def testUnconvertedExpression(): Unit = {
    val sqlQuery =
      """
        |SELECT * FROM MyTable WHERE
        |  amount > 2 AND id < 100 AND CAST(amount AS BIGINT) > 10
      """.stripMargin
    util.verifyPlan(sqlQuery)
  }
  @Test
  def testWithUdf(): Unit = {
    util.addFunction("myUdf", Func1)
    util.verifyPlan("SELECT * FROM MyTable WHERE amount > 2 AND myUdf(amount) < 32")
  }
}
| gyfora/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/PushFilterIntoTableSourceScanRuleTest.scala | Scala | apache-2.0 | 4,751 |
package akka.io.spdy
import akka.actor._
import com.typesafe.config.Config
import spray.can.HttpManager
| ktoso/akka-io-spdy-playground | src/main/scala/akka/io/Spdy.scala | Scala | apache-2.0 | 108 |
import org.infinispan.notifications.Listener
import org.infinispan.notifications.cachelistener.annotation.{CacheEntryInvalidated, CacheEntryVisited}
import org.infinispan.notifications.cachelistener.event.{CacheEntryInvalidatedEvent, CacheEntryVisitedEvent}
// Infinispan cache listener that prints entry-visited and entry-invalidated
// events to stdout. @Listener registers the annotated callback methods with
// the cache this instance is attached to.
@Listener
class CacheListener {
  // Invoked when a cache entry is visited (read).
  @CacheEntryVisited
  def cacheEntryVisited(event: CacheEntryVisitedEvent[_, _]): Unit =
    println(s"Entry Visited Event: $event")
  // Invoked when a cache entry is invalidated.
  @CacheEntryInvalidated
  def cacheEntryInvalidated(event: CacheEntryInvalidatedEvent[_, _]): Unit =
    println(s"Invalidated Event: $event")
}
| kazuhira-r/infinispan-examples | infinispan-l1-caching/src/main/scala/CacheListener.scala | Scala | mit | 572 |
package mesosphere.marathon.api
import javax.inject.Inject
import com.twitter.util.NonFatal
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.task.{ Task, TaskStateOp }
import mesosphere.marathon.core.task.tracker.{ TaskStateOpProcessor, TaskTracker }
import mesosphere.marathon.plugin.auth.{ Authenticator, Authorizer, Identity, UpdateRunSpec }
import mesosphere.marathon.state._
import mesosphere.marathon.upgrade.DeploymentPlan
import mesosphere.marathon.{ MarathonConf, MarathonSchedulerService, UnknownAppException }
import org.slf4j.LoggerFactory
import scala.async.Async.{ async, await }
import scala.concurrent.{ ExecutionContext, Future }
/**
 * Kills tasks of a Marathon app, optionally expunging them from the task
 * tracker (wipe) or scaling the app down alongside the kill. All operations
 * check that the caller is authorized to update the affected run spec.
 */
class TaskKiller @Inject() (
    taskTracker: TaskTracker,
    stateOpProcessor: TaskStateOpProcessor,
    groupManager: GroupManager,
    service: MarathonSchedulerService,
    val config: MarathonConf,
    val authenticator: Authenticator,
    val authorizer: Authorizer) extends AuthResource {
  private[this] val log = LoggerFactory.getLogger(getClass)
  /**
   * Kills the tasks of `appId` selected by `findToKill`. If `wipe` is true the
   * selected tasks are force-expunged from state before the kill is issued.
   * Fails with UnknownAppException if the app does not exist. Returns all
   * selected tasks (killed and expunged alike).
   */
  @SuppressWarnings(Array("all")) // async/await
  def kill(
    appId: PathId,
    findToKill: (Iterable[Task] => Iterable[Task]),
    wipe: Boolean = false)(implicit identity: Identity): Future[Iterable[Task]] = {
    result(groupManager.app(appId)) match {
      case Some(app) =>
        checkAuthorization(UpdateRunSpec, app)
        // TODO: We probably want to pass the execution context as an implcit.
        import scala.concurrent.ExecutionContext.Implicits.global
        async { // linter:ignore UnnecessaryElseBranch
          val allTasks = await(taskTracker.appTasks(appId))
          val foundTasks = findToKill(allTasks)
          if (wipe) await(expunge(foundTasks))
          // only tasks that were actually launched can be killed via the scheduler
          val launchedTasks = foundTasks.filter(_.launched.isDefined)
          if (launchedTasks.nonEmpty) await(service.killTasks(appId, launchedTasks))
          // Return killed *and* expunged tasks.
          // The user only cares that all tasks won't exist eventually. That's why we send all tasks back and not just
          // the killed tasks.
          foundTasks
        }
      case None => Future.failed(UnknownAppException(appId))
    }
  }
  // Force-expunges the given tasks one after another; failures are logged and
  // swallowed so one failed expunge does not abort the rest.
  private[this] def expunge(tasks: Iterable[Task])(implicit ec: ExecutionContext): Future[Unit] = {
    // Note: We process all tasks sequentially.
    tasks.foldLeft(Future.successful(())) { (resultSoFar, nextTask) =>
      resultSoFar.flatMap { _ =>
        log.info("Expunging {}", nextTask.taskId)
        stateOpProcessor.process(TaskStateOp.ForceExpunge(nextTask.taskId)).map(_ => ()).recover {
          case NonFatal(cause) =>
            log.info("Failed to expunge {}, got: {}", Array[Object](nextTask.taskId, cause): _*)
        }
      }
    }
  }
  /** Convenience overload: selects tasks of a single app via `findToKill`. */
  def killAndScale(
    appId: PathId,
    findToKill: (Iterable[Task] => Iterable[Task]),
    force: Boolean)(implicit identity: Identity): Future[DeploymentPlan] = {
    killAndScale(Map(appId -> findToKill(taskTracker.appTasksLaunchedSync(appId))), force)
  }
  /**
   * Kills the given tasks per app and scales each app down by the number of
   * tasks killed, via a single group update (deployment). Fails with
   * UnknownAppException if any referenced app has no tracked tasks.
   */
  def killAndScale(
    appTasks: Map[PathId, Iterable[Task]],
    force: Boolean)(implicit identity: Identity): Future[DeploymentPlan] = {
    // reduce the instance count of an app by the number of its tasks to kill
    def scaleApp(app: AppDefinition): AppDefinition = {
      checkAuthorization(UpdateRunSpec, app)
      appTasks.get(app.id).fold(app) { toKill => app.copy(instances = app.instances - toKill.size) }
    }
    // recursively apply scaleApp to every app in the group tree
    def updateGroup(group: Group): Group = {
      group.copy(apps = group.apps.mapValues(scaleApp), groupsById = group.groupsById.mapValues(updateGroup))
    }
    def killTasks = groupManager.update(
      PathId.empty,
      updateGroup,
      Timestamp.now(),
      force = force,
      toKill = appTasks
    )
    appTasks.keys.find(id => !taskTracker.hasAppTasksSync(id))
      .map(id => Future.failed(UnknownAppException(id)))
      .getOrElse(killTasks)
  }
}
| timcharper/marathon | src/main/scala/mesosphere/marathon/api/TaskKiller.scala | Scala | apache-2.0 | 3,839 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.{DataFrame, QueryTest, Row}
import org.apache.spark.sql.catalyst.expressions.AttributeSet
import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.test.SQLTestData._
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel._
class InMemoryColumnarQuerySuite extends QueryTest with SharedSQLContext {
import testImplicits._
setupTestData()
  // Caches `data` as an InMemoryRelation (MEMORY_ONLY, batch size 5),
  // verifies the buffers use the requested storage level and are CachedBatch
  // instances, and checks the cached relation returns the original rows.
  // `dataType` is only used to derive a unique temp-view name.
  private def cachePrimitiveTest(data: DataFrame, dataType: String) {
    data.createOrReplaceTempView(s"testData$dataType")
    val storageLevel = MEMORY_ONLY
    val plan = spark.sessionState.executePlan(data.logicalPlan).sparkPlan
    val inMemoryRelation = InMemoryRelation(useCompression = true, 5, storageLevel, plan, None)
    assert(inMemoryRelation.cachedColumnBuffers.getStorageLevel == storageLevel)
    inMemoryRelation.cachedColumnBuffers.collect().head match {
      case _: CachedBatch =>
      case other => fail(s"Unexpected cached batch type: ${other.getClass.getName}")
    }
    checkAnswer(inMemoryRelation, data.collect().toSeq)
  }
  // Builds a 10-row DataFrame whose columns cover boolean, integral, floating,
  // date, timestamp, and decimal types, then runs it through
  // cachePrimitiveTest. When `nullability` is true, every third row holds
  // null in each column.
  private def testPrimitiveType(nullability: Boolean): Unit = {
    val dataTypes = Seq(BooleanType, ByteType, ShortType, IntegerType, LongType,
      FloatType, DoubleType, DateType, TimestampType, DecimalType(25, 5), DecimalType(6, 5))
    val schema = StructType(dataTypes.zipWithIndex.map { case (dataType, index) =>
      StructField(s"col$index", dataType, nullability)
    })
    val rdd = spark.sparkContext.parallelize((1 to 10).map(i => Row(
      if (nullability && i % 3 == 0) null else if (i % 2 == 0) true else false,
      if (nullability && i % 3 == 0) null else i.toByte,
      if (nullability && i % 3 == 0) null else i.toShort,
      if (nullability && i % 3 == 0) null else i.toInt,
      if (nullability && i % 3 == 0) null else i.toLong,
      if (nullability && i % 3 == 0) null else (i + 0.25).toFloat,
      if (nullability && i % 3 == 0) null else (i + 0.75).toDouble,
      if (nullability && i % 3 == 0) null else new Date(i),
      if (nullability && i % 3 == 0) null else new Timestamp(i * 1000000L),
      if (nullability && i % 3 == 0) null else BigDecimal(Long.MaxValue.toString + ".12345"),
      if (nullability && i % 3 == 0) null
      else new java.math.BigDecimal(s"${i % 9 + 1}" + ".23456")
    )))
    cachePrimitiveTest(spark.createDataFrame(rdd, schema), "primitivesDateTimeStamp")
  }
private def tesNonPrimitiveType(nullability: Boolean): Unit = {
val struct = StructType(StructField("f1", FloatType, false) ::
StructField("f2", ArrayType(BooleanType), true) :: Nil)
val schema = StructType(Seq(
StructField("col0", StringType, nullability),
StructField("col1", ArrayType(IntegerType), nullability),
StructField("col2", ArrayType(ArrayType(IntegerType)), nullability),
StructField("col3", MapType(StringType, IntegerType), nullability),
StructField("col4", struct, nullability)
))
val rdd = spark.sparkContext.parallelize((1 to 10).map(i => Row(
if (nullability && i % 3 == 0) null else s"str${i}: test cache.",
if (nullability && i % 3 == 0) null else (i * 100 to i * 100 + i).toArray,
if (nullability && i % 3 == 0) null
else Array(Array(i, i + 1), Array(i * 100 + 1, i * 100, i * 100 + 2)),
if (nullability && i % 3 == 0) null else (i to i + i).map(j => s"key$j" -> j).toMap,
if (nullability && i % 3 == 0) null else Row((i + 0.25).toFloat, Seq(true, false, null))
)))
cachePrimitiveTest(spark.createDataFrame(rdd, schema), "StringArrayMapStruct")
}
test("primitive type with nullability:true") {
testPrimitiveType(true)
}
test("primitive type with nullability:false") {
testPrimitiveType(false)
}
test("non-primitive type with nullability:true") {
val schemaNull = StructType(Seq(StructField("col", NullType, true)))
val rddNull = spark.sparkContext.parallelize((1 to 10).map(i => Row(null)))
cachePrimitiveTest(spark.createDataFrame(rddNull, schemaNull), "Null")
tesNonPrimitiveType(true)
}
test("non-primitive type with nullability:false") {
tesNonPrimitiveType(false)
}
  // Scans an InMemoryRelation built directly from the testData plan and
  // checks it returns the original rows.
  test("simple columnar query") {
    val plan = spark.sessionState.executePlan(testData.logicalPlan).sparkPlan
    val scan = InMemoryRelation(useCompression = true, 5, MEMORY_ONLY, plan, None)
    checkAnswer(scan, testData.collect().toSeq)
  }
  // A cached table's estimated size must stay above the auto-broadcast
  // threshold so it is not accidentally broadcast.
  test("default size avoids broadcast") {
    // TODO: Improve this test when we have better statistics
    sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
      .toDF().createOrReplaceTempView("sizeTst")
    spark.catalog.cacheTable("sizeTst")
    assert(
      spark.table("sizeTst").queryExecution.analyzed.stats.sizeInBytes >
        spark.conf.get(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD))
  }
  // Column projection (value, key order swapped) over a cached relation.
  test("projection") {
    val plan = spark.sessionState.executePlan(testData.select('value, 'key).logicalPlan).sparkPlan
    val scan = InMemoryRelation(useCompression = true, 5, MEMORY_ONLY, plan, None)
    checkAnswer(scan, testData.collect().map {
      case Row(key: Int, value: String) => value -> key
    }.map(Row.fromTuple))
  }
  // Reading a subset of the cached columns must still work.
  test("access only some column of the all of columns") {
    val df = spark.range(1, 100).map(i => (i, (i + 1).toFloat)).toDF("i", "f")
    df.cache
    df.count // forced to build cache
    assert(df.filter("f <= 10.0").count == 9)
  }
  // Cached column buffers must be re-readable (not consumed by the first scan).
  test("SPARK-1436 regression: in-memory columns must be able to be accessed multiple times") {
    val plan = spark.sessionState.executePlan(testData.logicalPlan).sparkPlan
    val scan = InMemoryRelation(useCompression = true, 5, MEMORY_ONLY, plan, None)
    checkAnswer(scan, testData.collect().toSeq)
    checkAnswer(scan, testData.collect().toSeq)
  }
  // Each of the following compares query results before and after caching
  // the table, so a faulty compression/caching path would change the answer.
  test("SPARK-1678 regression: compression must not lose repeated values") {
    checkAnswer(
      sql("SELECT * FROM repeatedData"),
      repeatedData.collect().toSeq.map(Row.fromTuple))
    spark.catalog.cacheTable("repeatedData")
    checkAnswer(
      sql("SELECT * FROM repeatedData"),
      repeatedData.collect().toSeq.map(Row.fromTuple))
  }
  test("with null values") {
    checkAnswer(
      sql("SELECT * FROM nullableRepeatedData"),
      nullableRepeatedData.collect().toSeq.map(Row.fromTuple))
    spark.catalog.cacheTable("nullableRepeatedData")
    checkAnswer(
      sql("SELECT * FROM nullableRepeatedData"),
      nullableRepeatedData.collect().toSeq.map(Row.fromTuple))
  }
  test("SPARK-2729 regression: timestamp data type") {
    val timestamps = (0 to 3).map(i => Tuple1(new Timestamp(i))).toDF("time")
    timestamps.createOrReplaceTempView("timestamps")
    checkAnswer(
      sql("SELECT time FROM timestamps"),
      timestamps.collect().toSeq)
    spark.catalog.cacheTable("timestamps")
    checkAnswer(
      sql("SELECT time FROM timestamps"),
      timestamps.collect().toSeq)
  }
  test("SPARK-3320 regression: batched column buffer building should work with empty partitions") {
    checkAnswer(
      sql("SELECT * FROM withEmptyParts"),
      withEmptyParts.collect().toSeq.map(Row.fromTuple))
    spark.catalog.cacheTable("withEmptyParts")
    checkAnswer(
      sql("SELECT * FROM withEmptyParts"),
      withEmptyParts.collect().toSeq.map(Row.fromTuple))
  }
  // Caching rows containing complex (nested) types must not throw.
  test("SPARK-4182 Caching complex types") {
    complexData.cache().count()
    // Shouldn't throw
    complexData.count()
    complexData.unpersist()
  }
  // Fixed-precision decimals must round-trip through the cache unchanged.
  test("decimal type") {
    // Casting is required here because ScalaReflection can't capture decimal precision information.
    val df = (1 to 10)
      .map(i => Tuple1(Decimal(i, 15, 10).toJavaBigDecimal))
      .toDF("dec")
      .select($"dec" cast DecimalType(15, 10))
    assert(df.schema.head.dataType === DecimalType(15, 10))
    df.cache().createOrReplaceTempView("test_fixed_decimal")
    checkAnswer(
      sql("SELECT * FROM test_fixed_decimal"),
      (1 to 10).map(i => Row(Decimal(i, 15, 10).toJavaBigDecimal)))
  }
  // Round-trips a 10k-row table whose schema covers every supported column
  // type (including binary, null, array, and struct) through the cache.
  test("test different data types") {
    // Create the schema.
    val struct =
      StructType(
        StructField("f1", FloatType, true) ::
        StructField("f2", ArrayType(BooleanType), true) :: Nil)
    val dataTypes =
      Seq(StringType, BinaryType, NullType, BooleanType,
        ByteType, ShortType, IntegerType, LongType,
        FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
        DateType, TimestampType, ArrayType(IntegerType), struct)
    val fields = dataTypes.zipWithIndex.map { case (dataType, index) =>
      StructField(s"col$index", dataType, true)
    }
    val allColumns = fields.map(_.name).mkString(",")
    val schema = StructType(fields)
    // Create an RDD for the schema
    val rdd =
      sparkContext.parallelize(1 to 10000, 10).map { i =>
        Row(
          s"str$i: test cache.",
          s"binary$i: test cache.".getBytes(StandardCharsets.UTF_8),
          null,
          i % 2 == 0,
          i.toByte,
          i.toShort,
          i,
          Long.MaxValue - i.toLong,
          (i + 0.25).toFloat,
          i + 0.75,
          BigDecimal(Long.MaxValue.toString + ".12345"),
          new java.math.BigDecimal(s"${i % 9 + 1}" + ".23456"),
          new Date(i),
          new Timestamp(i * 1000000L),
          i to i + 10,
          Row((i - 0.25).toFloat, Seq(true, false, null)))
      }
    spark.createDataFrame(rdd, schema).createOrReplaceTempView("InMemoryCache_different_data_types")
    // Cache the table.
    sql("cache table InMemoryCache_different_data_types")
    // Make sure the table is indeed cached.
    spark.table("InMemoryCache_different_data_types").queryExecution.executedPlan
    assert(
      spark.catalog.isCached("InMemoryCache_different_data_types"),
      "InMemoryCache_different_data_types should be cached.")
    // Issue a query and check the results.
    checkAnswer(
      sql(s"SELECT DISTINCT ${allColumns} FROM InMemoryCache_different_data_types"),
      spark.table("InMemoryCache_different_data_types").collect())
    spark.catalog.dropTempView("InMemoryCache_different_data_types")
  }
  // String columns must survive the cache round-trip (clone-related bug).
  test("SPARK-10422: String column in InMemoryColumnarCache needs to override clone method") {
    val df = spark.range(1, 100).selectExpr("id % 10 as id")
      .rdd.map(id => Tuple1(s"str_$id")).toDF("i")
    val cached = df.cache()
    // count triggers the caching action. It should not throw.
    cached.count()
    // Make sure, the DataFrame is indeed cached.
    assert(spark.sharedState.cacheManager.lookupCachedData(cached).nonEmpty)
    // Check result.
    checkAnswer(
      cached,
      spark.range(1, 100).selectExpr("id % 10 as id")
        .rdd.map(id => Tuple1(s"str_$id")).toDF("i")
    )
    // Drop the cache.
    cached.unpersist()
  }
  // Predicates pushed into the cached scan must still filter correctly.
  test("SPARK-10859: Predicates pushed to InMemoryColumnarTableScan are not evaluated correctly") {
    val data = spark.range(10).selectExpr("id", "cast(id as string) as s")
    data.cache()
    assert(data.count() === 10)
    assert(data.filter($"s" === "3").count() === 1)
  }
  // Code generation for very wide cached rows must not blow the JVM/janino
  // method-size limits; generation succeeding is the assertion.
  test("SPARK-14138: Generated SpecificColumnarIterator can exceed JVM size limit for cached DF") {
    val length1 = 3999
    val columnTypes1 = List.fill(length1)(IntegerType)
    val columnarIterator1 = GenerateColumnAccessor.generate(columnTypes1)
    // SPARK-16664: the limit of janino is 8117
    val length2 = 8117
    val columnTypes2 = List.fill(length2)(IntegerType)
    val columnarIterator2 = GenerateColumnAccessor.generate(columnTypes2)
  }
  // batchStats must reflect the actual cached data size.
  test("SPARK-17549: cached table size should be correctly calculated") {
    val data = spark.sparkContext.parallelize(1 to 10, 5).toDF()
    val plan = spark.sessionState.executePlan(data.logicalPlan).sparkPlan
    val cached = InMemoryRelation(true, 5, MEMORY_ONLY, plan, None)
    // Materialize the data.
    val expectedAnswer = data.collect()
    checkAnswer(cached, expectedAnswer)
    // Check that the right size was calculated.
    assert(cached.batchStats.value === expectedAnswer.size * INT.defaultSize)
  }
  test("access primitive-type columns in CachedBatch without whole stage codegen") {
    // whole stage codegen is not applied to a row with more than WHOLESTAGE_MAX_NUM_FIELDS fields
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      // One value per primitive type; the first entry is a NullType null and the
      // last a null Int. data(i) must line up positionally with dataTypes(i).
      val data = Seq(null, true, 1.toByte, 3.toShort, 7, 15.toLong,
        31.25.toFloat, 63.75, new Date(127), new Timestamp(255000000L), null)
      val dataTypes = Seq(NullType, BooleanType, ByteType, ShortType, IntegerType, LongType,
        FloatType, DoubleType, DateType, TimestampType, IntegerType)
      val schemas = dataTypes.zipWithIndex.map { case (dataType, index) =>
        StructField(s"col$index", dataType, true)
      }
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      // persist + take(1) reads the row back through the in-memory column
      // accessors, with whole-stage codegen suppressed by the config above.
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }
  test("access decimal/string-type columns in CachedBatch without whole stage codegen") {
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      // Decimals of three different precisions (presumably exercising different
      // internal representations) plus a string column; data(i) must line up
      // positionally with schemas(i).
      val data = Seq(BigDecimal(Long.MaxValue.toString + ".12345"),
        new java.math.BigDecimal("1234567890.12345"),
        new java.math.BigDecimal("1.23456"),
        "test123"
      )
      val schemas = Seq(
        StructField("col0", DecimalType(25, 5), true),
        StructField("col1", DecimalType(15, 5), true),
        StructField("col2", DecimalType(6, 5), true),
        StructField("col3", StringType, true)
      )
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      // persist + take(1) forces the row to round-trip through the cached batch.
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }
  test("access non-primitive-type columns in CachedBatch without whole stage codegen") {
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      // Complex-typed values: array, nested array, map, and struct;
      // data(i) must line up positionally with schemas(i).
      val data = Seq((1 to 10).toArray,
        Array(Array(10, 11), Array(100, 111, 123)),
        Map("key1" -> 111, "key2" -> 222),
        Row(1.25.toFloat, Seq(true, false, null))
      )
      val struct = StructType(StructField("f1", FloatType, false) ::
        StructField("f2", ArrayType(BooleanType), true) :: Nil)
      val schemas = Seq(
        StructField("col0", ArrayType(IntegerType), true),
        StructField("col1", ArrayType(ArrayType(IntegerType)), true),
        StructField("col2", MapType(StringType, IntegerType), true),
        StructField("col3", struct, true)
      )
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      // persist + take(1) forces the row through the cached-batch read path.
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }
  test("InMemoryTableScanExec should return correct output ordering and partitioning") {
    // Two identically-defined cached dataframes, repartitioned and sorted by _1.
    val df1 = Seq((0, 0), (1, 1)).toDF
      .repartition(col("_1")).sortWithinPartitions(col("_1")).persist
    val df2 = Seq((0, 0), (1, 1)).toDF
      .repartition(col("_1")).sortWithinPartitions(col("_1")).persist
    // Because two cached dataframes have the same logical plan, this is a self-join actually.
    // So we force one of in-memory relation to alias its output. Then we can test if original and
    // aliased in-memory relations have correct ordering and partitioning.
    val joined = df1.joinWith(df2, df1("_1") === df2("_1"))
    val inMemoryScans = joined.queryExecution.executedPlan.collect {
      case m: InMemoryTableScanExec => m
    }
    inMemoryScans.foreach { inMemoryScan =>
      // Every attribute referenced by the scan's output ordering must be
      // produced by the scan itself.
      val sortedAttrs = AttributeSet(inMemoryScan.outputOrdering.flatMap(_.references))
      assert(sortedAttrs.subsetOf(inMemoryScan.outputSet))
      // repartition(col) is expected to yield HashPartitioning here, hence the cast.
      val partitionedAttrs =
        inMemoryScan.outputPartitioning.asInstanceOf[HashPartitioning].references
      assert(partitionedAttrs.subsetOf(inMemoryScan.outputSet))
    }
  }
  test("SPARK-20356: pruned InMemoryTableScanExec should have correct ordering and partitioning") {
    withSQLConf("spark.sql.shuffle.partitions" -> "200") {
      val df1 = Seq(("a", 1), ("b", 1), ("c", 2)).toDF("item", "group")
      val df2 = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("item", "id")
      val df3 = df1.join(df2, Seq("item")).select($"id", $"group".as("item")).distinct()
      // Drop any existing cache entry for df3 before building the baseline aggregation.
      df3.unpersist()
      val agg_without_cache = df3.groupBy($"item").count()
      // Cache df3 and build the same aggregation again, so it reads through the
      // (pruned) in-memory scan.
      df3.cache()
      val agg_with_cache = df3.groupBy($"item").count()
      // Both aggregations must agree.
      checkAnswer(agg_without_cache, agg_with_cache)
    }
  }
}
| SHASHANKB/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala | Scala | apache-2.0 | 17,464 |
package com.github.mdr.ascii.diagram.parser
import org.scalatest.{ Matchers, FlatSpec }
import com.github.mdr.ascii.graph.Graph
import com.github.mdr.ascii.diagram.Diagram
class Issue1DirectedEdgeNotParsedTest extends FlatSpec with Matchers {
"Parser" should "detect directed edge" in {
val diagram = Diagram("""
+-------+
| op1 |
+-------+
|
|
----
|
v
+-----+
|gbk1 |
+-----+ """)
    {
      // Diagram-level view: op1 has exactly one edge, and that edge carries an
      // arrowhead only at its second end (hasArrow2), i.e. it is directed.
      val boxes = diagram.allBoxes
      val Some(op1) = boxes.find(_.text.contains("op1"))
      val Some(gbk1) = boxes.find(_.text.contains("gbk1"))
      val List(edge) = op1.edges
      edge.hasArrow1 should be(false)
      edge.hasArrow2 should be(true)
    }
    {
      // Graph-level view: converting the diagram must yield exactly one edge,
      // oriented from the op1 vertex to the gbk1 vertex.
      val graph = Graph.fromDiagram(diagram)
      val Some(op1) = graph.vertices.find(_ contains ("op1"))
      val Some(gbk1) = graph.vertices.find(_ contains ("gbk1"))
      val List(edge) = graph.edges
      edge should be((op1, gbk1))
    }
}
} | jlmauduy/ascii-graphs | src/test/scala/com/github/mdr/ascii/diagram/parser/Issue1DirectedEdgeNotParsedTest.scala | Scala | mit | 1,134 |
package io.github.reggert.reb4s.charclass
import java.lang.Character.UnicodeBlock
import io.github.reggert.reb4s.{Expression, Alternative, Quantifiable, Sequenceable}
/**
* Base class representing an expression that matches a single character
* within a class of characters.
*/
@SerialVersionUID(1L)
abstract class CharClass extends Expression
	with Alternative
	with Sequenceable
	with Quantifiable
{
	/** Alias for [[negated]]: matches any single character NOT in this class. */
	final def unary_~ = negated

	/** An expression matching any single character not matched by this class. */
	def negated : CharClass

	/** Form usable inside square brackets when merged with other classes. */
	protected[charclass] def unitableForm : String

	/** Form usable on its own, outside square brackets. */
	protected[charclass] def independentForm : String

	// The expression string of a character class is its stand-alone form.
	final override def expression = independentForm

	/** Intersection of this class with another character class. */
	def && (right : CharClass) : Intersection =
		new Intersection(List(this, right))

	/** Intersection of this class with all members of an existing intersection. */
	def && (right : Intersection) : Intersection =
		new Intersection(this::right.supersets)

	/** Word alias for [[&&]]. */
	final def intersect (right : CharClass) = this && right

	/** Word alias for [[&&]]. */
	final def intersect (right : Intersection) = this && right

	/** Union of this class with all members of an existing union. */
	def || (right : Union) : Union =
		new Union(this::right.subsets)

	/** Union of this class with another character class. */
	def || (right : CharClass) : Union =
		new Union(List(this, right))

	/** Word alias for [[||]]. */
	final def union (right : Union) : Union = this || right

	/** Word alias for [[||]]. */
	final def union (right : CharClass) : Union = this || right

	/** Word alias for [[||]]. */
	final def or (right : Union) : Union = this || right

	/** Word alias for [[||]]. */
	final def or (right : CharClass) : Union = this || right

	// A character class always matches exactly one character, so its length is
	// bounded, never zero, and repetition keeps it bounded.
	protected[reb4s] override final def boundedLength = Some(1)
	protected[reb4s] override final def repetitionInvalidatesBounds : Boolean = false
	protected[reb4s] override final def possiblyZeroLength : Boolean = false
}
/**
 * Factory object for creating character class expressions.
 */
object CharClass
{
	/** Matches exactly the single specified character. */
	def char(c : Char) = new SingleChar(c)

	/** Matches any single occurrence of any of the specified characters. */
	def chars(cs : Set[Char]) = new MultiChar(cs)

	/** Matches any single occurrence of any of the specified characters. */
	def chars(cs : Traversable[Char]) = new MultiChar(cs.toSet)

	/**
	 * Matches any single character within the specified range.
	 *
	 * @param first the lowest value character that will match
	 * @param last the highest value character that will match
	 * @throws IllegalArgumentException if `first` is not less than `last`
	 */
	def range(first : Char, last : Char) = new CharRange(first, last)

	/** Perl-style predefined character classes. */
	object Perl
	{
		/** Matches a single decimal digit, like `\d`. */
		val Digit = new PredefinedClass('d')
		/** Matches a single whitespace character, like `\s`. */
		val Space = new PredefinedClass('s')
		/** Matches a single "word" character, like `\w`. */
		val Word = new PredefinedClass('w')
	}

	/** POSIX-style predefined character classes. */
	object Posix
	{
		/** Matches a lowercase letter. */
		val Lower = new NamedPredefinedClass("Lower")
		/** Matches an uppercase letter. */
		val Upper = new NamedPredefinedClass("Upper")
		/** Matches an alphabetical letter. */
		val Alpha = new NamedPredefinedClass("Alpha")
		/** Matches a decimal digit. */
		val Digit = new NamedPredefinedClass("Digit")
		/** Matches a letter or digit. */
		val Alnum = new NamedPredefinedClass("Alnum")
		/** Matches a punctuation character. */
		val Punct = new NamedPredefinedClass("Punct")
		/** Matches a graphical (visible) character. */
		val Graph = new NamedPredefinedClass("Graph")
		/** Matches any printable character. */
		val Print = new NamedPredefinedClass("Print")
		/** Matches a space or tab. */
		val Blank = new NamedPredefinedClass("Blank")
		/** Matches a control character. */
		val Control = new NamedPredefinedClass("Cntrl")
		/** Matches a hexadecimal digit. */
		val HexDigit = new NamedPredefinedClass("XDigit")
		/** Matches any whitespace character. */
		val Space = new NamedPredefinedClass("Space")
	}

	/**
	 * Predefined character classes matching character traits defined by the
	 * java.lang.Character class.
	 */
	object Java
	{
		/** Matches any single lowercase letter. */
		val LowerCase = new NamedPredefinedClass("javaLowerCase")
		/** Matches any single uppercase letter. */
		val UpperCase = new NamedPredefinedClass("javaUpperCase")
		/** Matches any single whitespace character. */
		val Whitespace = new NamedPredefinedClass("javaWhitespace")
		/** Matches any single "mirrored" character. */
		val Mirrored = new NamedPredefinedClass("javaMirrored")
	}

	/**
	 * Predefined character classes for Unicode blocks and the Unicode general
	 * categories (per the Unicode Specification; each value's comment names the
	 * two-letter category it matches).
	 */
	object Unicode
	{
		private type UnicodeBlock = java.lang.Character.UnicodeBlock

		// Shorthand for building a named predefined class.
		private def % (className : String) = new NamedPredefinedClass(className)

		/** Matches any single character within the specified Unicode block. */
		def block(unicodeBlock : UnicodeBlock) = %("In" + unicodeBlock.toString)

		/** Unicode general categories L* (letters). */
		object Letter
		{
			/** Lu: Letter, uppercase. */
			val Uppercase = %("Lu")
			/** Ll: Letter, lowercase. */
			val Lowercase = %("Ll")
			/** Lt: Letter, titlecase. */
			val Titlecase = %("Lt")
			/** Lm: Letter, modifier. */
			val Modifier = %("Lm")
			/** Lo: Letter, other. */
			val Other = %("Lo")
		}

		/** Unicode general categories M* (marks). */
		object Mark
		{
			/** Mn: Mark, nonspacing. */
			val Nonspacing = %("Mn")
			/** Mc: Mark, spacing combining. */
			val SpacingCombining = %("Mc")
			/** Me: Mark, enclosing. */
			val Enclosing = %("Me")
		}

		/** Unicode general categories N* (numbers). */
		object Number
		{
			/** Nd: Number, decimal digit. */
			val DecimalDigit = %("Nd")
			/** Nl: Number, letter. */
			val Letter = %("Nl")
			/** No: Number, other. */
			val Other = %("No")
		}

		/** Unicode general categories Z* (separators). */
		object Separator
		{
			/** Zs: Separator, space. */
			val Space = %("Zs")
			/** Zl: Separator, line. */
			val Line = %("Zl")
			/** Zp: Separator, paragraph. */
			val Paragraph = %("Zp")
		}

		/** Unicode general categories C* ("other" characters). */
		object Other
		{
			/** Cc: Other, control. */
			val Control = %("Cc")
			/** Cf: Other, format. */
			val Format = %("Cf")
			/** Cs: Other, surrogate. */
			val Surrogate = %("Cs")
			/** Co: Other, private use. */
			val PrivateUse = %("Co")
			/** Cn: Other, not assigned (including noncharacters). */
			val NotAssigned = %("Cn")
		}

		/** Unicode general categories P* (punctuation). */
		object Punctuation
		{
			/** Pc: Punctuation, connector. */
			val Connector = %("Pc")
			/** Pd: Punctuation, dash. */
			val Dash = %("Pd")
			/**
			 * Ps: Punctuation, open.
			 * Fixed: this was previously bound to "Po" (Punctuation, other),
			 * which made `Open` a duplicate of [[Other]] instead of matching
			 * opening punctuation such as brackets.
			 */
			val Open = %("Ps")
			/** Pe: Punctuation, close. */
			val Close = %("Pe")
			/** Pi: Punctuation, initial quote (may behave like Ps or Pe depending on usage). */
			val InitialQuote = %("Pi")
			/** Pf: Punctuation, final quote (may behave like Ps or Pe depending on usage). */
			val FinalQuote = %("Pf")
			/** Po: Punctuation, other. */
			val Other = %("Po")
		}

		/** Unicode general categories S* (symbols). */
		object Symbol
		{
			/** Sm: Symbol, math. */
			val Math = %("Sm")
			/** Sc: Symbol, currency. */
			val Currency = %("Sc")
			/** Sk: Symbol, modifier. */
			val Modifier = %("Sk")
			/** So: Symbol, other. */
			val Other = %("So")
		}
	}
}
| reggert/reb4s | src/main/scala/io/github/reggert/reb4s/charclass/CharClass.scala | Scala | lgpl-3.0 | 11,880 |
package actors
import actors.Classifier.{ClassificationResult, Classify}
import actors.FetchResponseHandler.FetchResponseTimeout
import actors.TrainingModelResponseHandler.TrainingModelRetrievalTimeout
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import controllers.OAuthKeys
import org.scalatest.{BeforeAndAfterEach, MustMatchers, WordSpecLike}
import play.api.libs.oauth.{ConsumerKey, RequestToken}
import twitter.LabeledTweet
import scala.concurrent.duration._
class ClassifierSpec extends TestKit(ActorSystem("ClassifierSpecAS"))
  with ImplicitSender
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterEach
  with SparkTestContext {

  // Dummy OAuth credentials; the stubbed actors never contact Twitter.
  private val oAuthKeys = OAuthKeys(ConsumerKey("", ""), RequestToken("", ""))

  /**
   * Wires a Classifier actor to the given twitter-handler / online-trainer stub
   * Props (the batch trainer and predictor stubs are always the same).
   *
   * Actor names must be unique per ActorSystem, so tests that reuse the same
   * stub types pass a distinct `namePrefix`; `None` keeps the bare base names.
   */
  private def createClassifier(
      twitterProps: Props,
      onlineProps: Props,
      namePrefix: Option[String] = None): akka.actor.ActorRef = {
    def actorName(base: String) = namePrefix.fold(base)(prefix => s"$prefix-$base")
    val twitterHandler = system.actorOf(twitterProps, actorName("twitter-handler"))
    val onlineTrainer = system.actorOf(onlineProps, actorName("online-trainer"))
    val batchTrainer = system.actorOf(Props[BatchTrainerProxyStub], actorName("batch-trainer"))
    val estimator = new PredictorProxyStub()
    system.actorOf(Props(new Classifier(sc, twitterHandler, onlineTrainer, batchTrainer, estimator)))
  }

  "A classifier" should {

    "return a list of classified tweets" in {
      val classifier = createClassifier(Props[TwitterHandlerProxyStub], Props[OnlineTrainerProxyStub])
      val probe = TestProbe()
      within(1 seconds) {
        probe.send(classifier, Classify("apple", oAuthKeys))
        val result = probe.expectMsgType[ClassificationResult]
        // Both stubbed models return the same labeled tweets.
        val labeledTweets = Array(LabeledTweet("The new Apple iPhone 6s is awesome", "1.0"), LabeledTweet("Apple is overpriced.", "0.0"))
        result.batchModelResult must equal(labeledTweets)
        result.onlineModelResult must equal(labeledTweets)
      }
    }

    "return a FetchResponseTimeout when timeout in twitter handler is exceeded" in {
      val classifier = createClassifier(
        Props[TimingOutTwitterHandlerProxyStub],
        Props[OnlineTrainerProxyStub],
        namePrefix = Some("twitter-timing-out"))
      val probe = TestProbe()
      within(2 second, 3 seconds) {
        probe.send(classifier, Classify("apple", oAuthKeys))
        probe.expectMsg(FetchResponseTimeout)
      }
    }

    "return a TrainingModelRetrievalTimeout when timeout in training model response handler is exceeded" in {
      val classifier = createClassifier(
        Props[TwitterHandlerProxyStub],
        Props[TimingOutOnlineTrainerProxyStub],
        namePrefix = Some("trainer-timing-out"))
      val probe = TestProbe()
      within(3 second, 4 seconds) {
        probe.send(classifier, Classify("apple", oAuthKeys))
        probe.expectMsg(4 seconds, TrainingModelRetrievalTimeout)
      }
    }
  }
}
| openforce/spark-mllib-scala-play | test/actors/ClassifierSpec.scala | Scala | apache-2.0 | 3,524 |
package ir.ast
import lift.arithmetic.ArithExpr
import ir.interpreter.Interpreter._
import ir._
/**
* Representation of a "user function" declaration which usually operates on scala values.
* @param name The name of the function. This has to follow standard C naming conventions.
* @param paramNames The array of parameter names.
* @param body The body of the function as a string. The body currently must be valid OpenCL C code.
* @param inTs The types of the parameters. The size and order has to match with `paramNames`.
* @param outT The return type of the user function.
*/
case class UserFun(name: String, paramNames: Array[String], body: String,
                   inTs: Seq[Type], outT: Type)
  extends FunDecl(inTs.length) {

  // enforce at runtime that types and names match
  if (paramNames.length != inTs.length || !namesAndTypesMatch())
    throw new IllegalArgumentException(s"Structure of parameter names ( $paramNamesString ) " +
      s"and the input type ( $inT ) doesn't match!")

  // Optional Scala-level implementation of this function, used by eval below.
  // Remains null until setScalaFun is called.
  var scalaFun: Seq[Any] => Any = null

  /**
   * Attaches a Scala implementation so this function can be evaluated
   * (see [[eval]]).
   * @return this instance, allowing chained calls
   */
  def setScalaFun(f: Seq[Any] => Any): UserFun = {
    scalaFun = f
    this
  }

  /**
   * Represent the types of the parameters as a single type.
   * @return If there are multiple parameters a tuple type is returned.
   *         Otherwise, the type of the single parameter is returned.
   */
  def inT = if (inTs.size == 1) inTs.head else TupleType(inTs:_*)

  /**
   * Returns the single name, or multiple names of the parameters.
   * @return If there is only a single parameter return the name of it.
   *         Otherwise return the array of parameter names.
   */
  def paramName = if (paramNames.length == 1) paramNames.head else paramNames

  /**
   * Vectorize the current function.
   *
   * The result is a new UserFun named s"$name$n" with vectorized input/output
   * types. Its Scala implementation (if this one has any) applies the scalar
   * function element-wise by transposing the argument vectors.
   * @param n The vector width
   * @return the vectorized user function (setScalaFun returns the new instance)
   */
  def vectorize(n: ArithExpr): UserFun = {
    val uf = new UserFun(s"$name$n", paramNames, body,
      inTs.map(_.vectorize(n)), outT.vectorize(n))
    uf.setScalaFun(xs => {
      xs.asInstanceOf[Seq[Vector[Any]]].transpose.map(scalaFun).toVector
    })
  }

  /**
   * Get all unique tuple types from the types of this user function.
   * @return A sequence of tuple types used in the definition of this user function.
   */
  def tupleTypes: Seq[TupleType] = {
    (inTAsTupleType ++ outTAsTupleType).distinct
  }

  // Reifies the declared input type against the actual argument type and
  // substitutes the resulting bindings into the declared output type.
  override def checkType(argType: Type,
                         setType: Boolean): Type = {
    val substitutions = Type.reify(inT, argType)
    Type.substitute(outT, substitutions.toMap)
  }

  // Evaluates the function via the attached Scala implementation.
  // Throws NotImplementedError if setScalaFun was never called.
  def eval(valueMap: ValueMap, args: Any*): Any = {
    if (scalaFun != null) {
      scalaFun(Seq(args:_*))
    } else {
      throw new NotImplementedError()
    }
  }

  /**
   * Return a tuple type if the output type is one, otherwise return an empty sequence.
   * @return If the output type is a tuple return its type, otherwise return an empty sequence.
   */
  private def outTAsTupleType: Seq[TupleType] = {
    outT match {
      case tt: TupleType => Seq(tt)
      case _ => Seq()
    }
  }

  /**
   * Combine the type of the parameters and their name to figure out what names are mapping to
   * tuples and what are mapping to scalar values.
   *
   * @return A sequence of tuple types referred to in the parameter types.
   */
  private def inTAsTupleType: Seq[TupleType] = {
    // Recursively walk the (type, name) structure in lockstep, collecting
    // every tuple type that is paired with a single (un-destructured) name.
    def emit(param: (Type, Any)): Seq[TupleType] = {
      param match {
        case (tt: TupleType, _:String) => Seq(tt)
        case (tt: TupleType, names: Array[Any]) =>
          (tt.elemsT zip names).flatMap { case (t, n) => emit((t, n)) }
        case _ => Seq()
      }
    }
    emit((inT, paramName))
  }

  // function for checking that names and types match
  private def namesAndTypesMatch(): Boolean = {
    // A single name may stand for a scalar, vector, or whole tuple; an array of
    // names must destructure a tuple of exactly the same arity.
    def checkParam(param: (Type, Any)): Boolean = {
      param match {
        case (_:ScalarType, _: String) => true
        case (_:VectorType, _: String) => true
        case (_:TupleType, _: String) => true
        case (tt:TupleType, names: Array[String]) =>
          if (tt.elemsT.length != names.length) false
          else (tt.elemsT zip names).forall( {case (t,n) => checkParam( (t,n) )} )
        case _ => false
      }
    }
    checkParam( (inT, paramName) )
  }

  // format parameter names
  private lazy val paramNamesString: String = {
    def printAny(arg: Any): String = arg match {
      case a: Array[Any] => "Array(" + a.map(printAny).reduce(_+", "+_) + ")"
      case _ => arg.toString
    }
    printAny(paramName)
  }

  // for debug purposes
  override def toString = name

  // NOTE(review): hashCode mixes in super.hashCode() but equals below does not
  // consult the superclass; if FunDecl.hashCode is not value-based, two
  // instances that compare equal may hash differently — verify against FunDecl.
  override def hashCode(): Int = {
    val state = Seq(name, paramNamesString, body, inTs, outT, super.hashCode())
    state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
  }

  // Structural equality; paramNames is an Array, hence sameElements.
  override def equals(other: Any): Boolean = other match {
    case that: UserFun =>
      that.name == this.name &&
        that.paramNames.sameElements(this.paramNames) &&
        that.body == this.body &&
        that.inTs == this.inTs &&
        that.outT == outT
    case _ => false
  }
}
/**
 * A deferred vectorization of a [[UserFun]]: the vectorized function itself is
 * only created lazily when `vectorizedFunction` is first accessed.
 *
 * @param n the vector width
 * @param userFun the scalar user function to vectorize
 */
case class VectorizeUserFun(n: ArithExpr, userFun: UserFun)
  extends FunDecl(userFun.inTs.length) {

  // Mirrors UserFun.checkType, but reifies/substitutes against the vectorized
  // input and output types of the wrapped function.
  override def checkType(argType: Type,
                         setType: Boolean): Type = {
    val substitutions = Type.reify(userFun.inT.vectorize(n), argType)
    Type.substitute(userFun.outT.vectorize(n), substitutions.toMap)
  }

  // Built on first access; vectorizing renames the function to s"$name$n".
  lazy val vectorizedFunction = userFun.vectorize(n)
}
/**
 * Companion providing convenience constructors for [[UserFun]].
 */
object UserFun {
  /**
   * Creates a single-parameter [[UserFun]] without requiring the caller to wrap
   * the parameter name and type in collections.
   *
   * @param name the function name (must follow standard C naming conventions)
   * @param paramName the single parameter's name
   * @param body the body of the function as valid OpenCL C code
   * @param inT the single parameter's type
   * @param outT the return type of the user function
   */
  def apply(name: String, paramName: String, body: String,
            inT: Type, outT: Type): UserFun = {
    UserFun(name, Array(paramName), body, Seq(inT), outT)
  }
}
| lift-project/lift | src/main/ir/ast/UserFun.scala | Scala | mit | 6,148 |
package io.getquill.norm
import io.getquill.ast._
import io.getquill.ast.Implicits._
import io.getquill.norm.capture.AvoidAliasConflict
/**
* When actions are used with a `.returning` clause, remove the columns used in the returning clause from the action.
* E.g. for `insert(Person(id, name)).returning(_.id)` remove the `id` column from the original insert.
*/
object NormalizeReturning {
  /**
   * Normalizes the returning clause of an action.
   *
   * For `ReturningGenerated`, the columns mentioned in the returning body are
   * also removed from the underlying insert/update (via the private `apply`);
   * for a plain `Returning`, only the body is de-aliased. Any other action
   * passes through unchanged.
   */
  def apply(e: Action): Action = {
    e match {
      case ReturningGenerated(a: Action, alias, body) =>
        // De-alias the body first so variable shadows won't accidentally be interpreted as columns to remove from the insert/update action.
        // This typically occurs in advanced cases where actual queries are used in the return clauses which is only supported in Postgres.
        // For example:
        //   query[Entity].insert(lift(Person(id, name))).returning(t => (query[Dummy].map(t => t.id).max))
        // Since the property `t.id` is used both for the `returning` clause and the query inside, it can accidentally
        // be seen as a variable used in `returning` hence excluded from insertion which is clearly not the case.
        // In order to fix this, we need to change `t` into a different alias.
        val newBody = dealiasBody(body, alias)
        ReturningGenerated(apply(a, newBody, alias), alias, newBody)

      // For a regular return clause, we do not need to exclude assignments from insertion; however, we still
      // need to de-alias the Action body in case conflicts result. For example the following query:
      //   query[Entity].insert(lift(Person(id, name))).returning(t => (query[Dummy].map(t => t.id).max))
      // would incorrectly be interpreted as:
      //   INSERT INTO Person (id, name) VALUES (1, 'Joe') RETURNING (SELECT MAX(id) FROM Dummy t) -- Note the 'id' in max which is coming from the inserted table instead of t
      // whereas it should be:
      //   INSERT INTO Entity (id) VALUES (1) RETURNING (SELECT MAX(t.id) FROM Dummy t1)
      case Returning(a: Action, alias, body) =>
        val newBody = dealiasBody(body, alias)
        Returning(a, alias, newBody)

      case _ => e
    }
  }
  /**
   * In some situations, a query can exist inside of a `returning` clause. In this case, we need to rename
   * if the aliases used in that query override the alias used in the `returning` clause otherwise
   * they will be treated as returning-clause aliases ExpandReturning (i.e. they will become ExternalAlias instances)
   * and later be tokenized incorrectly.
   *
   * @param body the AST of the returning clause
   * @param alias the alias bound by the returning clause, which nested queries must not shadow
   */
  private def dealiasBody(body: Ast, alias: Ident): Ast =
    Transform(body) {
      // Rename any query-level alias that would shadow `alias` inside the body.
      case q: Query => AvoidAliasConflict.sanitizeQuery(q, Set(alias.idName))
    }
  // Removes assignments that set columns mentioned in the returning body from
  // insert/update actions; OnConflict recurses into its wrapped action.
  private def apply(e: Action, body: Ast, returningIdent: Ident): Action = e match {
    case Insert(query, assignments) => Insert(query, filterReturnedColumn(assignments, body, returningIdent))
    case Update(query, assignments) => Update(query, filterReturnedColumn(assignments, body, returningIdent))
    case OnConflict(a: Action, target, act) => OnConflict(apply(a, body, returningIdent), target, act)
    case _ => e
  }

  // Applies the single-assignment filter below to each assignment, dropping the
  // ones whose column appears in the returning clause.
  private def filterReturnedColumn(assignments: List[Assignment], column: Ast, returningIdent: Ident): List[Assignment] =
    assignments.flatMap(filterReturnedColumn(_, column, returningIdent))
/**
* In situations like Property(Property(ident, foo), bar) pull out the inner-most ident
*/
object NestedProperty {
def unapply(ast: Property): Option[Ast] = {
ast match {
case p @ Property(subAst, _) => Some(innerMost(subAst))
case _ => None
}
}
private def innerMost(ast: Ast): Ast = ast match {
case Property(inner, _) => innerMost(inner)
case other => other
}
}
  /**
   * Remove the specified column from the assignment. For example, in a query like `insert(Person(id, name)).returning(r => r.id)`
   * we need to remove the `id` column from the insertion. The value of the `column:Ast` in this case will be `Property(Ident(r), id)`
   * and the values fo the assignment `p1` property will typically be `v.id` and `v.name` (the `v` variable is a default
   * used for `insert` queries).
   */
  private def filterReturnedColumn(assignment: Assignment, body: Ast, returningIdent: Ident): Option[Assignment] =
    assignment match {
      case Assignment(_, p1: Property, _) => {
        // Pull out instance of the column usage. The `column` ast will typically be Property(table, field) but
        // if the user wants to return multiple things it can also be a tuple Tuple(List(Property(table, field1), Property(table, field2))
        // or it can even be a query since queries are allowed to be in return sections e.g:
        //   query[Entity].insert(lift(Person(id, name))).returning(r => (query[Dummy].filter(t => t.id == r.id).max))
        // In all of these cases, we need to pull out the Property (e.g. t.id) in order to compare it to the assignment
        // in order to know what to exclude.
        // Only properties anchored (at any depth) on the returning alias count.
        val matchedProps =
          CollectAst(body) {
            case prop @ NestedProperty(Ident(name, quat)) if (name == returningIdent.name) => prop
            case prop @ NestedProperty(ExternalIdent(name, quat)) if (name == returningIdent.name) => prop
          }
        // Drop the assignment if its target column matches any returned property.
        if (matchedProps.exists(matchedProp => isSameProperties(p1, matchedProp)))
          None
        else
          Some(assignment)
      }
      // Assignments that do not target a Property are never removed.
      case assignment => Some(assignment)
    }
  // Matches either kind of identifier node (Ident or ExternalIdent) so the
  // property comparison below works regardless of which one anchors a chain.
  object SomeIdent {
    def unapply(ast: Ast): Option[Ast] =
      ast match {
        case id: Ident => Some(id)
        case id: ExternalIdent => Some(id)
        case _ => None
      }
  }
  /**
   * Is it the same property (but possibly of a different identity). E.g. `p.foo.bar` and `v.foo.bar`.
   *
   * Compares property names from the outer-most level inward, recursing only
   * while the outer names agree; the anchoring identifiers themselves are
   * deliberately not compared.
   */
  private def isSameProperties(p1: Property, p2: Property): Boolean = (p1.ast, p2.ast) match {
    case (SomeIdent(_), SomeIdent(_)) =>
      p1.name == p2.name
    // If it's Property(Property(Id), name) == Property(Property(Id), name) we need to check that the
    // outer properties are the same before moving on to the inner ones.
    case (pp1: Property, pp2: Property) if (p1.name == p2.name) =>
      isSameProperties(pp1, pp2)
    case _ =>
      false
  }
}
| getquill/quill | quill-core-portable/src/main/scala/io/getquill/norm/NormalizeReturning.scala | Scala | apache-2.0 | 6,431 |
package sio.iteratee
import cats.MonadError
import sio.core.IO
import sio.core.instances.all._
import io.iteratee.{IterateeErrorModule, EnumeratorErrorModule, EnumerateeModule, Module}
/**
 * io.iteratee module bindings specialised to [[sio.core.IO]], using
 * `Throwable` as the error channel.
 */
trait IOModule extends Module[IO]
  with EnumerateeModule[IO]
  with EnumeratorErrorModule[IO, Throwable]
  with IterateeErrorModule[IO, Throwable]
{
  // The monad-constraint alias required by the iteratee modules.
  final type M[f[_]] = MonadError[f, Throwable]
  // The MonadError instance for IO, resolved from sio.core.instances.all._.
  final protected val F: MonadError[IO, Throwable] = MonadError[IO, Throwable]
  // Side effects are captured lazily via IO.apply.
  final protected def captureEffect[A](a: => A): IO[A] = IO.apply(a)
}
object `package` extends IOModule | alexknvl/sio | iteratee/src/main/scala/sio/iteratee/IOModule.scala | Scala | mit | 569 |
/*
* Copyright (C) 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.scalagen
import java.io.File
import java.io.FileInputStream
import java.io.IOException
import org.junit.Test
import japa.parser.JavaParser
import com.mysema.scala.CompileTestUtils
import org.junit.Assert._
class ScalaCompilationTest extends AbstractParserTest with CompileTestUtils {
@Test
def Compile {
  val resources = List[File](new File("src/test/scala/com/mysema/examples").listFiles(): _*)
  // Translate every .java example to Scala and collect (fileName -> message)
  // for each one whose generated source fails to compile. Using Option/flatMap
  // instead of null sentinels; the input stream is now closed deterministically.
  val failures: Map[String, String] = resources
    .filter(_.getName.endsWith(".java"))
    .flatMap { f =>
      val in = new FileInputStream(f)
      val source =
        try toScala(JavaParser.parse(in))
        finally in.close()
      try {
        assertCompileSuccess(source)
        None
      } catch {
        case e: AssertionError => Some(f.getName -> e.getMessage)
        //case e: Exception => Some(f.getName -> e.getMessage)
      }
    }.toMap
  failures.foreach { case (n, m) => System.err.println(n + " => " + m) }
  // known failures that are tolerated for now
  val filtered = failures - "Constructors3.java"
  assertTrue(
    failures.size + " of " + resources.size + " failures : " + failures.keys.mkString(", "),
    filtered.isEmpty)
}
} | paddymahoney/scalagen | scalagen/src/test/scala/com/mysema/scalagen/ScalaCompilationTest.scala | Scala | apache-2.0 | 1,765 |
package de.htwg.zeta.persistence.actorCache
import java.util.UUID
import javax.inject.Singleton
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.util.Failure
import scala.util.Success
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.routing.ConsistentHashingPool
import akka.routing.ConsistentHashingRouter.ConsistentHashMapping
import akka.util.Timeout
import com.google.inject.Inject
import de.htwg.zeta.common.models.entity.File
import de.htwg.zeta.persistence.actorCache.FileCacheActor.Create
import de.htwg.zeta.persistence.actorCache.FileCacheActor.Delete
import de.htwg.zeta.persistence.actorCache.FileCacheActor.Read
import de.htwg.zeta.persistence.actorCache.FileCacheActor.Update
import de.htwg.zeta.persistence.general.FileRepository
/**
* Actor Cache Implementation of FilePersistence.
*/
/** Actor-cache implementation of [[FileRepository]].
 *
 * Decorates `underlying` with a consistent-hashing pool of [[FileCacheActor]]s:
 * all messages concerning the same file are routed to the same actor, so each
 * actor's cache stays coherent for the files it owns.
 *
 * NOTE(review): the Success/Failure matching below assumes the cache actors
 * reply with `scala.util.Try`-wrapped results — an unexpected reply shape
 * would fail the returned Future with a MatchError; verify against
 * [[FileCacheActor]].
 */
@Singleton
class ActorCacheFileRepository @Inject()(
    underlying: FileRepository,
    system: ActorSystem,
    numberActorsPerEntityType: Int,
    cacheDuration: FiniteDuration,
    implicit val timeout: Timeout
) extends FileRepository {

  // Route by file identity so the same (id, name) always hits the same actor.
  private def hashMapping: ConsistentHashMapping = {
    case Create(file) => file.key.hashCode
    case Read(id, name) => (id, name).hashCode
    case Update(file) => file.key.hashCode
    case Delete(id, name) => (id, name).hashCode
  }

  private val router: ActorRef = system.actorOf(
    ConsistentHashingPool(
      nrOfInstances = numberActorsPerEntityType,
      hashMapping = hashMapping
    ).props(
      FileCacheActor.props(underlying, cacheDuration)
    ),
    "File"
  )

  /** Ask the router and unwrap the Try-style reply into a Future[File].
   * Shared by create/read/update, which all return the affected file. */
  private def askForFile(message: Any): Future[File] = {
    (router ? message).flatMap {
      case Success(file: File) => Future.successful(file)
      case Failure(e) => Future.failed(e)
    }
  }

  /** Create a new file.
   *
   * @param file the file to save
   * @return Future, with the created file
   */
  override def create(file: File): Future[File] = askForFile(Create(file))

  /** Read a file.
   *
   * @param id   the id of the file
   * @param name the name of the file
   * @return Future containing the read file
   */
  override def read(id: UUID, name: String): Future[File] = askForFile(Read(id, name))

  /** Update a file.
   *
   * @param file The updated file
   * @return Future containing the updated file
   */
  override def update(file: File): Future[File] = askForFile(Update(file))

  /** Delete a file.
   *
   * @param id   The id of the file to delete
   * @param name the name of the file
   * @return Future that completes when the delete has been processed
   */
  override def delete(id: UUID, name: String): Future[Unit] = {
    (router ? Delete(id, name)).flatMap {
      case Success(()) => Future.successful(())
      case Failure(e) => Future.failed(e)
    }
  }

  /** Get the id's of all files.
   *
   * Bypasses the cache actors and queries the underlying repository directly.
   *
   * @return Future containing all id's of the file type
   */
  override def readAllKeys(): Future[Map[UUID, Set[String]]] = {
    underlying.readAllKeys()
  }
}
| Zeta-Project/zeta | api/persistence/src/main/scala/de/htwg/zeta/persistence/actorCache/ActorCacheFileRepository.scala | Scala | bsd-2-clause | 3,217 |
package org.jetbrains.plugins.scala
package debugger
import java.util
import java.util.Collections
import com.intellij.debugger.engine._
import com.intellij.debugger.jdi.VirtualMachineProxyImpl
import com.intellij.debugger.requests.ClassPrepareRequestor
import com.intellij.debugger.{MultiRequestPositionManager, NoDataException, PositionManager, SourcePosition}
import com.intellij.openapi.editor.Document
import com.intellij.openapi.project.{DumbService, Project}
import com.intellij.openapi.roots.impl.DirectoryIndex
import com.intellij.openapi.util.Ref
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi._
import com.intellij.psi.search.{FilenameIndex, GlobalSearchScope}
import com.intellij.psi.util.CachedValueProvider.Result
import com.intellij.psi.util.{CachedValueProvider, CachedValuesManager, PsiTreeUtil}
import com.intellij.util.{Processor, Query}
import com.sun.jdi._
import com.sun.jdi.request.ClassPrepareRequest
import org.jetbrains.annotations.{NotNull, Nullable}
import org.jetbrains.plugins.scala.caches.ScalaShortNamesCacheManager
import org.jetbrains.plugins.scala.debugger.ScalaPositionManager._
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaEvaluatorBuilderUtil
import org.jetbrains.plugins.scala.debugger.evaluation.evaluator.ScalaCompilingEvaluator
import org.jetbrains.plugins.scala.debugger.evaluation.util.DebuggerUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScConstructorPattern, ScInfixPattern}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameters}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.types.ValueClassType
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.util.macroDebug.ScalaMacroDebuggingUtil
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.reflect.NameTransformer
import scala.util.Try
/**
* @author ilyas
*/
class ScalaPositionManager(val debugProcess: DebugProcess) extends PositionManager with MultiRequestPositionManager with LocationLineManager {

  protected[debugger] val caches = new ScalaPositionManagerCaches(debugProcess)
  import caches._

  // Register this instance so companion-object lookups by DebugProcess / VirtualMachine work.
  ScalaPositionManager.cacheInstance(this)

  /** Map a JVM [[Location]] back to a source position in a Scala file.
   * Returns null for skipped locations; throws NoDataException when this
   * manager has no data for the location (so other managers get a chance). */
  @Nullable
  def getSourcePosition(@Nullable location: Location): SourcePosition = {
    if (shouldSkip(location)) return null

    val position =
      for {
        loc <- location.toOption
        psiFile <- getPsiFileByReferenceType(debugProcess.getProject, loc.declaringType).toOption
        lineNumber = exactLineNumber(location)
        if lineNumber >= 0
      } yield {
        calcPosition(psiFile, location, lineNumber).getOrElse {
          SourcePosition.createFromLine(psiFile, lineNumber)
        }
      }
    position match {
      case Some(p) => p
      case None => throw NoDataException.INSTANCE
    }
  }

  /** All loaded JVM classes that may contain code for `position`.
   * Exact names are collected for top-level traits/type definitions; everything
   * else (lambdas, local classes) is matched by name pattern against the VM's
   * loaded classes. */
  @NotNull
  def getAllClasses(@NotNull position: SourcePosition): util.List[ReferenceType] = {
    val file = position.getFile
    throwIfNotScalaFile(file)

    val generatedClassName = file.getUserData(ScalaCompilingEvaluator.classNameKey)
    def hasLocations(refType: ReferenceType, position: SourcePosition): Boolean = {
      try {
        val generated = generatedClassName != null && refType.name().contains(generatedClassName)
        lazy val sameFile = getPsiFileByReferenceType(file.getProject, refType) == file
        generated || sameFile && locationsOfLine(refType, position).size > 0
      } catch {
        case _: NoDataException | _: AbsentInformationException | _: ClassNotPreparedException | _: ObjectCollectedException => false
      }
    }

    val possiblePositions = positionsOnLine(file, position.getLine)

    val exactClasses = ArrayBuffer[ReferenceType]()
    val namePatterns = mutable.Set[NamePattern]()

    inReadAction {
      val onTheLine = possiblePositions.map(findGeneratingClassOrMethodParent)
      if (onTheLine.isEmpty) return Collections.emptyList()
      val nonLambdaParent =
        if (isCompiledWithIndyLambdas(file)) {
          val nonStrictParents = onTheLine.head.withParentsInFile
          nonStrictParents.find(p => ScalaEvaluatorBuilderUtil.isGenerateNonAnonfunClass(p))
        } else None

      def addExactClasses(name: String) = {
        exactClasses ++= debugProcess.getVirtualMachineProxy.classesByName(name).asScala
      }

      val sourceImages = onTheLine ++ nonLambdaParent
      sourceImages.foreach {
        case null =>
        case tr: ScTrait if !DebuggerUtil.isLocalClass(tr) =>
          // scalac 2.11 encodes trait bodies in a "$class" companion; check both names.
          val traitImplName = getSpecificNameForDebugger(tr)
          val simpleName = traitImplName.stripSuffix("$class")
          Seq(simpleName, traitImplName).foreach(addExactClasses)
        case td: ScTypeDefinition if !DebuggerUtil.isLocalClass(td) =>
          val qName = getSpecificNameForDebugger(td)
          val delayedBodyName = if (isDelayedInit(td)) Seq(s"$qName$delayedInitBody") else Nil
          (qName +: delayedBodyName).foreach(addExactClasses)
        case elem =>
          val namePattern = NamePattern.forElement(elem)
          namePatterns ++= Option(namePattern)
      }
    }

    val packageName: Option[String] = Option(inReadAction(file.asInstanceOf[ScalaFile].getPackageName))

    val foundWithPattern =
      if (namePatterns.isEmpty) Nil
      else filterAllClasses(c => hasLocations(c, position) && namePatterns.exists(_.matches(c)), packageName)

    (exactClasses ++ foundWithPattern).distinct.asJava
  }

  /** Byte-code locations in `refType` that belong to the given source line. */
  @NotNull
  def locationsOfLine(@NotNull refType: ReferenceType, @NotNull position: SourcePosition): util.List[Location] = {
    throwIfNotScalaFile(position.getFile)
    checkForIndyLambdas(refType)

    try {
      val line: Int = position.getLine
      locationsOfLine(refType, line).asJava
    }
    catch {
      case _: AbsentInformationException => Collections.emptyList()
    }
  }

  def createPrepareRequest(@NotNull requestor: ClassPrepareRequestor, @NotNull position: SourcePosition): ClassPrepareRequest = {
    throw new IllegalStateException("This class implements MultiRequestPositionManager, corresponding createPrepareRequests version should be used")
  }

  /** One class-prepare request per possible element on the breakpoint line.
   * Request patterns use a trailing '*' wherever nested/generated classes may
   * carry the code (traits, macros, local classes). */
  override def createPrepareRequests(requestor: ClassPrepareRequestor, position: SourcePosition): util.List[ClassPrepareRequest] = {
    def isLocalOrUnderDelayedInit(definition: PsiClass): Boolean = {
      DebuggerUtil.isLocalClass(definition) || isDelayedInit(definition)
    }
    def findEnclosingTypeDefinition: Option[ScTypeDefinition] = {
      @tailrec
      def notLocalEnclosingTypeDefinition(element: PsiElement): Option[ScTypeDefinition] = {
        PsiTreeUtil.getParentOfType(element, classOf[ScTypeDefinition]) match {
          case null => None
          case td if DebuggerUtil.isLocalClass(td) => notLocalEnclosingTypeDefinition(td.getParent)
          case td => Some(td)
        }
      }
      val element = nonWhitespaceElement(position)
      notLocalEnclosingTypeDefinition(element)
    }

    def createPrepareRequest(position: SourcePosition): ClassPrepareRequest = {
      val qName = new Ref[String](null)
      val waitRequestor = new Ref[ClassPrepareRequestor](null)
      inReadAction {
        val sourceImage = findReferenceTypeSourceImage(position)
        val insideMacro: Boolean = isInsideMacro(nonWhitespaceElement(position))
        sourceImage match {
          case cl: ScClass if ValueClassType.isValueClass(cl) =>
            //there are no instances of value classes, methods from companion object are used
            qName.set(getSpecificNameForDebugger(cl) + "$")
          case tr: ScTrait if !DebuggerUtil.isLocalClass(tr) =>
            //to handle both trait methods encoding
            qName.set(tr.getQualifiedNameForDebugger + "*")
          case typeDef: ScTypeDefinition if !isLocalOrUnderDelayedInit(typeDef) =>
            val specificName = getSpecificNameForDebugger(typeDef)
            qName.set(if (insideMacro) specificName + "*" else specificName)
          case _ =>
            findEnclosingTypeDefinition.foreach(typeDef => qName.set(typeDef.getQualifiedNameForDebugger + "*"))
        }
        // Enclosing type definition is not found
        if (qName.get == null) {
          qName.set(SCRIPT_HOLDER_CLASS_NAME + "*")
        }
        waitRequestor.set(new ScalaPositionManager.MyClassPrepareRequestor(position, requestor))
      }

      debugProcess.getRequestsManager.createClassPrepareRequest(waitRequestor.get, qName.get)
    }

    val file = position.getFile
    throwIfNotScalaFile(file)

    val possiblePositions = inReadAction {
      positionsOnLine(file, position.getLine).map(SourcePosition.createFromElement)
    }
    possiblePositions.map(createPrepareRequest).asJava
  }

  private def throwIfNotScalaFile(file: PsiFile): Unit = {
    if (!checkScalaFile(file)) throw NoDataException.INSTANCE
  }

  // Only uncompiled Scala sources are handled by this position manager.
  private def checkScalaFile(file: PsiFile): Boolean = file match {
    case sf: ScalaFile => !sf.isCompiled
    case _ => false
  }

  /** Scan all classes loaded by the VM and keep those in the given package
   * that are initialized and satisfy `condition`. */
  private def filterAllClasses(condition: ReferenceType => Boolean, packageName: Option[String]): Seq[ReferenceType] = {
    def samePackage(refType: ReferenceType) = {
      val name = refType.name()
      val lastDot = name.lastIndexOf('.')
      val refTypePackageName = if (lastDot < 0) "" else name.substring(0, lastDot)
      packageName.isEmpty || packageName.contains(refTypePackageName)
    }

    def isAppropriate(refType: ReferenceType) = {
      Try(samePackage(refType) && refType.isInitialized && condition(refType)).getOrElse(false)
    }

    import scala.collection.JavaConverters._
    for {
      refType <- debugProcess.getVirtualMachineProxy.allClasses.asScala
      if isAppropriate(refType)
    } yield {
      refType
    }
  }

  // The PSI element (class/lambda/macro) a generated class at `position` stems from.
  @Nullable
  private def findReferenceTypeSourceImage(@NotNull position: SourcePosition): PsiElement = {
    val element = nonWhitespaceElement(position)
    findGeneratingClassOrMethodParent(element)
  }

  /** First non-whitespace PSI element at the position (skipping whitespace that
   * ends on the same line); null for non-Scala files. */
  protected def nonWhitespaceElement(@NotNull position: SourcePosition): PsiElement = {
    val file = position.getFile

    @tailrec
    def nonWhitespaceInner(element: PsiElement, document: Document): PsiElement = {
      element match {
        case null => null
        case ws: PsiWhiteSpace if document.getLineNumber(element.getTextRange.getEndOffset) == position.getLine =>
          val nextElement = file.findElementAt(element.getTextRange.getEndOffset)
          nonWhitespaceInner(nextElement, document)
        case _ => element
      }
    }
    if (!file.isInstanceOf[ScalaFile]) null
    else {
      val firstElement = file.findElementAt(position.getOffset)
      try {
        val document = PsiDocumentManager.getInstance(file.getProject).getDocument(file)
        nonWhitespaceInner(firstElement, document)
      }
      catch {
        case t: Throwable => firstElement
      }
    }
  }

  /** Choose, among the candidate elements on the line, the one that the
   * current method (lambda, default-arg synthetic, plain method) belongs to. */
  private def calcPosition(file: PsiFile, location: Location, lineNumber: Int): Option[SourcePosition] = {
    throwIfNotScalaFile(file)

    // Default-argument methods are compiled as "name$default$<index>".
    def isDefaultArgument(method: Method) = {
      val methodName = method.name()
      val lastDollar = methodName.lastIndexOf("$")
      if (lastDollar >= 0) {
        val (start, index) = methodName.splitAt(lastDollar + 1)
        (start.endsWith("$default$"), index)
      }
      else (false, "")
    }

    def findDefaultArg(possiblePositions: Seq[PsiElement], defaultArgIndex: String) : Option[PsiElement] = {
      try {
        val paramNumber = defaultArgIndex.toInt - 1
        possiblePositions.find {
          case e =>
            val scParameters = PsiTreeUtil.getParentOfType(e, classOf[ScParameters])
            if (scParameters != null) {
              val param = scParameters.params(paramNumber)
              param.isDefaultParam && param.isAncestorOf(e)
            }
            else false
        }
      } catch {
        case e: Exception => None
      }
    }

    def calcElement(): Option[PsiElement] = {
      val possiblePositions = positionsOnLine(file, lineNumber)
      val currentMethod = location.method()

      lazy val (isDefaultArg, defaultArgIndex) = isDefaultArgument(currentMethod)

      def findPsiElementForIndyLambda(): Option[PsiElement] = {
        val lambdas = lambdasOnLine(file, lineNumber)
        val methods = indyLambdaMethodsOnLine(location.declaringType(), lineNumber)
        // Relies on lambdas and their generated methods sharing the same line order.
        val methodsToLambdas = methods.zip(lambdas).toMap
        methodsToLambdas.get(currentMethod)
      }

      if (possiblePositions.size <= 1) {
        possiblePositions.headOption
      }
      else if (isIndyLambda(currentMethod)) {
        findPsiElementForIndyLambda()
      }
      else if (isDefaultArg) {
        findDefaultArg(possiblePositions, defaultArgIndex)
      }
      else if (!isAnonfun(currentMethod)) {
        possiblePositions.find {
          case e: PsiElement if isLambda(e) => false
          case (e: ScExpression) childOf (p: ScParameter) => false
          case _ => true
        }
      }
      else {
        val generatingPsiElem = findElementByReferenceType(location.declaringType())
        possiblePositions.find(p => generatingPsiElem.contains(findGeneratingClassOrMethodParent(p)))
      }
    }

    calcElement().map(SourcePosition.createFromElement)
  }

  // Scala scripts compile to a synthetic holder class; find their source by file name.
  private def findScriptFile(refType: ReferenceType): Option[PsiFile] = {
    try {
      val name = refType.name()
      if (name.startsWith(SCRIPT_HOLDER_CLASS_NAME)) {
        cachedSourceName(refType) match {
          case Some(srcName) =>
            val files = FilenameIndex.getFilesByName(debugProcess.getProject, srcName, debugProcess.getSearchScope)
            files.headOption
          case _ => None
        }
      }
      else None
    }
    catch {
      case e: AbsentInformationException => None
    }
  }

  /** Resolve the PSI file a JVM reference type came from (cached).
   * Also records whether the file was compiled with indy lambdas. */
  @Nullable
  private def getPsiFileByReferenceType(project: Project, refType: ReferenceType): PsiFile = {
    if (refType == null) return null
    if (refTypeToFileCache.contains(refType)) return refTypeToFileCache(refType)

    def searchForMacroDebugging(qName: String): PsiFile = {
      val directoryIndex: DirectoryIndex = DirectoryIndex.getInstance(project)
      val dotIndex = qName.lastIndexOf(".")
      val packageName = if (dotIndex > 0) qName.substring(0, dotIndex) else ""
      val query: Query[VirtualFile] = directoryIndex.getDirectoriesByPackageName(packageName, true)
      val fileNameWithoutExtension = if (dotIndex > 0) qName.substring(dotIndex + 1) else qName
      val fileNames: util.Set[String] = new util.HashSet[String]
      // NOTE(review): deprecated JavaConversions is used here while the rest of
      // the file uses JavaConverters; consider migrating.
      import scala.collection.JavaConversions._
      for (extention <- ScalaLoader.SCALA_EXTENSIONS) {
        fileNames.add(fileNameWithoutExtension + "." + extention)
      }
      val result = new Ref[PsiFile]
      query.forEach(new Processor[VirtualFile] {
        override def process(vDir: VirtualFile): Boolean = {
          var isFound = false
          for {
            fileName <- fileNames
            if !isFound
            vFile <- vDir.findChild(fileName).toOption
          } {
            val psiFile: PsiFile = PsiManager.getInstance(project).findFile(vFile)
            val debugFile: PsiFile = ScalaMacroDebuggingUtil.loadCode(psiFile, force = false)
            if (debugFile != null) {
              result.set(debugFile)
              isFound = true
            }
            else if (psiFile.isInstanceOf[ScalaFile]) {
              result.set(psiFile)
              isFound = true
            }
          }
          !isFound
        }
      })
      result.get
    }

    def findFile() = {
      def withDollarTestName(originalQName: String): Option[String] = {
        val dollarTestSuffix = "$Test" //See SCL-9340
        if (originalQName.endsWith(dollarTestSuffix)) Some(originalQName)
        else if (originalQName.contains(dollarTestSuffix + "$")) {
          val index = originalQName.indexOf(dollarTestSuffix) + dollarTestSuffix.length
          Some(originalQName.take(index))
        }
        else None
      }
      def topLevelClassName(originalQName: String): String = {
        if (originalQName.endsWith(packageSuffix)) originalQName
        else originalQName.replace(packageSuffix, ".").takeWhile(_ != '$')
      }
      def tryToFindClass(name: String) = {
        findClassByQualName(name, isScalaObject = false)
          .orElse(findClassByQualName(name, isScalaObject = true))
      }

      val scriptFile = findScriptFile(refType)
      val file = scriptFile.getOrElse {
        val originalQName = NameTransformer.decode(refType.name)

        if (!ScalaMacroDebuggingUtil.isEnabled) {
          val clazz = withDollarTestName(originalQName).flatMap(tryToFindClass)
            .orElse(tryToFindClass(topLevelClassName(originalQName)))
          clazz.map(_.getNavigationElement.getContainingFile).orNull
        }
        else
          searchForMacroDebugging(topLevelClassName(originalQName))
      }
      file
    }

    val file = inReadAction(findFile())
    if (file != null && refType.methods().asScala.exists(isIndyLambda)) {
      isCompiledWithIndyLambdasCache.put(file, true)
    }
    refTypeToFileCache.put(refType, file)
    file
  }

  private def nameMatches(elem: PsiElement, refType: ReferenceType): Boolean = {
    val pattern = NamePattern.forElement(elem)
    pattern != null && pattern.matches(refType)
  }

  // Populating the file cache as a side effect records indy-lambda usage for the file.
  private def checkForIndyLambdas(refType: ReferenceType) = {
    if (!refTypeToFileCache.contains(refType)) {
      getPsiFileByReferenceType(debugProcess.getProject, refType)
    }
  }

  /** PSI element that generated the given reference type, cached via smart pointers. */
  def findElementByReferenceType(refType: ReferenceType): Option[PsiElement] = {
    def createPointer(elem: PsiElement) =
      SmartPointerManager.getInstance(debugProcess.getProject).createSmartPsiElementPointer(elem)

    refTypeToElementCache.get(refType) match {
      case Some(Some(p)) if p.getElement != null => Some(p.getElement)
      case Some(Some(_)) | None =>
        val found = findElementByReferenceTypeInner(refType)
        refTypeToElementCache.update(refType, found.map(createPointer))
        found
      case Some(None) => None
    }
  }

  /** Heuristic search: try qualified/short name, then narrow candidates on the
   * type's line range by name pattern, lambda signature and containing class. */
  private def findElementByReferenceTypeInner(refType: ReferenceType): Option[PsiElement] = {

    val byName = findByQualName(refType) orElse findByShortName(refType)
    if (byName.isDefined) return byName

    val project = debugProcess.getProject

    val allLocations = Try(refType.allLineLocations().asScala).getOrElse(Seq.empty)

    val refTypeLineNumbers = allLocations.map(checkedLineNumber).filter(_ > 0)
    if (refTypeLineNumbers.isEmpty) return None

    val firstRefTypeLine = refTypeLineNumbers.min
    val lastRefTypeLine = refTypeLineNumbers.max
    val refTypeLines = firstRefTypeLine to lastRefTypeLine

    val file = getPsiFileByReferenceType(project, refType)
    if (!checkScalaFile(file)) return None

    val document = PsiDocumentManager.getInstance(project).getDocument(file)
    if (document == null) return None

    def elementLineRange(elem: PsiElement, document: Document) = {
      val startLine = document.getLineNumber(elem.getTextRange.getStartOffset)
      val endLine = document.getLineNumber(elem.getTextRange.getEndOffset)
      startLine to endLine
    }

    def checkLines(elem: PsiElement, document: Document) = {
      val lineRange = elementLineRange(elem, document)
      //intersection, very loose check because sometimes first line for <init> method is after range of the class
      firstRefTypeLine <= lineRange.end && lastRefTypeLine >= lineRange.start
    }

    def isAppropriateCandidate(elem: PsiElement) = {
      checkLines(elem, document) && ScalaEvaluatorBuilderUtil.isGenerateClass(elem) && nameMatches(elem, refType)
    }

    def findCandidates(): Seq[PsiElement] = {
      def findAt(offset: Int): Option[PsiElement] = {
        val startElem = file.findElementAt(offset)
        startElem.parentsInFile.find(isAppropriateCandidate)
      }
      if (lastRefTypeLine - firstRefTypeLine >= 2) {
        val offsetsInTheMiddle = Seq(
          document.getLineEndOffset(firstRefTypeLine),
          document.getLineEndOffset(firstRefTypeLine + 1)
        )
        offsetsInTheMiddle.flatMap(findAt).distinct
      }
      else {
        val firstLinePositions = positionsOnLine(file, firstRefTypeLine)
        val allPositions =
          if (firstRefTypeLine == lastRefTypeLine) firstLinePositions
          else firstLinePositions ++ positionsOnLine(file, lastRefTypeLine)
        allPositions.distinct.filter(isAppropriateCandidate)
      }
    }

    def filterWithSignature(candidates: Seq[PsiElement]) = {
      val applySignature = refType.methodsByName("apply").asScala.find(m => !m.isSynthetic).map(_.signature())
      if (applySignature.isEmpty) candidates
      else {
        candidates.filter(l => applySignature == DebuggerUtil.lambdaJVMSignature(l))
      }
    }

    val candidates = findCandidates()

    if (candidates.size <= 1) return candidates.headOption

    if (refTypeLines.size > 1) {
      val withExactlySameLines = candidates.filter(elementLineRange(_, document) == refTypeLines)
      if (withExactlySameLines.size == 1) return withExactlySameLines.headOption
    }

    if (candidates.exists(!isLambda(_))) return candidates.headOption

    val filteredWithSignature = filterWithSignature(candidates)
    if (filteredWithSignature.size == 1) return filteredWithSignature.headOption

    val byContainingClasses = filteredWithSignature.groupBy(c => findGeneratingClassOrMethodParent(c.getParent))
    if (byContainingClasses.size > 1) {
      findContainingClass(refType) match {
        case Some(e) => return byContainingClasses.get(e).flatMap(_.headOption)
        case None =>
      }
    }
    filteredWithSignature.headOption
  }

  /** Look up a class by fully-qualified name, disambiguating between an object
   * and its companion class when both exist. */
  private def findClassByQualName(qName: String, isScalaObject: Boolean): Option[PsiClass] = {
    val project = debugProcess.getProject

    val cacheManager = ScalaShortNamesCacheManager.getInstance(project)
    val classes =
      if (qName.endsWith(packageSuffix))
        Option(cacheManager.getPackageObjectByName(qName.stripSuffix(packageSuffix), GlobalSearchScope.allScope(project))).toSeq
      else
        cacheManager.getClassesByFQName(qName.replace(packageSuffix, "."), debugProcess.getSearchScope)

    val clazz =
      if (classes.length == 1) classes.headOption
      else if (classes.length >= 2) {
        if (isScalaObject) classes.find(_.isInstanceOf[ScObject])
        else classes.find(!_.isInstanceOf[ScObject])
      }
      else None
    clazz.filter(_.isValid)
  }

  private def findByQualName(refType: ReferenceType): Option[PsiClass] = {
    val originalQName = NameTransformer.decode(refType.name)
    val endsWithPackageSuffix = originalQName.endsWith(packageSuffix)
    val withoutSuffix =
      if (endsWithPackageSuffix) originalQName.stripSuffix(packageSuffix)
      else originalQName.stripSuffix("$").stripSuffix("$class")
    val withDots = withoutSuffix.replace(packageSuffix, ".").replace('$', '.')
    val transformed = if (endsWithPackageSuffix) withDots + packageSuffix else withDots

    findClassByQualName(transformed, originalQName.endsWith("$"))
  }

  /** Fall back to a short-name index lookup, filtered by the debug source file name. */
  private def findByShortName(refType: ReferenceType): Option[PsiClass] = {
    val project = debugProcess.getProject

    if (DumbService.getInstance(project).isDumb) return None

    lazy val sourceName = cachedSourceName(refType).getOrElse("")

    def sameFileName(elem: PsiElement) = {
      val containingFile = elem.getContainingFile
      containingFile != null && containingFile.name == sourceName
    }

    val originalQName = NameTransformer.decode(refType.name)
    val withoutSuffix =
      if (originalQName.endsWith(packageSuffix)) originalQName
      else originalQName.replace(packageSuffix, ".").stripSuffix("$").stripSuffix("$class")
    val lastDollar = withoutSuffix.lastIndexOf('$')
    val lastDot = withoutSuffix.lastIndexOf('.')
    val index = Seq(lastDollar, lastDot, 0).max + 1
    val name = withoutSuffix.drop(index)
    val isScalaObject = originalQName.endsWith("$")

    val cacheManager = ScalaShortNamesCacheManager.getInstance(project)
    val classes = cacheManager.getClassesByName(name, GlobalSearchScope.allScope(project))

    val inSameFile = classes.filter(c => c.isValid && sameFileName(c))

    // Fixed: previously returned `classes.headOption` here, i.e. the first of
    // ALL classes with this short name instead of the single same-file match.
    if (inSameFile.length == 1) inSameFile.headOption
    else if (inSameFile.length >= 2) {
      if (isScalaObject) inSameFile.find(_.isInstanceOf[ScObject])
      else inSameFile.find(!_.isInstanceOf[ScObject])
    }
    else None
  }

  // "Outer$$inner" naming: resolve the outer class and map it back to PSI.
  private def findContainingClass(refType: ReferenceType): Option[PsiElement] = {
    def classesByName(s: String) = {
      val vm = debugProcess.getVirtualMachineProxy
      vm.classesByName(s).asScala
    }

    val name = NameTransformer.decode(refType.name())
    val index = name.lastIndexOf("$$")
    if (index < 0) return None

    val containingName = NameTransformer.encode(name.substring(0, index))
    classesByName(containingName).headOption.flatMap(findElementByReferenceType)
  }
}
object ScalaPositionManager {
private val SCRIPT_HOLDER_CLASS_NAME: String = "Main$$anon$1"
private val packageSuffix = ".package$"
private val delayedInitBody = "delayedInit$body"
// Shared mutable registries keyed by file / debug process.
// NOTE(review): accessed from debugger callbacks — thread-safety is not visible here; verify.
private val isCompiledWithIndyLambdasCache = mutable.HashMap[PsiFile, Boolean]()
private val instances = mutable.HashMap[DebugProcess, ScalaPositionManager]()
// Registers a manager for its debug process and removes it again when the process detaches.
private def cacheInstance(scPosManager: ScalaPositionManager) = {
val debugProcess = scPosManager.debugProcess
instances.put(debugProcess, scPosManager)
debugProcess.addDebugProcessListener(new DebugProcessAdapter {
override def processDetached(process: DebugProcess, closedByUser: Boolean): Unit = {
ScalaPositionManager.instances.remove(process)
debugProcess.removeDebugProcessListener(this)
}
})
}
// Look up the registered manager by VM, debug process, or any JDI mirror.
def instance(vm: VirtualMachine): Option[ScalaPositionManager] = instances.collectFirst {
case (process, manager) if getVM(process).contains(vm) => manager
}
def instance(debugProcess: DebugProcess): Option[ScalaPositionManager] = instances.get(debugProcess)
def instance(mirror: Mirror): Option[ScalaPositionManager] = instance(mirror.virtualMachine())
// The underlying VirtualMachine, available only for the IntelliJ proxy implementation.
private def getVM(debugProcess: DebugProcess) = {
debugProcess.getVirtualMachineProxy match {
case impl: VirtualMachineProxyImpl => Option(impl.getVirtualMachine)
case _ => None
}
}
// PSI elements that can host a breakpoint on the given 0-based line,
// cached per file (invalidated when the file changes).
def positionsOnLine(file: PsiFile, lineNumber: Int): Seq[PsiElement] = {
if (lineNumber < 0) return Seq.empty
val scFile = file match {
case sf: ScalaFile => sf
case _ => return Seq.empty
}
val cacheProvider = new CachedValueProvider[mutable.HashMap[Int, Seq[PsiElement]]] {
override def compute(): Result[mutable.HashMap[Int, Seq[PsiElement]]] = Result.create(mutable.HashMap[Int, Seq[PsiElement]](), file)
}
CachedValuesManager.getCachedValue(file, cacheProvider).getOrElseUpdate(lineNumber, positionsOnLineInner(scFile, lineNumber))
}
// JDI lines are 1-based; convert to 0-based, -1 when the line is unavailable.
def checkedLineNumber(location: Location): Int =
try location.lineNumber() - 1
catch {case ie: InternalError => -1}
// Source file name for a reference type, via the per-process cache.
def cachedSourceName(refType: ReferenceType) = {
ScalaPositionManager.instance(refType).map(_.caches).flatMap(_.cachedSourceName(refType))
}
// Walks all leaves on the line and lifts each to the smallest enclosing
// element that is a meaningful breakpoint target (expression, pattern,
// type definition, lambda, or anonymous-class instantiation).
private def positionsOnLineInner(file: ScalaFile, lineNumber: Int): Seq[PsiElement] = {
inReadAction {
val document = PsiDocumentManager.getInstance(file.getProject).getDocument(file)
if (document == null || lineNumber >= document.getLineCount) return Seq.empty
val startLine = document.getLineStartOffset(lineNumber)
val endLine = document.getLineEndOffset(lineNumber)
// All PSI leaves whose offsets fall on the line, skipping whitespace,
// comments and braces (except when directly under a block/unit expression).
def elementsOnTheLine(file: ScalaFile, lineNumber: Int): Seq[PsiElement] = {
val result = ArrayBuffer[PsiElement]()
var elem = file.findElementAt(startLine)
while (elem != null && elem.getTextOffset <= endLine) {
elem match {
case ChildOf(_: ScUnitExpr) | ChildOf(ScBlock()) =>
result += elem
case ElementType(t) if ScalaTokenTypes.WHITES_SPACES_AND_COMMENTS_TOKEN_SET.contains(t) ||
ScalaTokenTypes.BRACES_TOKEN_SET.contains(t) =>
case _ =>
result += elem
}
elem = PsiTreeUtil.nextLeaf(elem, true)
}
result
}
// For a leaf, choose the largest enclosing element that still starts on this
// line; lambdas/anonymous classes win over plain expressions when smaller.
def findParent(element: PsiElement): Option[PsiElement] = {
val parentsOnTheLine = element.withParentsInFile.takeWhile(e => e.getTextOffset > startLine).toIndexedSeq
val anon = parentsOnTheLine.collectFirst {
case e if isLambda(e) => e
case newTd: ScNewTemplateDefinition if DebuggerUtil.generatesAnonClass(newTd) => newTd
}
val filteredParents = parentsOnTheLine.reverse.filter {
case _: ScExpression => true
case _: ScConstructorPattern | _: ScInfixPattern | _: ScBindingPattern => true
case callRefId childOf ((ref: ScReferenceExpression) childOf (_: ScMethodCall))
if ref.nameId == callRefId && ref.getTextRange.getStartOffset < startLine => true
case _: ScTypeDefinition => true
case _ => false
}
val maxExpressionPatternOrTypeDef =
filteredParents.find(!_.isInstanceOf[ScBlock]).orElse(filteredParents.headOption)
Seq(anon, maxExpressionPatternOrTypeDef).flatten.sortBy(_.getTextLength).headOption
}
elementsOnTheLine(file, lineNumber).flatMap(findParent).distinct
}
}
// An element compiled to an anonymous function class, excluding macro expansions.
def isLambda(element: PsiElement) = {
ScalaEvaluatorBuilderUtil.isGenerateAnonfun(element) && !isInsideMacro(element)
}
def lambdasOnLine(file: PsiFile, lineNumber: Int): Seq[PsiElement] = {
positionsOnLine(file, lineNumber).filter(isLambda)
}
// invokedynamic-compiled lambdas are methods named "...$anonfun$<n>".
def isIndyLambda(m: Method): Boolean = {
val name = m.name()
val lastDollar = name.lastIndexOf('$')
lastDollar > 0 && name.substring(0, lastDollar).endsWith("$anonfun")
}
// Pre-indy encoding: anonymous function classes extend scala.runtime.AbstractFunctionN.
def isAnonfunType(refType: ReferenceType) = {
refType match {
case ct: ClassType =>
val supClass = ct.superclass()
supClass != null && supClass.name().startsWith("scala.runtime.AbstractFunction")
case _ => false
}
}
def isAnonfun(m: Method): Boolean = {
isIndyLambda(m) || m.name.startsWith("apply") && isAnonfunType(m.declaringType())
}
// Indy-lambda methods on the given (0-based) line, ordered by their numeric suffix
// so they line up with the source-order of lambdas on that line.
def indyLambdaMethodsOnLine(refType: ReferenceType, lineNumber: Int): Seq[Method] = {
def ordinal(m: Method) = {
val name = m.name()
val lastDollar = name.lastIndexOf('$')
Try(name.substring(lastDollar + 1).toInt).getOrElse(-1)
}
val all = refType.methods().asScala.filter(isIndyLambda)
val onLine = all.filter(m => Try(!m.locationsOfLine(lineNumber + 1).isEmpty).getOrElse(false))
onLine.sortBy(ordinal)
}
// Answers from the cache only; defaults to false for unseen files.
def isCompiledWithIndyLambdas(file: PsiFile): Boolean = {
if (file == null) false
else {
val originalFile = Option(file.getUserData(ScalaCompilingEvaluator.originalFileKey)).getOrElse(file)
isCompiledWithIndyLambdasCache.getOrElse(originalFile, false)
}
}
// Nearest ancestor (or self) that the compiler turns into its own class/method:
// a generated class, a lambda, or an enclosing macro call.
@tailrec
def findGeneratingClassOrMethodParent(element: PsiElement): PsiElement = {
element match {
case null => null
case elem if ScalaEvaluatorBuilderUtil.isGenerateClass(elem) || isLambda(elem) => elem
case InsideMacro(macroCall) => macroCall
case elem => findGeneratingClassOrMethodParent(elem.getParent)
}
}
private object MacroDef {
val macroImpl = "scala.reflect.macros.internal.macroImpl"
def unapply(fun: ScFunction): Option[ScFunction] = {
fun match {
case m: ScMacroDefinition => Some(m)
case _ if fun.annotations.map(_.constructor.typeElement.getText).contains(macroImpl) => Some(fun)
case _ => None
}
}
}
  /** Extractor yielding the closest enclosing method call (within the same file)
    * that resolves to a macro definition. */
  private object InsideMacro {
    def unapply(elem: PsiElement): Option[ScMethodCall] = {
      elem.parentsInFile.collectFirst {
        case mc @ ScMethodCall(ResolvesTo(MacroDef(_)), _) => mc
      }
    }
  }
object InsideAsync {
def unapply(elem: PsiElement): Option[ScMethodCall] = elem match {
case InsideMacro(call @ ScMethodCall(ref: ScReferenceExpression, _)) if ref.refName == "async" => Some(call)
case _ => None
}
}
def isInsideMacro(elem: PsiElement): Boolean = InsideMacro.unapply(elem).isDefined
  /** Delegates to the registered position manager's skip logic. Note the `forall`:
    * when no ScalaPositionManager is registered for the process this returns true,
    * i.e. the location is skipped. */
  def shouldSkip(location: Location, debugProcess: DebugProcess) = {
    ScalaPositionManager.instance(debugProcess).forall(_.shouldSkip(location))
  }
private def getSpecificNameForDebugger(td: ScTypeDefinition): String = {
val name = td.getQualifiedNameForDebugger
td match {
case _: ScObject => s"$name$$"
case _: ScTrait => s"$name$$class"
case _ => name
}
}
def isDelayedInit(cl: PsiClass) = cl match {
case obj: ScObject =>
val manager: ScalaPsiManager = ScalaPsiManager.instance(obj.getProject)
val clazz: PsiClass = manager.getCachedClass(obj.getResolveScope, "scala.DelayedInit").orNull
clazz != null && manager.cachedDeepIsInheritor(obj, clazz)
case _ => false
}
  /** Forwards class-prepare events to `requestor`, but only for reference types that
    * were compiled from the same source file as `position` and actually contain it. */
  private class MyClassPrepareRequestor(position: SourcePosition, requestor: ClassPrepareRequestor) extends ClassPrepareRequestor {
    private val sourceFile = position.getFile
    private val sourceName = sourceFile.getName
    private def sourceNameOf(refType: ReferenceType): Option[String] = ScalaPositionManager.cachedSourceName(refType)
    def processClassPrepare(debuggerProcess: DebugProcess, referenceType: ReferenceType) {
      val positionManager: CompoundPositionManager = debuggerProcess.asInstanceOf[DebugProcessImpl].getPositionManager
      // Ignore classes compiled from other source files.
      if (!sourceNameOf(referenceType).contains(sourceName)) return
      if (positionManager.locationsOfLine(referenceType, position).size > 0) {
        requestor.processClassPrepare(debuggerProcess, referenceType)
      }
      else {
        // No bytecode locations on the exact line: fall back to checking whether this
        // type is among all classes the position manager associates with the position.
        val positionClasses: util.List[ReferenceType] = positionManager.getAllClasses(position)
        if (positionClasses.contains(referenceType)) {
          requestor.processClassPrepare(debuggerProcess, referenceType)
        }
      }
    }
  }
  /** Matches JVM class names against the expected compiled-name shape of a PSI element.
    * The expected name parts are recomputed whenever the indy-lambda compilation status
    * of the containing file changes, since that changes how anonymous classes are named. */
  private class NamePattern(elem: PsiElement) {
    private val containingFile = elem.getContainingFile
    private val sourceName = containingFile.getName
    // Files produced by the compiling evaluator match regardless of source-name equality.
    private val isGeneratedForCompilingEvaluator = containingFile.getUserData(ScalaCompilingEvaluator.classNameKey) != null
    private var compiledWithIndyLambdas = isCompiledWithIndyLambdas(containingFile)
    // Non-local type definitions have a single exact JVM name; everything else is
    // matched part-by-part via checkParts.
    private val exactName: Option[String] = {
      elem match {
        case td: ScTypeDefinition if !DebuggerUtil.isLocalClass(td) =>
          Some(getSpecificNameForDebugger(td))
        case _ => None
      }
    }
    // Lazily computed (and invalidated) in updateParts(); null means "not computed yet".
    private var classJVMNameParts: Seq[String] = null
    private def computeClassJVMNameParts: Seq[String] = {
      if (exactName.isDefined) Seq.empty
      else inReadAction {
        // Collect parts from the element up to the file root, then reverse to get
        // outermost-first order as they appear in the JVM class name.
        val parts = elem.withParentsInFile.flatMap(partsFor)
        parts.toSeq.reverse
      }
    }
    private def partsFor(elem: PsiElement): Seq[String] = {
      elem match {
        case td: ScTypeDefinition => Seq(ScalaNamesUtil.toJavaName(td.name))
        case newTd: ScNewTemplateDefinition if DebuggerUtil.generatesAnonClass(newTd) => Seq("$anon")
        case e if ScalaEvaluatorBuilderUtil.isGenerateClass(e) => partsForAnonfun(e)
        case _ => Seq.empty
      }
    }
    // Name parts contributed by an anonymous-function element, innermost-first
    // (the whole sequence is reversed later in computeClassJVMNameParts).
    private def partsForAnonfun(elem: PsiElement): Seq[String] = {
      val anonfunCount = ScalaEvaluatorBuilderUtil.anonClassCount(elem)
      val lastParts = Seq.fill(anonfunCount - 1)(Seq("$apply", "$anonfun")).flatten
      val containingClass = findGeneratingClassOrMethodParent(elem.getParent)
      val owner = PsiTreeUtil.getParentOfType(elem, classOf[ScFunctionDefinition], classOf[ScTypeDefinition],
        classOf[ScPatternDefinition], classOf[ScVariableDefinition])
      val firstParts =
        if (PsiTreeUtil.isAncestor(owner, containingClass, true)) Seq("$anonfun")
        else owner match {
          case fun: ScFunctionDefinition =>
            val name = if (fun.name == "this") JVMNameUtil.CONSTRUCTOR_NAME else fun.name
            val encoded = NameTransformer.encode(name)
            Seq(s"$$$encoded", "$anonfun")
          case _ => Seq("$anonfun")
        }
      lastParts ++ firstParts
    }
    // True when every expected part occurs in `name` in order, and the remainder
    // after the last part contains no further anonymous-class marker.
    private def checkParts(name: String): Boolean = {
      var nameTail = name
      updateParts()
      for (part <- classJVMNameParts) {
        val index = nameTail.indexOf(part)
        if (index >= 0) {
          nameTail = nameTail.substring(index + part.length)
        }
        else return false
      }
      nameTail.indexOf("$anon") == -1
    }
    /** Recomputes the expected name parts when the file's indy-lambda status changed
      * (or when they were never computed). */
    def updateParts(): Unit = {
      val newValue = isCompiledWithIndyLambdas(containingFile)
      if (newValue != compiledWithIndyLambdas || classJVMNameParts == null) {
        compiledWithIndyLambdas = newValue
        classJVMNameParts = computeClassJVMNameParts
      }
    }
    /** True when `refType`'s name matches this element's expected compiled name. */
    def matches(refType: ReferenceType): Boolean = {
      val refTypeSourceName = cachedSourceName(refType).getOrElse("")
      if (refTypeSourceName != sourceName && !isGeneratedForCompilingEvaluator) return false
      val name = refType.name()
      exactName match {
        case Some(qName) => qName == name || qName.stripSuffix("$class") == name
        case None => checkParts(name)
      }
    }
  }
private object NamePattern {
def forElement(elem: PsiElement): NamePattern = {
if (elem == null || !ScalaEvaluatorBuilderUtil.isGenerateClass(elem)) return null
val cacheProvider = new CachedValueProvider[NamePattern] {
override def compute(): Result[NamePattern] = Result.create(new NamePattern(elem), elem)
}
CachedValuesManager.getCachedValue(elem, cacheProvider)
}
}
  /** Per-debug-process caches used by ScalaPositionManager. All caches are cleared
    * automatically when the debug process detaches. */
  private[debugger] class ScalaPositionManagerCaches(debugProcess: DebugProcess) {
    // Self-cleaning: drop everything and unregister once the process goes away.
    debugProcess.addDebugProcessListener(new DebugProcessAdapter {
      override def processDetached(process: DebugProcess, closedByUser: Boolean): Unit = {
        clear()
        process.removeDebugProcessListener(this)
      }
    })
    val refTypeToFileCache = mutable.HashMap[ReferenceType, PsiFile]()
    val refTypeToElementCache = mutable.HashMap[ReferenceType, Option[SmartPsiElementPointer[PsiElement]]]()
    val customizedLocationsCache = mutable.HashMap[Location, Int]()
    val lineToCustomizedLocationCache = mutable.HashMap[(ReferenceType, Int), Seq[Location]]()
    val seenRefTypes = mutable.Set[ReferenceType]()
    val sourceNames = mutable.HashMap[ReferenceType, Option[String]]()
    // Memoizes refType.sourceName(), which may throw (e.g. absent debug information);
    // failures are cached as None.
    def cachedSourceName(refType: ReferenceType): Option[String] =
      sourceNames.getOrElseUpdate(refType, Try(refType.sourceName()).toOption)
    def clear(): Unit = {
      isCompiledWithIndyLambdasCache.clear()
      refTypeToFileCache.clear()
      refTypeToElementCache.clear()
      customizedLocationsCache.clear()
      lineToCustomizedLocationCache.clear()
      seenRefTypes.clear()
      sourceNames.clear()
    }
  }
} | whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/debugger/ScalaPositionManager.scala | Scala | apache-2.0 | 39,416 |
package models.data
import main.ParsedAppScanSourceXmlData
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
class AppScanStringMatcher
{
	/**
	 * Resolves the numeric string references in each taint record to their actual string
	 * values (currently just `arg_name`) and indexes the results by the taint's `id`
	 * so they can be looked up later.
	 *
	 * @param parsedData The container object holding all data parsed from the AppScan Source XML
	 * @return The list of taints, with strings resolved, keyed by taint id
	 */
	def createTaintStrings( parsedData : ParsedAppScanSourceXmlData ) : mutable.ListMap[ String, Map[ String, String ] ] =
	{
		// Fetch the collections once up front instead of on every loop iteration
		val taints = parsedData.getTaints
		val strings = parsedData.getStrings
		// Result map: taint id -> resolved taint attributes.
		// mutable.ListMap is mutated in place via +=, so a val suffices (no reassignment).
		val taintStrings = new mutable.ListMap[ String, Map[ String, String ] ]
		for( taint <- taints )
		{
			// Builder for this taint's resolved attributes
			val taintString = new mutable.ListMap[ String, String ]
			// Argument type record, currently unused
			taintString += ( "arg" -> taint.get( "arg" ).getOrElse( null ) )
			// Argument name, resolved through the string table
			taintString += ( "arg_name" -> strings.get( taint.get( "arg_name" ).getOrElse( null ) ).getOrElse( null ) )
			// Direction indicator, currently unused (the diagram is built from a parent record)
			taintString += ( "dir" -> taint.get( "dir" ).getOrElse( null ) )
			// Taint identifier
			taintString += ( "taint_id" -> taint.get( "id" ).getOrElse( null ) )
			// Site identifier
			// NOTE(review): reads the "id" key, the same value stored as taint_id above —
			// verify whether taint.get( "site_id" ) was intended here.
			taintString += ( "site_id" -> taint.get( "id" ).getOrElse( null ) )
			// Trace type record
			// TODO: Create lookup table for the trace type values
			taintString += ( "trace_type" -> taint.get( "trace_type" ).getOrElse( null ) )
			// Freeze the attributes and index them by taint id for later lookup
			taintStrings += ( taint.get( "id" ).getOrElse( null ) -> taintString.toMap )
		}
		taintStrings
	}
	/**
	 * Resolves the string and file references of every site record and stores the
	 * fully mapped sites on the parsed-data container.
	 *
	 * @param siteList List of sites populated with string number mappings
	 * @param strings Lookup table of string ids to string values
	 * @param files Lookup table of file ids to file names
	 * @param parsedData Container object that receives the resolved site data
	 */
	def mapStringToSites( siteList : ListBuffer[ Map[ String, String ] ], strings : mutable.ListMap[ String, String ],
						  files: mutable.ListMap[ String, String ], parsedData : ParsedAppScanSourceXmlData )
	{
		// Iterate through the set of sites and map string and file values onto them
		for( site <- siteList )
		{
			// Builder for this site's fully resolved attributes (mutated in place, so val)
			val siteStrings = new mutable.ListMap[ String, String ]
			// File name resolved from the file id
			siteStrings += ( "file" -> files.get( site.get( "file_id" ).getOrElse( null ) ).getOrElse( null ) )
			// Caller name (usually a method, sometimes a class), resolved from the string table
			siteStrings += ( "caller" -> strings.get( site.get( "caller" ).getOrElse( null ) ).getOrElse( null ) )
			// Column number, currently unused
			siteStrings += ( "cn" -> site.get( "cn" ).getOrElse( null ) )
			// Context data, resolved from the string table
			siteStrings += ( "ctx" -> strings.get( site.get( "ctx" ).getOrElse( null ) ).getOrElse( null ) )
			// Line number of the vulnerability
			siteStrings += ( "ln" -> site.get( "ln" ).getOrElse( null ) )
			// Vulnerable method identifier, resolved from the string table
			siteStrings += ( "method" -> strings.get( site.get( "method" ).getOrElse( null ) ).getOrElse( null ) )
			// Ordinal value of the vulnerability, currently unused
			siteStrings += ( "ord" -> site.get( "ord" ).getOrElse( null ) )
			// Signature of the vulnerability (appears deprecated), currently unused
			siteStrings += ( "sig" -> site.get( "sig" ).getOrElse( null ) )
			// Entity name for the vulnerability, if one exists
			if( site.get( "entity_name" ).getOrElse( null ) != null )
			{
				siteStrings += ( "entity_name" -> strings.get( site( "entity_name" ) ).getOrElse( null ) )
			}
			// Entity type for this vulnerability, if one exists
			if( site.get( "entity_type" ).getOrElse( null ) != null )
			{
				siteStrings += ( "entity_type" -> strings.get( site( "entity_type" ) ).getOrElse( null ) )
			}
			// Hand the resolved site to the container object for use later
			parsedData.addSiteStrings( site.get( "id" ).getOrElse( null ), siteStrings.toMap )
		}
	}
}
/*
* This file is part of Apparat.
*
* Copyright (C) 2010 Joa Ebert
* http://www.joa-ebert.com/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package apparat.abc
/** Constant-pool tag values for the multiname kinds of the ABC (ActionScript
  * Byte Code / AVM2) file format. */
object AbcNameKind {
	val QName = 0x07
	val QNameA = 0x0d
	val RTQName = 0x0f
	val RTQNameA = 0x10
	val RTQNameL = 0x11
	val RTQNameLA = 0x12
	val Multiname = 0x09
	val MultinameA = 0x0e
	val MultinameL = 0x1b
	val MultinameLA = 0x1c
	val Typename = 0x1d
}
sealed abstract class AbcName(val kind: Int) {
	/** True for the RTQName* kinds, whose namespace and/or name are supplied at runtime. */
	def isRuntimeName = {
		import AbcNameKind._
		kind == RTQName || kind == RTQNameA || kind == RTQNameL || kind == RTQNameLA
	}
}
// Qualified name: fixed namespace and name.
case class AbcQName(name: Symbol, namespace: AbcNamespace) extends AbcName(AbcNameKind.QName)
// Attribute ("A") variant of AbcQName.
case class AbcQNameA(name: Symbol, namespace: AbcNamespace) extends AbcName(AbcNameKind.QNameA)
// Runtime-qualified name: name is fixed, namespace comes from the runtime stack.
case class AbcRTQName(name: Symbol) extends AbcName(AbcNameKind.RTQName)
case class AbcRTQNameA(name: Symbol) extends AbcName(AbcNameKind.RTQNameA)
// "Late" runtime-qualified names: both namespace and name come from the runtime stack,
// so these carry no data and are singletons.
case object AbcRTQNameL extends AbcName(AbcNameKind.RTQNameL)
case object AbcRTQNameLA extends AbcName(AbcNameKind.RTQNameLA)
// Multiname: fixed name with a set of candidate namespaces.
case class AbcMultiname(name: Symbol, nsset: AbcNSSet) extends AbcName(AbcNameKind.Multiname)
case class AbcMultinameA(name: Symbol, nsset: AbcNSSet) extends AbcName(AbcNameKind.MultinameA)
// "Late" multinames: name comes from the runtime stack, namespaces are fixed.
case class AbcMultinameL(nsset: AbcNSSet) extends AbcName(AbcNameKind.MultinameL)
case class AbcMultinameLA(nsset: AbcNSSet) extends AbcName(AbcNameKind.MultinameLA)
// Parameterized type name (e.g. Vector.<T>).
// NOTE(review): `parameters` is an Array, so the generated equals/hashCode compare it
// by reference — two structurally equal Typenames may compare unequal.
case class AbcTypename(name: AbcQName, parameters: Array[AbcName]) extends AbcName(AbcNameKind.Typename) {
	override def toString = "AbcTypename(" + name + ", [" + (parameters mkString ", ") + "])"
}
| joa/apparat | apparat-core/src/main/scala/apparat/abc/AbcName.scala | Scala | lgpl-2.1 | 2,356 |
package controllers
import Common.PrototypeHtml
import helpers.TestWithApplication
import helpers.changekeeper.CookieFactoryForUnitSpecs
import helpers.UnitSpec
import org.mockito.Mockito.when
import pages.changekeeper.NewKeeperChooseYourAddressPage
import pages.changekeeper.PrivateKeeperDetailsPage.DayDateOfBirthValid
import pages.changekeeper.PrivateKeeperDetailsPage.DriverNumberValid
import pages.changekeeper.PrivateKeeperDetailsPage.EmailValid
import pages.changekeeper.PrivateKeeperDetailsPage.FirstNameValid
import pages.changekeeper.PrivateKeeperDetailsPage.LastNameValid
import pages.changekeeper.PrivateKeeperDetailsPage.MonthDateOfBirthValid
import pages.changekeeper.PrivateKeeperDetailsPage.PostcodeValid
import pages.changekeeper.PrivateKeeperDetailsPage.YearDateOfBirthValid
import pages.changekeeper.VehicleLookupPage
import play.api.i18n.Messages
import play.api.test.FakeRequest
import play.api.test.Helpers.{BAD_REQUEST, contentAsString, defaultAwaitTimeout, LOCATION, OK}
import uk.gov.dvla.vehicles.presentation.common
import common.clientsidesession.ClientSideSessionFactory
import common.mappings.Email.{EmailId => EmailEnterId, EmailVerifyId}
import common.mappings.OptionalToggle
import common.mappings.TitlePickerString
import common.mappings.TitlePickerString.standardOptions
import common.mappings.TitleType
import common.model.PrivateKeeperDetailsFormModel.Form.DriverNumberId
import common.model.PrivateKeeperDetailsFormModel.Form.EmailId
import common.model.PrivateKeeperDetailsFormModel.Form.EmailOptionId
import common.model.PrivateKeeperDetailsFormModel.Form.FirstNameId
import common.model.PrivateKeeperDetailsFormModel.Form.LastNameId
import common.model.PrivateKeeperDetailsFormModel.Form.PostcodeId
import common.model.PrivateKeeperDetailsFormModel.Form.TitleId
import common.services.DateService
import utils.helpers.Config
/** Unit tests for the PrivateKeeperDetails controller: page rendering, population of
  * form fields from session cookies, and validation behaviour on submit. */
class PrivateKeeperDetailsUnitSpec extends UnitSpec {
  // GET /private-keeper-details behaviour
  "present" should {
    "display the page" in new TestWithApplication {
      whenReady(present) { r =>
        r.header.status should equal(OK)
      }
    }
    "display prototype message when config set to true" in new TestWithApplication {
      contentAsString(present) should include(PrototypeHtml)
    }
    "not display prototype message when config set to false" in new TestWithApplication {
      val request = FakeRequest()
      implicit val clientSideSessionFactory = injector.getInstance(classOf[ClientSideSessionFactory])
      implicit val config = mock[Config]
      implicit val dateService = injector.getInstance(classOf[DateService])
      // Controller built manually so the mocked config (banner hidden) is picked up.
      when(config.isPrototypeBannerVisible).thenReturn(false)
      val privateKeeperDetailsPrototypeNotVisible = new PrivateKeeperDetails()
      val result = privateKeeperDetailsPrototypeNotVisible.present(request)
      contentAsString(result) should not include PrototypeHtml
    }
    "display populated fields when cookie exists" in new TestWithApplication {
      val request = FakeRequest()
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
        .withCookies(CookieFactoryForUnitSpecs.privateKeeperDetailsModel())
      val result = privateKeeperDetails.present(request)
      val content = contentAsString(result)
      content should include(Messages(standardOptions.head))
      content should include(FirstNameValid)
      content should include(LastNameValid)
      content should include(DayDateOfBirthValid)
      content should include(MonthDateOfBirthValid)
      content should include(YearDateOfBirthValid)
      content should include(EmailValid)
    }
    "display populated other title when cookie exists" in new TestWithApplication {
      val request = FakeRequest()
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
        .withCookies(CookieFactoryForUnitSpecs.privateKeeperDetailsModel(title = TitleType(4, "otherTitle")))
      val result = privateKeeperDetails.present(request)
      val content = contentAsString(result)
      content should include("otherTitle")
    }
    "display empty fields when cookie does not exist" in new TestWithApplication {
      val request = FakeRequest()
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
        .withCookies(CookieFactoryForUnitSpecs.privateKeeperDetailsModel())
      val result = privateKeeperDetails.present(request)
      val content = contentAsString(result)
      content should include(Messages(standardOptions.head))
      content should not include "selected"
    }
    "redirect to vehicle lookup page when no cookie is present" in new TestWithApplication {
      val request = FakeRequest()
      val result = privateKeeperDetails.present(request)
      whenReady(result) { r =>
        r.header.headers.get(LOCATION) should equal(Some(VehicleLookupPage.address))
      }
    }
  }
  // POST /private-keeper-details behaviour
  "submit" should {
    "redirect to next page when mandatory fields are complete" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(email = "avalid@email.address")
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      whenReady(result) { r =>
        r.header.headers.get(LOCATION) should equal (Some(NewKeeperChooseYourAddressPage.address))
      }
    }
    "redirect to next page when all fields are complete" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest()
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      whenReady(result) { r =>
        r.header.headers.get(LOCATION) should equal (Some(NewKeeperChooseYourAddressPage.address))
      }
    }
    "redirect to vehicle lookup page when no cookie is present" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(title = "2")
      val result = privateKeeperDetails.submit(request)
      whenReady(result) { r =>
        r.header.headers.get(LOCATION) should equal(Some(VehicleLookupPage.address))
      }
    }
    "return a bad request if no details are entered" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(title = "",
        firstName = "",
        lastName = "")
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      whenReady(result) { r =>
        r.header.status should equal(BAD_REQUEST)
      }
    }
    "replace required error message for first name with standard error message" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(firstName = "")
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      val errorMessage = "First name - Must contain between 1 and 25 characters from the following A-Z, " +
        "hyphen, apostrophe, full stop and space"
      // The message must appear exactly once in the rendered page.
      val count = errorMessage.r.findAllIn(contentAsString(result)).length
      count should equal(1)
    }
    "replace required error message for last name with standard error message" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(lastName = "")
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      val errorMessage = "Last name - Must contain between 1 and 25 characters from the following A-Z, " +
        "hyphen, apostrophe, full stop and space"
      val count = errorMessage.r.findAllIn(contentAsString(result)).length
      count should equal(1)
    }
    "replace required error message for postcode with standard error message" in new TestWithApplication {
      val request = buildCorrectlyPopulatedRequest(postcode = "")
        .withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
      val result = privateKeeperDetails.submit(request)
      val errorMessage = "Must be between five and eight characters and in a valid format, e.g. AB1 2BA or AB12BA"
      // Appears twice: once in the summary and once next to the field (presumably — verify in template).
      val count = errorMessage.r.findAllIn(contentAsString(result)).length
      count should equal(2)
    }
  }
  // Builds a form submission with valid defaults; individual fields can be overridden
  // (e.g. blanked) to trigger specific validation failures.
  private def buildCorrectlyPopulatedRequest(title: String = "1",
                                             firstName: String = FirstNameValid,
                                             lastName: String = LastNameValid,
                                             email: String = EmailValid,
                                             driverNumber: String = DriverNumberValid,
                                             postcode: String = PostcodeValid) = {
    FakeRequest().withFormUrlEncodedBody(
      s"$TitleId.${TitlePickerString.TitleRadioKey}" -> title,
      FirstNameId -> firstName,
      LastNameId -> lastName,
      EmailOptionId -> OptionalToggle.Visible,
      s"$EmailId.$EmailEnterId" -> email,
      s"$EmailId.$EmailVerifyId" -> email,
      DriverNumberId -> driverNumber,
      PostcodeId -> postcode
    )
  }
  // Controller under test, resolved from the Guice injector.
  private lazy val privateKeeperDetails = {
    injector.getInstance(classOf[PrivateKeeperDetails])
  }
  // Result of a GET with the vehicle-details cookie present; shared by the "present" tests.
  private lazy val present = {
    val request = FakeRequest().
      withCookies(CookieFactoryForUnitSpecs.vehicleAndKeeperDetailsModel())
    privateKeeperDetails.present(request)
  }
}
| dvla/vehicles-change-keeper-online | test/controllers/PrivateKeeperDetailsUnitSpec.scala | Scala | mit | 9,464 |
// code-examples/Rounding/match-list-script.scala
// Sample inputs (names presumably carried over from a related book example —
// processList below handles all of them the same way, including the empty list).
val willWork = List(1, 3, 23, 90)
val willNotWork = List(4, 18, 52)
val empty = List()
// Prints every element of the list followed by a space, then ends the line.
// Recursively deconstructs the list with pattern matching.
def processList(l: List[Any]): Unit = l match {
  case Nil => println("")
  case first :: rest =>
    format("%s ", first)
    processList(rest)
}
// Print each sample list on its own line, prefixed with "List: ".
for (l <- List(willWork, willNotWork, empty)) {
  print("List: ")
  processList(l)
}
| XClouded/t4f-core | scala/src/tmp/Rounding/match-list-script.scala | Scala | apache-2.0 | 371 |
//
// Scaled Kotlin Project Support - Kotlin project support for Scaled project framework.
// http://github.com/scaled/kotlin-project/blob/master/LICENSE
package scaled.project
import java.nio.file.{Files, Path}
import scaled._
import scaled.pacman.{Pacman, RepoId, Filez}
import scaled.util.{BufferBuilder, Chars, Errors, SubProcess}
object KotlinCompiler {
  // Matches compiler diagnostics of the form:
  //   /foo/bar/baz.kt:LL:CC: error: some error message
  // Groups: 1 = path, 2 = line, 3 = column, 4 = severity, 5 = message.
  // NOTE: inside a triple-quoted string backslashes are literal, so the digit class
  // must be written \d — the previous \\d produced the pattern \\d, which matches a
  // literal backslash followed by 'd' and could never match real compiler output
  // (nextNote parses groups 2 and 3 as integers and would never fire).
  val outputM = Matcher.regexp("""^([^:]+):(\d+):(\d+): (warning|error): (.*)""")

  /** The default version of kotlinc used if none is specified. */
  val DefaultKotlincVersion = "1.0.0-beta-1038"
}
/** Compiler implementation that shells out to the standalone Kotlin compiler
  * (resolved from the local Maven repo) and parses its diagnostics into notes. */
abstract class KotlinCompiler (proj :Project, java :JavaComponent) extends Compiler(proj) {
  import KotlinCompiler._
  /** Options to pass to `javac`. */
  def javacOpts :SeqV[String] = Seq()
  /** Options to pass to `kotlinc`. */
  def kotlincOpts :SeqV[String] = Seq()
  /** The version of the Kotlin compiler to use. */
  def kotlincVers :String = DefaultKotlincVersion
  /** The module name to supply to the kotlin compiler. */
  def moduleName :Option[String] = None
  val log = proj.metaSvc.log
  // val compileSvc = proj.metaSvc.service[KotlinCompilerService]
  // override def reset () {} // NOOP!
  // NOTE(review): reads "kotlic" — looks like a typo for "kotlinc"; confirm before changing,
  // in case the string is matched elsewhere.
  override def describeEngine = "kotlic"
  override def describeOptions (bb :BufferBuilder) {
    bb.addKeyValue("kotlinc: ", if (kotlincOpts.isEmpty) "<none>" else kotlincOpts.mkString(" "))
    bb.addKeyValue("kcvers: ", kotlincVers)
  }
  // Convenience overload: compiles the project's source dirs with its build classpath.
  protected def compile (buffer :Buffer, file :Option[Path]) =
    compile(buffer, file, proj.sourceDirs, java.buildClasspath, java.outputDir)
  /** A hook called just before we initiate compilation. */
  protected def willCompile () {}
  /** Runs kotlinc in a forked JVM. `file` == None means a full (clean) build;
    * Some(path) recompiles just that file. Returns a future completing with success. */
  protected def compile (buffer :Buffer, file :Option[Path], sourceDirs :SeqV[Path],
                         classpath :SeqV[Path], output :Path) = {
    // if we're not doing an incremental recompile, clean the output dir first
    if (!file.isDefined) {
      Filez.deleteAll(java.outputDir)
      Files.createDirectories(java.outputDir)
    }
    // now call down to the project which may copy things back into the output dir
    willCompile()
    // resolve the appropriate version of kotlinc
    val kotlincId = s"org.jetbrains.kotlin:kotlin-compiler:$kotlincVers"
    val pathSep = System.getProperty("path.separator")
    val kotlinCompilerPath = Pacman.repo.mvn.resolve(RepoId.parse(kotlincId)).values.
      mkString(pathSep)
    // enumerate the to-be-compiled source files
    val sources = Seq.builder[String]()
    def addSrc (p :Path) = if (p.getFileName.toString endsWith ".kt") sources += p.toString
    file match {
      case None => Project.onFiles(sourceDirs, addSrc)
      case Some(p) => addSrc(p)
    }
    val moduleOpts = moduleName match {
      case Some(name) => Seq("-module-name", name)
      case None => Seq()
    }
    val result = Promise[Boolean]()
    // Nothing to compile counts as success; otherwise fork the compiler.
    if (sources.isEmpty) result.succeed(true)
    else {
      // create our command line
      val cmd = Seq[String](
        "java",
        "-cp",
        kotlinCompilerPath,
        "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
        "-cp",
        classpath.mkString(pathSep),
        "-d",
        output.toString
      ) ++ kotlincOpts ++ moduleOpts ++ sources
      // fork off a java process to run the kotlin compiler
      SubProcess(SubProcess.Config(cmd.toArray, cwd=proj.root.path),
                 proj.metaSvc.exec, buffer, result.succeed)
    }
    result
  }
  /** Scans the output buffer from `start` for the next diagnostic matching `outputM`,
    * collecting continuation lines (leading whitespace) into the note's description. */
  protected def nextNote (buffer :Buffer, start :Loc) = {
    buffer.findForward(outputM, start) match {
      case Loc.None => Compiler.NoMoreNotes
      case ploc => try {
        val file = proj.root.path.resolve(outputM.group(1))
        // Compiler output is 1-based; buffer locations are 0-based.
        val eline = outputM.group(2).toInt-1
        val ecol = outputM.group(3).toInt-1
        val ekind = outputM.group(4)
        val errPre = outputM.group(5).trim
        // every line after the path with leading whitespace is part of the message
        val desc = Seq.builder[String]()
        desc += errPre
        var pnext = ploc.nextStart
        while (pnext < buffer.end && buffer.line(pnext).indexOf(Chars.isWhitespace) == 0) {
          desc += buffer.line(pnext).asString
          pnext = pnext.nextStart
        }
        (Compiler.Note(Store(file), Loc(eline, ecol), desc.build(), ekind == "error"), pnext)
      } catch {
        case e :Exception => log.log("Error parsing error buffer", e) ; Compiler.NoMoreNotes
      }
    }
  }
}
| scaled/kotlin-project | src/scala/scaled/project/KotlinCompiler.scala | Scala | bsd-3-clause | 4,485 |
package nl.rabobank.oss.rules.dsl.nl.grammar
import nl.rabobank.oss.rules.facts.SingularFact
import nl.rabobank.oss.rules.finance.nl.{Bedrag, Percentage}
import org.scalatest.{FlatSpec, Matchers}
/** Compile-time checks for the finance DSL: verifies which arithmetic combinations of
  * fact types the type system accepts or rejects. No runtime behaviour is exercised —
  * the accepted expressions merely need to compile, the rejected ones must not. */
class DslEvaluationTest extends FlatSpec with Matchers {
  // One fact per supported value type, used as operands below.
  val sutBedrag = new SingularFact[Bedrag]("testFactBedrag")
  val sutBigDecimal = new SingularFact[BigDecimal]("testFactBigDecimal")
  val sutString = new SingularFact[String]("testFactString")
  val sutPercentage = new SingularFact[Percentage]("testFactPercentage")
  it should "compile" in {
    -sutBedrag
    sutBedrag + sutBedrag
    sutBedrag - sutBedrag
    sutBedrag / sutBedrag
    sutBedrag / sutBigDecimal
    sutBedrag * sutBigDecimal
    -sutBigDecimal
    sutBigDecimal + sutBigDecimal
    sutBigDecimal - sutBigDecimal
    sutBigDecimal * sutBedrag
    sutBigDecimal / sutBigDecimal
    sutBigDecimal * sutBigDecimal
    sutBigDecimal * sutPercentage
    sutPercentage * sutBigDecimal
    sutBedrag * sutPercentage
    sutPercentage * sutBedrag
  }
  it should "not compile" in {
    // shouldNot compile takes the expression as a string and checks it at macro time.
    "-sutString" shouldNot compile
    "sutBedrag + sutString" shouldNot compile
    "sutBedrag + sutBigDecimal" shouldNot compile
    "sutBedrag - sutBigDecimal" shouldNot compile
    "sutBedrag * sutBedrag" shouldNot compile
    "sutBigDecimal + sutBedrag" shouldNot compile
    "sutBigDecimal - sutBedrag" shouldNot compile
    "-sutPercentage" shouldNot compile
    "sutPercentage + sutPercentage" shouldNot compile
    "sutPercentage - sutPercentage" shouldNot compile
  }
}
| scala-rules/scala-rules | engine/src/test/scala/nl/rabobank/oss/rules/dsl/nl/grammar/DslEvaluationTest.scala | Scala | mit | 1,534 |
package app.circumstances
import utils.WithJsBrowser
import app.FunctionalTestCommon
import utils.pageobjects.circumstances.start_of_process.GReportChangesPage
import utils.pageobjects.xml_validation.{XMLBusinessValidation, XMLCircumstancesBusinessValidation}
import utils.pageobjects.{Page, PageObjects, TestData, XmlPage}
/**
* End-to-End functional tests using input files created by Steve Moody.
* @author Jorge Migueis
* Date: 02/08/2013
*/
class FunctionalTestCase19Spec extends FunctionalTestCommon {
isolated
section("functional")
"The application Circumstances" should {
"Successfully run absolute Circumstances Test Case 19" in new WithJsBrowser with PageObjects {
val page = GReportChangesPage(context)
val circs = TestData.readTestDataFromFile("/functional_scenarios/circumstances/TestCase19.csv")
page goToThePage()
val lastPage = page runClaimWith(circs)
lastPage match {
case p: XmlPage => {
val validator: XMLBusinessValidation = new XMLCircumstancesBusinessValidation
validateAndPrintErrors(p, circs, validator) should beTrue
}
case p: Page => println(p.source)
}
}
}
section("functional")
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/app/circumstances/FunctionalTestCase19Spec.scala | Scala | mit | 1,224 |
package org.mitlware
import scalaz._
import scalaz.MonadState._
/** Read/write access to an iteration counter carried by an environment type. */
trait IterationsView {
  def getIterations : Int
  // Returns this to allow chaining after the counter has been updated.
  def setIterations( i : Int ) : this.type
}
/** State-monad accessors for the iteration counter of an [[IterationsView]] environment. */
object Iterations {
  // Read the current iteration count without changing the environment.
  def get[Env <: IterationsView] : State[Env,Int] = State {s => (s,s.getIterations) }
  // Overwrite the iteration count in the environment.
  def set[Env <: IterationsView]( i : Int ) : State[Env,Unit] = State {s => (s.setIterations(i), ())}
  // Increment the counter and return the new value.
  // TODO: replace with explicit iteration combinator since Scala tail-call optimisation hates monads
  def next[Env <: IterationsView] : State[Env,Int] = for {
    i <- Iterations.get
    _ <- Iterations.set(i+1)
  } yield (i+1)
} | JerrySwan/MitLware-scala | src/org/mitlware/Iterations.scala | Scala | bsd-3-clause | 610 |
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.junit.experimental.categories.Category
/**
* User: Dmitry.Naydanov
* Date: 27.03.16.
*/
@Category(Array(classOf[PerfCycleTests]))
class AlreadyDefinedTest extends ScalaLightCodeInsightFixtureTestAdapter {
def testSCL2101(): Unit =
checkTextHasNoErrors(
"""
|class Some(name: Int) {
| def name {""}
|}
""".stripMargin)
def testSCL5789(): Unit =
checkTextHasNoErrors(
"""
|class Test {
| private[this] val x = 1
| def x() = 2
|}
""".stripMargin)
def testSCL11277(): Unit =
checkTextHasNoErrors(
"""
|trait Functor[F[_]] {
| def map[A, B](fa: F[A])(f: A => B): F[B]
|}
|
|trait Applicative[F[_]] extends Functor[F] {
| def unit[A](a: A): F[A]
|
| def apply[A, B](fa: F[A])(fab: F[A => B]): F[B] =
| map2(fa, fab)((a, ab) => ab(a))
|
| def map2[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] =
| apply(fb)(map(fa)(f.curried))
|
| def map[A, B](fa: F[A])(f: A => B): F[B] =
| map2(fa, unit(()))((a, _) => f(a))
|
| def compose[G[_]](G: Applicative[G]): Applicative[({type f[x] = F[G[x]]})#f] = {
| val self = this
| new Applicative[({type f[x] = F[G[x]]})#f] {
| def unit[A](a: A): F[G[A]] = self.unit(G.unit(a))
| }
| }
|}
""".stripMargin)
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/failed/annotator/AlreadyDefinedTest.scala | Scala | apache-2.0 | 1,675 |
import org.stormenroute.mecha._
import sbt._
import sbt.Keys._
/** Mecha super-repo sbt build definition for the akka-docker project. */
object Build extends MechaRepoBuild {
  // Shared settings: sbt defaults, Mecha plugin defaults, then project
  // coordinates and library dependencies (super-repo siblings first).
  lazy val buildSettings = Defaults.defaultSettings ++
    MechaRepoPlugin.defaultSettings ++ Seq(
      name := "akka-docker",
      scalaVersion := "2.11.7",
      version := "0.1",
      organization := "com.cleawing",
      libraryDependencies ++= superRepoDependencies("akka-docker") ++
        Dependencies.akka ++ Dependencies.akkaStreams ++
        Seq(Dependencies.bouncyCastleProvider, Dependencies.json4s, Dependencies.scalaTest)
    )
  // Repository name used by Mecha to locate this repo inside the super-repo.
  def repoName = "akka-docker"
  // Root project; `dependsOnSuperRepo` wires in sibling repositories when
  // this project is built from within the Mecha super-repository.
  lazy val akkaDocker: Project = Project(
    "akka-docker",
    file("."),
    settings = buildSettings
  ) dependsOnSuperRepo
}
| Cleawing/akka-docker | project/Build.scala | Scala | apache-2.0 | 696 |
package pl.touk.nussknacker.engine.management
import org.apache.flink.configuration.{Configuration, CoreOptions}
import org.scalatest.{FunSuite, Matchers}
import pl.touk.nussknacker.engine.build.ScenarioBuilder
import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId
import pl.touk.nussknacker.engine.management.FlinkSlotsChecker.{NotEnoughSlotsException, SlotsBalance}
import pl.touk.nussknacker.engine.management.rest.HttpFlinkClient
import pl.touk.nussknacker.engine.management.rest.flinkRestModel._
import pl.touk.nussknacker.test.PatientScalaFutures
import sttp.client.testing.SttpBackendStub
import sttp.client.{NothingT, Response, SttpBackend, SttpClientException}
import sttp.model.{Method, StatusCode}
import java.net.ConnectException
import java.util.Collections
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
/**
 * Tests for FlinkSlotsChecker: a deployment must be rejected with
 * [[NotEnoughSlotsException]] when the scenario's parallelism exceeds the
 * cluster's free task slots, and slot checking must be silently skipped when
 * the Flink REST API is unreachable. The Flink REST API is stubbed with an
 * sttp [[SttpBackendStub]].
 */
class FlinkSlotsCheckerTest extends FunSuite with Matchers with PatientScalaFutures {
  private implicit val ec: ExecutionContext = ExecutionContext.Implicits.global
  private val config = FlinkConfig("http://test.pl", None)
  // Free slot count reported by the stubbed /overview endpoint by default.
  private val availableSlotsCount = 1000
  test("check available slots count") {
    val slotsChecker = createSlotsChecker()
    // Requesting exactly the available slot count succeeds.
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(Some(availableSlotsCount)), None).futureValue
    val requestedSlotsCount = availableSlotsCount + 1
    // One more slot than available fails with a zero pre-released balance.
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(Some(requestedSlotsCount)), None).failed.futureValue shouldEqual
      NotEnoughSlotsException(availableSlotsCount, availableSlotsCount, SlotsBalance(0, requestedSlotsCount))
  }
  test("take an account of slots that will be released be job that will be cancelled during redeploy") {
    val slotsChecker = createSlotsChecker()
    // +1 because someCurrentJobId uses one slot now
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(Some(availableSlotsCount + 1)), Some(ExternalDeploymentId("someCurrentJobId"))).futureValue
    val requestedSlotsCount = availableSlotsCount + 2
    // Even counting the 1 slot released by the cancelled job, 2 extra slots are too many.
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(Some(requestedSlotsCount)), Some(ExternalDeploymentId("someCurrentJobId"))).failed.futureValue shouldEqual
      NotEnoughSlotsException(availableSlotsCount, availableSlotsCount, SlotsBalance(1, requestedSlotsCount))
  }
  test("check available slots count when parallelism is not defined") {
    // With no explicit parallelism, Flink's default parallelism is assumed.
    val slotsChecker = createSlotsChecker(clusterOverviewResult = Success(ClusterOverview(`slots-total` = 0, `slots-available` = 0)))
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(None), None).failed.futureValue shouldEqual
      NotEnoughSlotsException(0, 0, SlotsBalance(0, CoreOptions.DEFAULT_PARALLELISM.defaultValue()))
  }
  test("omit slots checking if flink api returned error during cluster overview") {
    val slotsChecker = createSlotsChecker(clusterOverviewResult = Failure(new ConnectException("Some connect error")))
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(Some(availableSlotsCount)), None).futureValue
  }
  test("omit slots checking if flink api returned error during jobmanager config") {
    val slotsChecker = createSlotsChecker(jobManagerConfigResult = Failure(new ConnectException("Some connect error")))
    slotsChecker.checkRequiredSlotsExceedAvailableSlots(prepareCanonicalProcess(None), None).futureValue
  }
  /**
   * Builds a FlinkSlotsChecker backed by a stubbed REST API.
   * Failures placed in `clusterOverviewResult` / `jobManagerConfigResult`
   * are surfaced as sttp client exceptions by the corresponding endpoint.
   */
  private def createSlotsChecker(statuses: List[JobOverview] = List(),
                                 statusCode: StatusCode = StatusCode.Ok,
                                 clusterOverviewResult: Try[ClusterOverview] = Success(ClusterOverview(`slots-total` = 1000, `slots-available` = availableSlotsCount)),
                                 jobManagerConfigResult: Try[Configuration] = Success(Configuration.fromMap(Collections.emptyMap())) // by default a config with all default values is used
                                ): FlinkSlotsChecker = {
    import scala.collection.JavaConverters._
    val slotsChecker = createSlotsCheckerWithBackend(SttpBackendStub.asynchronousFuture.whenRequestMatchesPartial { case req =>
      // Dispatch on (path, method); unmatched requests fail with a MatchError,
      // which is intentional for a test stub.
      val toReturn = (req.uri.path, req.method) match {
        case (List("jobs", "overview"), Method.GET) =>
          JobsResponse(statuses)
        case (List("jobs", jobId, "config"), Method.GET) =>
          JobConfig(jobId, ExecutionConfig(`job-parallelism` = 1, `user-config` = Map.empty))
        case (List("overview"), Method.GET) =>
          clusterOverviewResult.recoverWith {
            case ex: Exception => Failure(SttpClientException.defaultExceptionToSttpClientException(ex).get)
          }.get
        case (List("jobmanager", "config"), Method.GET) =>
          jobManagerConfigResult.map(_.toMap.asScala.toList.map {
            case (key, value) => KeyValueEntry(key, value)
          }).recoverWith {
            case ex: Exception => Failure(SttpClientException.defaultExceptionToSttpClientException(ex).get)
          }.get
      }
      Response(Right(toReturn), statusCode)
    })
    slotsChecker
  }
  // Wires the stubbed backend into an HttpFlinkClient-based slots checker.
  private def createSlotsCheckerWithBackend(backend: SttpBackend[Future, Nothing, NothingT]): FlinkSlotsChecker = {
    implicit val b: SttpBackend[Future, Nothing, NothingT] = backend
    new FlinkSlotsChecker(new HttpFlinkClient(config))
  }
  // Minimal one-source/one-sink scenario, optionally with explicit parallelism.
  private def prepareCanonicalProcess(parallelism: Option[Int]) = {
    val baseProcessBuilder = ScenarioBuilder.streaming("processTestingTMSlots")
    parallelism.map(baseProcessBuilder.parallelism).getOrElse(baseProcessBuilder)
      .source("startProcess", "kafka-transaction")
      .emptySink("endSend", "sendSms")
      .toCanonicalProcess
  }
}
| TouK/nussknacker | engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/FlinkSlotsCheckerTest.scala | Scala | apache-2.0 | 5,747 |
/** This file is part of TextCompose, a program for producing PDF from text files.
* Copyright 2014 Jesper S Villadsen <jeschvi@gmail.com>
* License: GNU Affero General Public License version 3 or later.
* For full license text see LICENSE.txt or <http://www.gnu.org/licenses/>.
*/
package textcompose.core
import scala.collection.mutable.{ Stack, HashMap }
import scala.collection.immutable.List
import scala.io._
import scala.util.matching.Regex
import com.itextpdf.text.pdf.BaseFont
import textcompose.storage
/**
 * Global registry mapping "short" font ids to the font files that provide
 * them. Directories are scanned recursively; TrueType Collections (.ttc)
 * contribute one entry per contained font.
 */
object FontFileRegister {

  /** Directories that have already been scanned; guards against re-scanning. */
  private val directories = new Stack[String]

  /*
   * Short font Id:
   * Font file name excluding extension.
   * In the case of True Type Collection, it is instead the name
   * of each font in the collection.
   *
   * Long font Id:
   * Absolute font file name including extension.
   * Used for registering the font at the iText font factory
   * and for creating the iText base font.
   * In the case of True Type Collection, it is post-fixed a comma
   * and each index in the collection.
   */
  private val fontIdShortToLong = new HashMap[String, String]

  /** The standard PDF fonts that are available without any font file. */
  val builtInFonts = List("Courier", "Helvetica", "Times", "Symbol", "Zapfdingbats")

  /** Registers every built-in font with an empty long id (no backing file). */
  def addBuildInFonts() { for (f <- builtInFonts) fontIdShortToLong(f) = "" }

  /**
   * Recursively scans `directory` and registers every font file found.
   * Does nothing if the directory was added before or is not a directory.
   */
  def addDirectory(directory: String) {

    // First registration of a short id wins; later duplicates are ignored.
    def addFont(shortId: String, longId: String) {
      if (!fontIdShortToLong.contains(shortId)) {
        fontIdShortToLong(shortId) = longId
      }
    }

    // Registers each font of a TrueType Collection under its own name, using
    // the "<path>,<index>" long-id convention understood by iText.
    def addTrueTypeCollection(nameBeforeExtension: String, absolutePathToFont: String) {
      try {
        val names = BaseFont.enumerateTTCNames(absolutePathToFont) // TTC file may be broken
        for (i <- 0 until names.length) {
          addFont(names(i), absolutePathToFont + "," + i.toString)
        }
      } catch {
        // Broken collection: fall back to registering it as a single font file.
        case e: Exception => addFont(nameBeforeExtension, absolutePathToFont)
      }
    }

    // Registers a single font file, dispatching on its extension.
    def addFile(file: java.io.File) {
      val fileName = file.getName
      val absolutePathToFont = file.getAbsolutePath
      val (nameBeforeExtension, fileExtension) =
        storage.FileMethods.splitFileNameAtLastPeriod(fileName)
      if (fileExtension.toLowerCase() == "ttc") {
        addTrueTypeCollection(nameBeforeExtension, absolutePathToFont)
      } else {
        addFont(nameBeforeExtension, absolutePathToFont)
      }
    }

    // Depth-first traversal of the directory tree rooted at `directory`.
    def traverseDirectory(directory: String) {
      val fontDirectory = new java.io.File(directory)
      // File.listFiles() returns null on an I/O error or when the directory
      // is not readable; guard against that instead of throwing an NPE.
      val listOfFiles = Option(fontDirectory.listFiles()).getOrElse(Array.empty[java.io.File])
      for (file <- listOfFiles) {
        if (file.isDirectory()) {
          traverseDirectory(file.getAbsolutePath)
        } else {
          addFile(file)
        }
      }
    }

    if (!directories.contains(directory) && textcompose.storage.FileMethods.IsDirectory(directory)) {
      directories.push(directory)
      traverseDirectory(directory)
    }
  }

  /** Clears all registrations and re-scans every previously added directory. */
  def recalculate() {

    def clear() {
      directories.clear()
      fontIdShortToLong.clear()
    }

    val fontDirectories = directories.toList // toList to copy before clear.
    clear()
    for (d <- fontDirectories) addDirectory(d)
  }

  /** True if `shortFontId` is one of the standard built-in PDF fonts. */
  def isBuiltIn(shortFontId: String): Boolean = builtInFonts.contains(shortFontId)

  /** True if a font with the given short id has been registered. */
  def exists(shortFontId: String): Boolean = fontIdShortToLong.contains(shortFontId)

  /** Looks up the long id; throws NoSuchElementException for unknown short ids. */
  def getLongFontId(shortFontId: String): String = fontIdShortToLong(shortFontId)

  /** All registered short font ids, in no particular order. */
  def getShortFontIds: scala.collection.immutable.List[String] = fontIdShortToLong.keys.toList
} | jvilladsen/TextCompose | src/main/scala/core/FontFileRegister.scala | Scala | agpl-3.0 | 3,490 |
package dotty.tools.dotc
package parsing
package xml
import scala.language.unsafeNulls
import scala.collection.mutable
/**
* The `Utility` object provides utility functions for processing instances
* of bound and not bound XML classes, as well as escaping text nodes.
*
* @author Burak Emir
*/
object Utility {
  import util.Chars.SU

  // Predefined XML entities and the characters they unescape to.
  private val unescMap = Map(
    "lt" -> '<',
    "gt" -> '>',
    "amp" -> '&',
    "quot" -> '"',
    "apos" -> '\''
  )

  /**
   * Appends unescaped string to `s`, `amp` becomes `&`,
   * `lt` becomes `<` etc..
   *
   * @return `'''null'''` if `ref` was not a predefined entity.
   */
  private final def unescape(ref: String, s: StringBuilder): StringBuilder =
    ((unescMap get ref) map (s append _)).orNull

  /**
   * Splits an attribute value into a list of nodes: plain-text runs (built
   * via `text`) interleaved with unresolved entity references (built via
   * `entityRef`). Character references (`&#...;`) and the five predefined
   * entities are resolved inline into the surrounding text run.
   */
  def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
    val sb = new StringBuilder
    var rfb: StringBuilder = null
    val nb = new mutable.ListBuffer[T]()
    val it = value.iterator
    while (it.hasNext) {
      var c = it.next()
      // entity! flush buffer into text node
      if (c == '&') {
        c = it.next()
        if (c == '#') {
          // Numeric character reference: decode it and append the character
          // directly to the current text buffer.
          c = it.next()
          val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
          sb.append(theChar)
        }
        else {
          // Named entity reference: collect the name up to ';' into rfb
          // (allocated lazily and reused across references).
          if (rfb eq null) rfb = new StringBuilder()
          rfb append c
          c = it.next()
          while (c != ';') {
            rfb.append(c)
            c = it.next()
          }
          val ref = rfb.toString()
          rfb.clear()
          // A predefined entity is appended to sb by unescape (non-null
          // result); anything else becomes a separate entityRef node.
          unescape(ref,sb) match {
            case null =>
              if (!sb.isEmpty) { // flush buffer
                nb += text(sb.toString())
                sb.clear()
              }
              nb += entityRef(ref) // add entityref
            case _ =>
          }
        }
      }
      else sb append c
    }
    if (!sb.isEmpty) // flush buffer
      nb += text(sb.toString())
    nb.toList
  }

  /**
   * {{{
   * CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
   *           | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
   * }}}
   * See [66]
   *
   * Reads digits through the `ch`/`nextch` cursor until ';' and returns the
   * referenced character as a (possibly supplementary, length-2) String.
   */
  def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
    val hex = ch() == 'x'
    if (hex) nextch()
    val base = if (hex) 16 else 10
    var i = 0
    while (ch() != ';') {
      ch() match {
        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
          i = i * base + ch().asDigit
        case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
           | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
          if (! hex)
            reportSyntaxError("hex char not allowed in decimal char ref\n" +
                              "Did you mean to write &#x ?")
          else
            i = i * base + ch().asDigit
        case SU =>
          // End-of-input sentinel reached before ';': the reference is truncated.
          reportTruncatedError("")
        case _ =>
          reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
      }
      nextch()
    }
    // Build the result from the code point, so supplementary characters
    // (above U+FFFF) become a surrogate pair.
    new String(Array(i), 0, 1)
  }

  /** {{{
   *  (#x20 | #x9 | #xD | #xA)
   *  }}} */
  final def isSpace(ch: Char): Boolean = ch match {
    case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
    case _                                         => false
  }

  /** {{{
   *  (#x20 | #x9 | #xD | #xA)+
   *  }}} */
  final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)

  /** {{{
   *  NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
   *             | CombiningChar | Extender
   *  }}}
   *  See [4] and Appendix B of XML 1.0 specification.
   */
  def isNameChar(ch: Char): Boolean = {
    import java.lang.Character._
    // The constants represent groups Mc, Me, Mn, Lm, and Nd.

    isNameStart(ch) || (getType(ch).toByte match {
      case COMBINING_SPACING_MARK |
        ENCLOSING_MARK | NON_SPACING_MARK |
        MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
      case _ => ".-:" contains ch
    })
  }

  /** {{{
   *  NameStart ::= ( Letter | '_' )
   *  }}}
   *  where Letter means in one of the Unicode general
   *  categories `{ Ll, Lu, Lo, Lt, Nl }`.
   *
   *  We do not allow a name to start with `:`.
   *  See [3] and Appendix B of XML 1.0 specification
   */
  def isNameStart(ch: Char): Boolean = {
    import java.lang.Character._

    getType(ch).toByte match {
      case LOWERCASE_LETTER |
        UPPERCASE_LETTER | OTHER_LETTER |
        TITLECASE_LETTER | LETTER_NUMBER => true
      case _ => ch == '_'
    }
  }

  /** {{{
   *  Name ::= ( Letter | '_' ) (NameChar)*
   *  }}}
   *  See [5] of XML 1.0 specification.
   */
  def isName(s: String): Boolean =
    s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
}
| dotty-staging/dotty | compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala | Scala | apache-2.0 | 4,931 |
package controllers
import twentysix.playr._
import twentysix.playr.simple._
import play.api.mvc._
import play.api.libs.json.Json
import models._
/**
 * REST controller exposing the employees of a single company.
 * Resource resolution, listing and mutation all go through the implicit
 * [[EmployeeContainer]]; creation additionally resolves the referenced
 * person through the implicit [[PersonContainer]].
 */
case class EmployeeController(company: Company)(implicit employeeContainer: EmployeeContainer, personContainer: PersonContainer) extends RestRwdController[Employee] with LoggingFilter{
  val name = "employee"
  implicit val employeeFormat = Json.format[Employee]
  // Resolves a resource id from its string form; None for non-numeric or unknown ids.
  def fromId(sid: String) = toInt(sid).flatMap(id => employeeContainer.get(id))
  // Lists only the employees that belong to this controller's company.
  def list = Action { Ok(Json.toJson(employeeContainer.filterList(_.companyId==company.id))) }
  def read(employee: Employee) = Action { Ok(Json.toJson(employee)) }
  def delete(employee: Employee) = Action {
    employeeContainer.delete(employee)
    NoContent
  }
  // Updates only the `function` field; the plain-text request body is the new value.
  def update(employee: Employee) = Action { request =>
    request.body.asText match {
      case Some(function) =>
        Ok(Json.toJson(employeeContainer.update(employee.copy(function=function))))
      case None => BadRequest("Invalid name")
    }
  }
  // Creates an employee from JSON: requires `personId` (of an existing person)
  // and `function`; responds 400 when either is missing or unresolvable.
  // NOTE(review): `\\` is Play-JSON recursive lookup (returns Seq[JsValue]);
  // a single-field lookup `\` looks intended here — verify against the
  // original sources (may be a transcription artifact).
  def create = Action(parse.json) { request =>
    val employee = for {
      personId <- (request.body \\ "personId").asOpt[Int]
      person <- personContainer.get(personId)
      function <- (request.body \\ "function").asOpt[String]
    } yield employeeContainer.add(company, person, function)
    employee.map(e=>Created(Json.toJson(e))).getOrElse(BadRequest)
  }
  // Sub-resource endpoint returning the employee's function as plain text.
  def function(employee: Employee) = Action( Ok(employee.function) )
}
| 26lights/PlayR-swagger | samples/playr-swagger-tutorial/app/controllers/EmployeeController.scala | Scala | bsd-3-clause | 1,471 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.expressions.objects.AssertNotNull
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
import org.apache.spark.sql.types._
/**
 * Evaluation tests for null-handling expressions: IsNull/IsNotNull, IsNaN,
 * NaNvl, Coalesce, Nvl type coercion and AtLeastNNonNulls, including codegen
 * regression tests for very wide argument lists (JVM 64KB method limit) and
 * for the number of generated global variables.
 */
class NullExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

  // Runs the given check once per representative literal of every major data type.
  def testAllTypes(testFunc: (Any, DataType) => Unit): Unit = {
    testFunc(false, BooleanType)
    testFunc(1.toByte, ByteType)
    testFunc(1.toShort, ShortType)
    testFunc(1, IntegerType)
    testFunc(1L, LongType)
    testFunc(1.0F, FloatType)
    testFunc(1.0, DoubleType)
    testFunc(Decimal(1.5), DecimalType(2, 1))
    testFunc(new java.sql.Date(10), DateType)
    testFunc(new java.sql.Timestamp(10), TimestampType)
    testFunc("abcd", StringType)
  }

  test("isnull and isnotnull") {
    testAllTypes { (value: Any, tpe: DataType) =>
      checkEvaluation(IsNull(Literal.create(value, tpe)), false)
      checkEvaluation(IsNotNull(Literal.create(value, tpe)), true)
      checkEvaluation(IsNull(Literal.create(null, tpe)), true)
      checkEvaluation(IsNotNull(Literal.create(null, tpe)), false)
    }
  }

  // AssertNotNull must raise a runtime error when the wrapped value is null.
  test("AssertNotNUll") {
    val ex = intercept[RuntimeException] {
      evaluateWithoutCodegen(AssertNotNull(Literal(null), Seq.empty[String]))
    }.getMessage
    assert(ex.contains("Null value appeared in non-nullable field"))
  }

  test("IsNaN") {
    checkEvaluation(IsNaN(Literal(Double.NaN)), true)
    checkEvaluation(IsNaN(Literal(Float.NaN)), true)
    checkEvaluation(IsNaN(Literal(math.log(-3))), true)
    // A null double is not NaN, and neither are infinities or finite values.
    checkEvaluation(IsNaN(Literal.create(null, DoubleType)), false)
    checkEvaluation(IsNaN(Literal(Double.PositiveInfinity)), false)
    checkEvaluation(IsNaN(Literal(Float.MaxValue)), false)
    checkEvaluation(IsNaN(Literal(5.5f)), false)
  }

  // NaNvl(a, b) yields b when a is NaN, otherwise a; null passes through.
  test("nanvl") {
    checkEvaluation(NaNvl(Literal(5.0), Literal.create(null, DoubleType)), 5.0)
    checkEvaluation(NaNvl(Literal.create(null, DoubleType), Literal(5.0)), null)
    checkEvaluation(NaNvl(Literal.create(null, DoubleType), Literal(Double.NaN)), null)
    checkEvaluation(NaNvl(Literal(Double.NaN), Literal(5.0)), 5.0)
    checkEvaluation(NaNvl(Literal(Double.NaN), Literal.create(null, DoubleType)), null)
    assert(NaNvl(Literal(Double.NaN), Literal(Double.NaN)).
      eval(EmptyRow).asInstanceOf[Double].isNaN)
  }

  test("coalesce") {
    testAllTypes { (value: Any, tpe: DataType) =>
      val lit = Literal.create(value, tpe)
      val nullLit = Literal.create(null, tpe)
      checkEvaluation(Coalesce(Seq(nullLit)), null)
      checkEvaluation(Coalesce(Seq(lit)), value)
      checkEvaluation(Coalesce(Seq(nullLit, lit)), value)
      checkEvaluation(Coalesce(Seq(nullLit, lit, lit)), value)
      checkEvaluation(Coalesce(Seq(nullLit, nullLit, lit)), value)
    }

    // The result type widens containsNull across the children's array types.
    val coalesce = Coalesce(Seq(
      Literal.create(null, ArrayType(IntegerType, containsNull = false)),
      Literal.create(Seq(1, 2, 3), ArrayType(IntegerType, containsNull = false)),
      Literal.create(Seq(1, 2, 3, null), ArrayType(IntegerType, containsNull = true))))
    assert(coalesce.dataType === ArrayType(IntegerType, containsNull = true))
    checkEvaluation(coalesce, Seq(1, 2, 3))
  }

  // Verifies the result type chosen by the analyzer for Nvl over mixed
  // numeric/string/null operand types.
  test("SPARK-16602 Nvl should support numeric-string cases") {
    // Resolves the expression through the analyzer and returns the analyzed form.
    def analyze(expr: Expression): Expression = {
      val relation = LocalRelation()
      SimpleAnalyzer.execute(Project(Seq(Alias(expr, "c")()), relation)).expressions.head
    }

    val intLit = Literal.create(1, IntegerType)
    val doubleLit = Literal.create(2.2, DoubleType)
    val stringLit = Literal.create("c", StringType)
    val nullLit = Literal.create(null, NullType)
    val floatNullLit = Literal.create(null, FloatType)
    val floatLit = Literal.create(1.01f, FloatType)
    val timestampLit = Literal.create("2017-04-12", TimestampType)
    val decimalLit = Literal.create(10.2, DecimalType(20, 2))

    assert(analyze(new Nvl(decimalLit, stringLit)).dataType == StringType)
    assert(analyze(new Nvl(doubleLit, decimalLit)).dataType == DoubleType)
    assert(analyze(new Nvl(decimalLit, doubleLit)).dataType == DoubleType)
    assert(analyze(new Nvl(decimalLit, floatLit)).dataType == DoubleType)
    assert(analyze(new Nvl(floatLit, decimalLit)).dataType == DoubleType)

    assert(analyze(new Nvl(timestampLit, stringLit)).dataType == StringType)
    assert(analyze(new Nvl(intLit, doubleLit)).dataType == DoubleType)
    assert(analyze(new Nvl(intLit, stringLit)).dataType == StringType)
    assert(analyze(new Nvl(stringLit, doubleLit)).dataType == StringType)
    assert(analyze(new Nvl(doubleLit, stringLit)).dataType == StringType)

    assert(analyze(new Nvl(nullLit, intLit)).dataType == IntegerType)
    assert(analyze(new Nvl(doubleLit, nullLit)).dataType == DoubleType)
    assert(analyze(new Nvl(nullLit, stringLit)).dataType == StringType)

    assert(analyze(new Nvl(floatLit, stringLit)).dataType == StringType)
    assert(analyze(new Nvl(floatLit, doubleLit)).dataType == DoubleType)
    assert(analyze(new Nvl(floatNullLit, intLit)).dataType == FloatType)
  }

  // NaN values count as non-null; SQL nulls do not.
  test("AtLeastNNonNulls") {
    val mix = Seq(Literal("x"),
      Literal.create(null, StringType),
      Literal.create(null, DoubleType),
      Literal(Double.NaN),
      Literal(5f))

    val nanOnly = Seq(Literal("x"),
      Literal(10.0),
      Literal(Float.NaN),
      Literal(math.log(-2)),
      Literal(Double.MaxValue))

    val nullOnly = Seq(Literal("x"),
      Literal.create(null, DoubleType),
      Literal.create(null, DecimalType.USER_DEFAULT),
      Literal(Float.MaxValue),
      Literal(false))

    checkEvaluation(AtLeastNNonNulls(2, mix), true, EmptyRow)
    checkEvaluation(AtLeastNNonNulls(3, mix), false, EmptyRow)
    checkEvaluation(AtLeastNNonNulls(3, nanOnly), true, EmptyRow)
    checkEvaluation(AtLeastNNonNulls(4, nanOnly), false, EmptyRow)
    checkEvaluation(AtLeastNNonNulls(3, nullOnly), true, EmptyRow)
    checkEvaluation(AtLeastNNonNulls(4, nullOnly), false, EmptyRow)
  }

  // Regression: generated code for wide Coalesce must stay under the 64KB limit.
  test("Coalesce should not throw 64kb exception") {
    val inputs = (1 to 2500).map(x => Literal(s"x_$x"))
    checkEvaluation(Coalesce(inputs), "x_1")
  }

  test("SPARK-22705: Coalesce should use less global variables") {
    val ctx = new CodegenContext()
    Coalesce(Seq(Literal("a"), Literal("b"))).genCode(ctx)
    assert(ctx.inlinedMutableStates.size == 1)
  }

  // Regression: generated code for wide AtLeastNNonNulls must stay under the 64KB limit.
  test("AtLeastNNonNulls should not throw 64kb exception") {
    val inputs = (1 to 4000).map(x => Literal(s"x_$x"))
    checkEvaluation(AtLeastNNonNulls(1, inputs), true)
  }
}
| tejasapatil/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NullExpressionsSuite.scala | Scala | apache-2.0 | 7,506 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool
import java.time.Instant
import akka.actor.Status.{Failure => FailureMessage}
import akka.actor.{FSM, Props, Stash}
import akka.event.Logging.InfoLevel
import akka.pattern.pipe
import pureconfig._
import pureconfig.generic.auto._
import scala.collection.immutable
import spray.json.DefaultJsonProtocol._
import spray.json._
import org.apache.openwhisk.common.{AkkaLogging, Counter, LoggingMarkers, TransactionId}
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.connector.{
ActivationMessage,
CombinedCompletionAndResultMessage,
CompletionMessage,
ResultMessage
}
import org.apache.openwhisk.core.containerpool.logging.LogCollectingException
import org.apache.openwhisk.core.database.UserContext
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.invoker.InvokerReactive.{ActiveAck, LogsCollector}
import org.apache.openwhisk.http.Messages
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}
// States
/** The lifecycle states a container proxy FSM moves through. */
sealed trait ContainerState
case object Uninitialized extends ContainerState
case object Starting extends ContainerState
case object Started extends ContainerState
case object Running extends ContainerState
case object Ready extends ContainerState
case object Pausing extends ContainerState
case object Paused extends ContainerState
case object Removing extends ContainerState
// Data
/** Base data type carried by the container FSM alongside its state. */
sealed abstract class ContainerData(val lastUsed: Instant, val memoryLimit: ByteSize, val activeActivationCount: Int) {

  /** When ContainerProxy in this state is scheduled, it may result in a new state (ContainerData)*/
  def nextRun(r: Run): ContainerData

  /**
   * Return Some(container) (for ContainerStarted instances) or None(for ContainerNotStarted instances)
   * Useful for cases where all ContainerData instances are handled, vs cases where only ContainerStarted
   * instances are handled */
  def getContainer: Option[Container]

  /** String to indicate the state of this container after scheduling */
  val initingState: String

  /** Indicates whether this container can service additional activations */
  def hasCapacity(): Boolean
}

/** abstract type to indicate an unstarted container */
sealed abstract class ContainerNotStarted(override val lastUsed: Instant,
                                          override val memoryLimit: ByteSize,
                                          override val activeActivationCount: Int)
    extends ContainerData(lastUsed, memoryLimit, activeActivationCount) {
  override def getContainer = None
  override val initingState = "cold"
}

/** abstract type to indicate a started container */
sealed abstract class ContainerStarted(val container: Container,
                                       override val lastUsed: Instant,
                                       override val memoryLimit: ByteSize,
                                       override val activeActivationCount: Int)
    extends ContainerData(lastUsed, memoryLimit, activeActivationCount) {
  override def getContainer = Some(container)
}

/** trait representing a container that is in use and (potentially) usable by subsequent or concurrent activations */
sealed abstract trait ContainerInUse {
  val activeActivationCount: Int
  val action: ExecutableWhiskAction
  // Capacity is bounded by the action's own intra-container concurrency limit.
  def hasCapacity() =
    activeActivationCount < action.limits.concurrency.maxConcurrent
}

/** trait representing a container that is NOT in use and is usable by subsequent activation(s) */
sealed abstract trait ContainerNotInUse {
  def hasCapacity() = true
}

/** type representing a cold (not running) container */
case class NoData(override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(Instant.EPOCH, 0.B, activeActivationCount)
    with ContainerNotInUse {
  // Scheduling a run on a cold container begins warming it for the job's action.
  override def nextRun(r: Run) = WarmingColdData(r.msg.user.namespace.name, r.action, Instant.now, 1)
}

/** type representing a cold (not running) container with specific memory allocation */
case class MemoryData(override val memoryLimit: ByteSize, override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(Instant.EPOCH, memoryLimit, activeActivationCount)
    with ContainerNotInUse {
  override def nextRun(r: Run) = WarmingColdData(r.msg.user.namespace.name, r.action, Instant.now, 1)
}

/** type representing a prewarmed (running, but unused) container (with a specific memory allocation) */
case class PreWarmedData(override val container: Container,
                         kind: String,
                         override val memoryLimit: ByteSize,
                         override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, Instant.EPOCH, memoryLimit, activeActivationCount)
    with ContainerNotInUse {
  override val initingState = "prewarmed"
  // A prewarmed container picked up for a job starts initializing for that action.
  override def nextRun(r: Run) =
    WarmingData(container, r.msg.user.namespace.name, r.action, Instant.now, 1)
}

/** type representing a prewarm (running, but not used) container that is being initialized (for a specific action + invocation namespace) */
case class WarmingData(override val container: Container,
                       invocationNamespace: EntityName,
                       action: ExecutableWhiskAction,
                       override val lastUsed: Instant,
                       override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warming"
  // Additional runs while warming just bump the in-flight activation count.
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}

/** type representing a cold (not yet running) container that is being initialized (for a specific action + invocation namespace) */
case class WarmingColdData(invocationNamespace: EntityName,
                           action: ExecutableWhiskAction,
                           override val lastUsed: Instant,
                           override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warmingCold"
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}

/** type representing a warm container that has already been in use (for a specific action + invocation namespace) */
case class WarmedData(override val container: Container,
                      invocationNamespace: EntityName,
                      action: ExecutableWhiskAction,
                      override val lastUsed: Instant,
                      override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warmed"
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}
// Events received by the actor
/** Request to create a prewarmed (stem cell) container for the given runtime kind. */
case class Start(exec: CodeExec[_], memoryLimit: ByteSize)
/** Request to run one activation of `action` in this container. */
case class Run(action: ExecutableWhiskAction, msg: ActivationMessage, retryLogDeadline: Option[Deadline] = None)
/** Request to destroy the container. */
case object Remove

// Events sent by the actor
/** Signals the parent that this proxy has capacity for more work. */
case class NeedWork(data: ContainerData)
case object ContainerPaused
case object ContainerRemoved // when container is destroyed
case object RescheduleJob // job is sent back to parent and could not be processed because container is being destroyed
/** Internal: prewarm container creation finished. */
case class PreWarmCompleted(data: PreWarmedData)
/** Internal: action /init finished; container is warmed. */
case class InitCompleted(data: WarmedData)
/** Internal: an activation run finished. */
case object RunCompleted
/**
* A proxy that wraps a Container. It is used to keep track of the lifecycle
* of a container and to guarantee a contract between the client of the container
* and the container itself.
*
* The contract is as follows:
* 1. If action.limits.concurrency.maxConcurrent == 1:
* Only one job is to be sent to the ContainerProxy at one time. ContainerProxy
* will delay all further jobs until a previous job has finished.
*
* 1a. The next job can be sent to the ContainerProxy after it indicates available
* capacity by sending NeedWork to its parent.
*
* 2. If action.limits.concurrency.maxConcurrent > 1:
* Parent must coordinate with ContainerProxy to attempt to send only data.action.limits.concurrency.maxConcurrent
* jobs for concurrent processing.
*
* Since the current job count is only periodically sent to parent, the number of jobs
* sent to ContainerProxy may exceed data.action.limits.concurrency.maxConcurrent,
* in which case jobs are buffered, so that only a max of action.limits.concurrency.maxConcurrent
* are ever sent into the container concurrently. Parent will NOT be signalled to send more jobs until
* buffered jobs are completed, but their order is not guaranteed.
*
* 2a. The next job can be sent to the ContainerProxy after ContainerProxy has "concurrent capacity",
* indicated by sending NeedWork to its parent.
*
* 3. A Remove message can be sent at any point in time. Like multiple jobs though,
* it will be delayed until the currently running job finishes.
*
* @constructor
* @param factory a function generating a Container
* @param sendActiveAck a function sending the activation via active ack
* @param storeActivation a function storing the activation in a persistent store
* @param unusedTimeout time after which the container is automatically thrown away
* @param pauseGrace time to wait for new work before pausing the container
*/
class ContainerProxy(factory: (TransactionId,
                               String,
                               ImageName,
                               Boolean,
                               ByteSize,
                               Int,
                               Option[ExecutableWhiskAction]) => Future[Container],
                     sendActiveAck: ActiveAck,
                     storeActivation: (TransactionId, WhiskActivation, UserContext) => Future[Any],
                     collectLogs: LogsCollector,
                     instance: InvokerInstanceId,
                     poolConfig: ContainerPoolConfig,
                     unusedTimeout: FiniteDuration,
                     pauseGrace: FiniteDuration)
    extends FSM[ContainerState, ContainerData]
    with Stash {
  implicit val ec = context.system.dispatcher
  implicit val logging = new AkkaLogging(context.system.log)
  var rescheduleJob = false // true iff actor receives a job but cannot process it because actor will destroy itself
  var runBuffer = immutable.Queue.empty[Run] //does not retain order, but does manage jobs that would have pushed past action concurrency limit
  //track buffer processing state to avoid extra transitions near end of buffer - this provides a pseudo-state between Running and Ready
  var bufferProcessing = false
  //keep a separate count to avoid confusion with ContainerState.activeActivationCount that is tracked/modified only in ContainerPool
  var activeCount = 0;

  // FSM lifecycle: Uninitialized -> Starting/Running -> Started/Ready -> Pausing -> Paused -> Removing.
  startWith(Uninitialized, NoData())

  when(Uninitialized) {
    // pre warm a container (creates a stem cell container)
    case Event(job: Start, _) =>
      factory(
        TransactionId.invokerWarmup,
        ContainerProxy.containerName(instance, "prewarm", job.exec.kind),
        job.exec.image,
        job.exec.pull,
        job.memoryLimit,
        poolConfig.cpuShare(job.memoryLimit),
        None)
        .map(container => PreWarmCompleted(PreWarmedData(container, job.exec.kind, job.memoryLimit)))
        .pipeTo(self)
      goto(Starting)
    // cold start (no container to reuse or available stem cell container)
    case Event(job: Run, _) =>
      implicit val transid = job.msg.transid
      activeCount += 1
      // create a new container
      val container = factory(
        job.msg.transid,
        ContainerProxy.containerName(instance, job.msg.user.namespace.name.asString, job.action.name.asString),
        job.action.exec.image,
        job.action.exec.pull,
        job.action.limits.memory.megabytes.MB,
        poolConfig.cpuShare(job.action.limits.memory.megabytes.MB),
        Some(job.action))
      // container factory will either yield a new container ready to execute the action, or
      // starting up the container failed; for the latter, it's either an internal error starting
      // a container or a docker action that is not conforming to the required action API
      container
        .andThen {
          case Success(container) =>
            // the container is ready to accept an activation; register it as PreWarmed; this
            // normalizes the life cycle for containers and their cleanup when activations fail
            self ! PreWarmCompleted(
              PreWarmedData(container, job.action.exec.kind, job.action.limits.memory.megabytes.MB, 1))
          case Failure(t) =>
            // the container did not come up cleanly, so disambiguate the failure mode and then cleanup
            // the failure is either the system fault, or for docker actions, the application/developer fault
            val response = t match {
              case WhiskContainerStartupError(msg) => ActivationResponse.whiskError(msg)
              case BlackboxStartupError(msg) => ActivationResponse.developerError(msg)
              case _ => ActivationResponse.whiskError(Messages.resourceProvisionError)
            }
            val context = UserContext(job.msg.user)
            // construct an appropriate activation and record it in the datastore,
            // also update the feed and active ack; the container cleanup is queued
            // implicitly via a FailureMessage which will be processed later when the state
            // transitions to Running
            val activation = ContainerProxy.constructWhiskActivation(job, None, Interval.zero, false, response)
            sendActiveAck(
              transid,
              activation,
              job.msg.blocking,
              job.msg.rootControllerIndex,
              job.msg.user,
              CombinedCompletionAndResultMessage(transid, activation, instance))
            storeActivation(transid, activation, context)
        }
        .flatMap { container =>
          // now attempt to inject the user code and run the action
          initializeAndRun(container, job)
            .map(_ => RunCompleted)
        }
        .pipeTo(self)
      goto(Running)
  }

  when(Starting) {
    // container was successfully obtained
    case Event(completed: PreWarmCompleted, _) =>
      context.parent ! NeedWork(completed.data)
      goto(Started) using completed.data
    // container creation failed
    case Event(_: FailureMessage, _) =>
      context.parent ! ContainerRemoved
      stop()
    case _ => delay
  }

  when(Started) {
    // a prewarmed (stem cell) container is assigned its first job
    case Event(job: Run, data: PreWarmedData) =>
      implicit val transid = job.msg.transid
      activeCount += 1
      initializeAndRun(data.container, job)
        .map(_ => RunCompleted)
        .pipeTo(self)
      goto(Running) using PreWarmedData(data.container, data.kind, data.memoryLimit, 1)
    case Event(Remove, data: PreWarmedData) => destroyContainer(data.container)
  }

  when(Running) {
    // Intermediate state, we were able to start a container
    // and we keep it in case we need to destroy it.
    case Event(completed: PreWarmCompleted, _) => stay using completed.data
    // Run during prewarm init (for concurrent > 1): buffer until init completes
    case Event(job: Run, data: PreWarmedData) =>
      implicit val transid = job.msg.transid
      logging.info(this, s"buffering for warming container ${data.container}; ${activeCount} activations in flight")
      runBuffer = runBuffer.enqueue(job)
      stay()
    // Run during cold init (for concurrent > 1): buffer until init completes
    case Event(job: Run, _: NoData) =>
      implicit val transid = job.msg.transid
      logging.info(this, s"buffering for cold warming container ${activeCount} activations in flight")
      runBuffer = runBuffer.enqueue(job)
      stay()
    // Init was successful
    case Event(completed: InitCompleted, _: PreWarmedData) =>
      processBuffer(completed.data.action, completed.data)
      stay using completed.data
    // Init was successful
    case Event(data: WarmedData, _: PreWarmedData) =>
      //in case concurrency supported, multiple runs can begin as soon as init is complete
      context.parent ! NeedWork(data)
      stay using data
    // Run was successful
    case Event(RunCompleted, data: WarmedData) =>
      activeCount -= 1
      //if there are items in runbuffer, process them if there is capacity, and stay; otherwise if we have any pending activations, also stay
      if (requestWork(data) || activeCount > 0) {
        stay using data
      } else {
        goto(Ready) using data
      }
    case Event(job: Run, data: WarmedData)
        if activeCount >= data.action.limits.concurrency.maxConcurrent && !rescheduleJob => //if we are over concurrency limit, and not a failure on resume
      implicit val transid = job.msg.transid
      logging.warn(this, s"buffering for maxed warm container ${data.container}; ${activeCount} activations in flight")
      runBuffer = runBuffer.enqueue(job)
      stay()
    case Event(job: Run, data: WarmedData)
        if activeCount < data.action.limits.concurrency.maxConcurrent && !rescheduleJob => //there is concurrent capacity and this is not a failed-resume reschedule, so run immediately
      activeCount += 1
      implicit val transid = job.msg.transid
      bufferProcessing = false //reset buffer processing state
      initializeAndRun(data.container, job)
        .map(_ => RunCompleted)
        .pipeTo(self)
      stay() using data
    // Failed after /init (the first run failed)
    case Event(_: FailureMessage, data: PreWarmedData) =>
      activeCount -= 1
      destroyContainer(data.container)
    // Failed for a subsequent /run
    case Event(_: FailureMessage, data: WarmedData) =>
      activeCount -= 1
      destroyContainer(data.container)
    // Failed at getting a container for a cold-start run
    case Event(_: FailureMessage, _) =>
      activeCount -= 1
      context.parent ! ContainerRemoved
      rejectBuffered()
      stop()
    case _ => delay
  }

  when(Ready, stateTimeout = pauseGrace) {
    case Event(job: Run, data: WarmedData) =>
      implicit val transid = job.msg.transid
      activeCount += 1
      initializeAndRun(data.container, job)
        .map(_ => RunCompleted)
        .pipeTo(self)
      goto(Running) using data
    // pause grace timed out
    case Event(StateTimeout, data: WarmedData) =>
      data.container.suspend()(TransactionId.invokerNanny).map(_ => ContainerPaused).pipeTo(self)
      goto(Pausing)
    case Event(Remove, data: WarmedData) => destroyContainer(data.container)
  }

  when(Pausing) {
    case Event(ContainerPaused, data: WarmedData) => goto(Paused)
    case Event(_: FailureMessage, data: WarmedData) => destroyContainer(data.container)
    case _ => delay
  }

  when(Paused, stateTimeout = unusedTimeout) {
    case Event(job: Run, data: WarmedData) =>
      implicit val transid = job.msg.transid
      activeCount += 1
      data.container
        .resume()
        .andThen {
          // Sending the message to self on a failure will cause the message
          // to ultimately be sent back to the parent (which will retry it)
          // when container removal is done.
          case Failure(_) =>
            rescheduleJob = true
            self ! job
        }
        .flatMap(_ => initializeAndRun(data.container, job))
        .map(_ => RunCompleted)
        .pipeTo(self)
      goto(Running) using data
    // container is reclaimed by the pool or it has become too old
    case Event(StateTimeout | Remove, data: WarmedData) =>
      rescheduleJob = true // to suppress sending message to the pool and not double count
      destroyContainer(data.container)
  }

  when(Removing) {
    case Event(job: Run, _) =>
      // Send the job back to the pool to be rescheduled
      context.parent ! job
      stay
    case Event(ContainerRemoved, _) => stop()
    case Event(_: FailureMessage, _) => stop()
  }

  // Unstash all messages stashed while in intermediate state
  onTransition {
    case _ -> Started => unstashAll()
    case _ -> Ready => unstashAll()
    case _ -> Paused => unstashAll()
    case _ -> Removing => unstashAll()
  }

  initialize()

  /** Either process runbuffer or signal parent to send work; return true if runbuffer is being processed */
  def requestWork(newData: WarmedData): Boolean = {
    //if there is concurrency capacity, process runbuffer, signal NeedWork, or both
    if (activeCount < newData.action.limits.concurrency.maxConcurrent) {
      if (runBuffer.nonEmpty) {
        //only request work once, if available larger than runbuffer
        val available = newData.action.limits.concurrency.maxConcurrent - activeCount
        val needWork: Boolean = available > runBuffer.size
        processBuffer(newData.action, newData)
        if (needWork) {
          //after buffer processing, then send NeedWork
          context.parent ! NeedWork(newData)
        }
        true
      } else {
        context.parent ! NeedWork(newData)
        bufferProcessing //true in case buffer is still in process
      }
    } else {
      false
    }
  }

  /** Process buffered items up to the capacity of action concurrency config */
  def processBuffer(action: ExecutableWhiskAction, newData: ContainerData) = {
    //send as many buffered as possible
    val available = action.limits.concurrency.maxConcurrent - activeCount
    logging.info(this, s"resending up to ${available} from ${runBuffer.length} buffered jobs")
    1 to available foreach { _ =>
      runBuffer.dequeueOption match {
        case Some((run, q)) =>
          self ! run
          bufferProcessing = true
          runBuffer = q
        case _ =>
      }
    }
  }

  /** Delays all incoming messages until unstashAll() is called */
  def delay = {
    stash()
    stay
  }

  /**
   * Destroys the container after unpausing it if needed. Can be used
   * as a state progression as it goes to Removing.
   *
   * @param container the container to destroy
   */
  def destroyContainer(container: Container) = {
    if (!rescheduleJob) {
      context.parent ! ContainerRemoved
    } else {
      context.parent ! RescheduleJob
    }
    rejectBuffered()
    // a paused container must be resumed before it can be destroyed
    val unpause = stateName match {
      case Paused => container.resume()(TransactionId.invokerNanny)
      case _ => Future.successful(())
    }
    unpause
      .flatMap(_ => container.destroy()(TransactionId.invokerNanny))
      .map(_ => ContainerRemoved)
      .pipeTo(self)
    goto(Removing)
  }

  /**
   * Return any buffered jobs to parent, in case buffer is not empty at removal/error time.
   */
  def rejectBuffered() = {
    //resend any buffered items on container removal
    if (runBuffer.nonEmpty) {
      logging.info(this, s"resending ${runBuffer.size} buffered jobs to parent on container removal")
      runBuffer.foreach(context.parent ! _)
      runBuffer = immutable.Queue.empty[Run]
    }
  }

  /**
   * Runs the job, initialize first if necessary.
   * Completes the job by:
   * 1. sending an activate ack,
   * 2. fetching the logs for the run,
   * 3. indicating the resource is free to the parent pool,
   * 4. recording the result to the data store
   *
   * @param container the container to run the job on
   * @param job the job to run
   * @return a future completing after logs have been collected and
   *         added to the WhiskActivation
   */
  def initializeAndRun(container: Container, job: Run)(implicit tid: TransactionId): Future[WhiskActivation] = {
    val actionTimeout = job.action.limits.timeout.duration
    val (env, parameters) = ContainerProxy.partitionArguments(job.msg.content, job.msg.initArgs)

    // standard OpenWhisk activation metadata exposed to the action's environment
    val environment = Map(
      "namespace" -> job.msg.user.namespace.name.toJson,
      "action_name" -> job.msg.action.qualifiedNameWithLeadingSlash.toJson,
      "action_version" -> job.msg.action.version.toJson,
      "activation_id" -> job.msg.activationId.toString.toJson,
      "transaction_id" -> job.msg.transid.id.toJson)

    // if the action requests the api key to be injected into the action context, add it here;
    // treat a missing annotation as requesting the api key for backward compatibility
    val authEnvironment = {
      if (job.action.annotations.isTruthy(Annotations.ProvideApiKeyAnnotationName, valueForNonExistent = true)) {
        job.msg.user.authkey.toEnvironment.fields
      } else Map.empty
    }

    // Only initialize iff we haven't yet warmed the container
    val initialize = stateData match {
      case data: WarmedData =>
        Future.successful(None)
      case _ =>
        val owEnv = (authEnvironment ++ environment + ("deadline" -> (Instant.now.toEpochMilli + actionTimeout.toMillis).toString.toJson)) map {
          case (key, value) => "__OW_" + key.toUpperCase -> value
        }
        container
          .initialize(
            job.action.containerInitializer(env ++ owEnv),
            actionTimeout,
            job.action.limits.concurrency.maxConcurrent)
          .map(Some(_))
    }

    val activation: Future[WhiskActivation] = initialize
      .flatMap { initInterval =>
        //immediately setup warmedData for use (before first execution) so that concurrent actions can use it asap
        if (initInterval.isDefined) {
          self ! InitCompleted(WarmedData(container, job.msg.user.namespace.name, job.action, Instant.now, 1))
        }

        val env = authEnvironment ++ environment ++ Map(
          // compute deadline on invoker side avoids discrepancies inside container
          // but potentially under-estimates actual deadline
          "deadline" -> (Instant.now.toEpochMilli + actionTimeout.toMillis).toString.toJson)

        container
          .run(parameters, env.toJson.asJsObject, actionTimeout, job.action.limits.concurrency.maxConcurrent)(
            job.msg.transid)
          .map {
            case (runInterval, response) =>
              // fold init duration into the reported interval so callers see total time
              val initRunInterval = initInterval
                .map(i => Interval(runInterval.start.minusMillis(i.duration.toMillis), runInterval.end))
                .getOrElse(runInterval)
              ContainerProxy.constructWhiskActivation(
                job,
                initInterval,
                initRunInterval,
                runInterval.duration >= actionTimeout,
                response)
          }
      }
      .recover {
        case InitializationError(interval, response) =>
          ContainerProxy.constructWhiskActivation(
            job,
            Some(interval),
            interval,
            interval.duration >= actionTimeout,
            response)
        case t =>
          // Actually, this should never happen - but we want to make sure to not miss a problem
          logging.error(this, s"caught unexpected error while running activation: ${t}")
          ContainerProxy.constructWhiskActivation(
            job,
            None,
            Interval.zero,
            false,
            ActivationResponse.whiskError(Messages.abnormalRun))
      }

    val splitAckMessagesPendingLogCollection = collectLogs.logsToBeCollected(job.action)
    // Sending an active ack is an asynchronous operation. The result is forwarded as soon as
    // possible for blocking activations so that dependent activations can be scheduled. The
    // completion message which frees a load balancer slot is sent after the active ack future
    // completes to ensure proper ordering.
    val sendResult = if (job.msg.blocking) {
      activation.map { result =>
        val msg =
          if (splitAckMessagesPendingLogCollection) ResultMessage(tid, result)
          else CombinedCompletionAndResultMessage(tid, result, instance)
        sendActiveAck(tid, result, job.msg.blocking, job.msg.rootControllerIndex, job.msg.user, msg)
      }
    } else {
      // For non-blocking request, do not forward the result.
      if (splitAckMessagesPendingLogCollection) Future.successful(())
      else
        activation.map { result =>
          val msg = CompletionMessage(tid, result, instance)
          sendActiveAck(tid, result, job.msg.blocking, job.msg.rootControllerIndex, job.msg.user, msg)
        }
    }

    val context = UserContext(job.msg.user)

    // Adds logs to the raw activation.
    val activationWithLogs: Future[Either[ActivationLogReadingError, WhiskActivation]] = activation
      .flatMap { activation =>
        // Skips log collection entirely, if the limit is set to 0
        if (!splitAckMessagesPendingLogCollection) {
          Future.successful(Right(activation))
        } else {
          val start = tid.started(this, LoggingMarkers.INVOKER_COLLECT_LOGS, logLevel = InfoLevel)
          collectLogs(tid, job.msg.user, activation, container, job.action)
            .andThen {
              case Success(_) => tid.finished(this, start)
              case Failure(t) => tid.failed(this, start, s"reading logs failed: $t")
            }
            .map(logs => Right(activation.withLogs(logs)))
            .recover {
              case LogCollectingException(logs) =>
                Left(ActivationLogReadingError(activation.withLogs(logs)))
              case _ =>
                Left(ActivationLogReadingError(activation.withLogs(ActivationLogs(Vector(Messages.logFailure)))))
            }
        }
      }

    activationWithLogs
      .map(_.fold(_.activation, identity))
      .foreach { activation =>
        // Sending the completion message to the controller after the active ack ensures proper ordering
        // (result is received before the completion message for blocking invokes).
        if (splitAckMessagesPendingLogCollection) {
          sendResult.onComplete(
            _ =>
              sendActiveAck(
                tid,
                activation,
                job.msg.blocking,
                job.msg.rootControllerIndex,
                job.msg.user,
                CompletionMessage(tid, activation, instance)))
        }
        // Storing the record. Entirely asynchronous and not waited upon.
        storeActivation(tid, activation, context)
      }

    // Disambiguate activation errors and transform the Either into a failed/successful Future respectively.
    activationWithLogs.flatMap {
      case Right(act) if !act.response.isSuccess && !act.response.isApplicationError =>
        Future.failed(ActivationUnsuccessfulError(act))
      case Left(error) => Future.failed(error)
      case Right(act) => Future.successful(act)
    }
  }
}
/**
 * Timeouts governing a container's idle lifecycle, loaded from configuration
 * (see [[ContainerProxy.timeouts]]).
 *
 * @param idleContainer time after which an unused (paused) container is removed
 * @param pauseGrace time to wait for new work before pausing a warm container
 */
final case class ContainerProxyTimeoutConfig(idleContainer: FiniteDuration, pauseGrace: FiniteDuration)
object ContainerProxy {

  /**
   * Creates Props for a ContainerProxy actor.
   *
   * @param factory function producing a Container for a given transaction/name/image/etc.
   * @param ack function sending the activation via active ack
   * @param store function persisting the activation record
   * @param collectLogs the log collector to use after each run
   * @param instance identity of the invoker owning this proxy
   * @param poolConfig pool configuration (used to derive cpu shares)
   * @param unusedTimeout time after which an unused container is thrown away
   * @param pauseGrace time to wait for new work before pausing the container
   */
  def props(factory: (TransactionId,
                      String,
                      ImageName,
                      Boolean,
                      ByteSize,
                      Int,
                      Option[ExecutableWhiskAction]) => Future[Container],
            ack: ActiveAck,
            store: (TransactionId, WhiskActivation, UserContext) => Future[Any],
            collectLogs: LogsCollector,
            instance: InvokerInstanceId,
            poolConfig: ContainerPoolConfig,
            unusedTimeout: FiniteDuration = timeouts.idleContainer,
            pauseGrace: FiniteDuration = timeouts.pauseGrace) =
    Props(new ContainerProxy(factory, ack, store, collectLogs, instance, poolConfig, unusedTimeout, pauseGrace))

  // Needs to be thread-safe as it's used by multiple proxies concurrently.
  private val containerCount = new Counter

  // Idle/pause-grace timeouts read once from configuration; also used as the
  // defaults for `props` above.
  val timeouts = loadConfigOrThrow[ContainerProxyTimeoutConfig](ConfigKeys.containerProxyTimeouts)

  /**
   * Generates a unique container name.
   *
   * @param instance the invoker instance, used for the name prefix
   * @param prefix the container name's prefix
   * @param suffix the container name's suffix
   * @return a unique container name
   */
  def containerName(instance: InvokerInstanceId, prefix: String, suffix: String): String = {
    // only letters, digits and underscores survive sanitization
    def isAllowed(c: Char): Boolean = c.isLetterOrDigit || c == '_'

    val sanitizedPrefix = prefix.filter(isAllowed)
    val sanitizedSuffix = suffix.filter(isAllowed)

    s"${ContainerFactory.containerNamePrefix(instance)}_${containerCount.next()}_${sanitizedPrefix}_${sanitizedSuffix}"
  }

  /**
   * Creates a WhiskActivation ready to be sent via active ack.
   *
   * @param job the job that was executed
   * @param initInterval the time the container initialization took, if any
   * @param totalInterval the total time it took to execute the job (including init)
   * @param isTimeout whether the run exceeded the action's timeout limit
   * @param response the response to return to the user
   * @return a WhiskActivation to be sent to the user
   */
  def constructWhiskActivation(job: Run,
                               initInterval: Option[Interval],
                               totalInterval: Interval,
                               isTimeout: Boolean,
                               response: ActivationResponse) = {
    val causedBy = Some {
      if (job.msg.causedBySequence) {
        Parameters(WhiskActivation.causedByAnnotation, JsString(Exec.SEQUENCE))
      } else {
        // emit the internal system hold time as the 'wait' time, but only for non-sequence
        // actions, since the transid start time for a sequence does not correspond
        // with a specific component of the activation but the entire sequence;
        // it will require some work to generate a new transaction id for a sequence
        // component - however, because the trace of activations is recorded in the parent
        // sequence, a client can determine the queue time for sequences that way
        val end = initInterval.map(_.start).getOrElse(totalInterval.start)
        Parameters(
          WhiskActivation.waitTimeAnnotation,
          Interval(job.msg.transid.meta.start, end).duration.toMillis.toJson)
      }
    }

    // initTime annotation is only present when the container was cold-started
    val initTime = {
      initInterval.map(initTime => Parameters(WhiskActivation.initTimeAnnotation, initTime.duration.toMillis.toJson))
    }

    val binding =
      job.msg.action.binding.map(f => Parameters(WhiskActivation.bindingAnnotation, JsString(f.asString)))

    WhiskActivation(
      activationId = job.msg.activationId,
      namespace = job.msg.user.namespace.name.toPath,
      subject = job.msg.user.subject,
      cause = job.msg.cause,
      name = job.action.name,
      version = job.action.version,
      start = totalInterval.start,
      end = totalInterval.end,
      duration = Some(totalInterval.duration.toMillis),
      response = response,
      annotations = {
        Parameters(WhiskActivation.limitsAnnotation, job.action.limits.toJson) ++
          Parameters(WhiskActivation.pathAnnotation, JsString(job.action.fullyQualifiedName(false).asString)) ++
          Parameters(WhiskActivation.kindAnnotation, JsString(job.action.exec.kind)) ++
          Parameters(WhiskActivation.timeoutAnnotation, JsBoolean(isTimeout)) ++
          causedBy ++ initTime ++ binding ++ Parameters(WhiskActivation.transIdAnnotation, JsString(job.msg.transid.id))
      })
  }

  /**
   * Partitions the activation arguments into two JsObject instances. The first is exported as intended for export
   * by the action runtime to the environment. The second is passed on as arguments to the action.
   *
   * @param content the activation arguments
   * @param initArgs set of parameters to treat as initialization arguments
   * @return A partition of the arguments into an environment variables map and the JsObject argument to the action
   */
  def partitionArguments(content: Option[JsObject], initArgs: Set[String]): (Map[String, JsValue], JsObject) = {
    content match {
      case None => (Map.empty, JsObject.empty)
      case Some(js) if initArgs.isEmpty => (Map.empty, js)
      case Some(js) =>
        val (env, args) = js.fields.partition(k => initArgs.contains(k._1))
        (env, JsObject(args))
    }
  }
}
/** Indicates that something went wrong with an activation and the container should be removed */
trait ActivationError extends Exception {
  // the (failed) activation record associated with this error
  val activation: WhiskActivation
}

/** Indicates an activation with a non-successful response */
case class ActivationUnsuccessfulError(activation: WhiskActivation) extends ActivationError

/** Indicates reading logs for an activation failed (terminally, truncated) */
case class ActivationLogReadingError(activation: WhiskActivation) extends ActivationError
| RSulzmann/openwhisk | core/invoker/src/main/scala/org/apache/openwhisk/core/containerpool/ContainerProxy.scala | Scala | apache-2.0 | 37,473 |
package no.nr.edvard.convergence.models
/**
 * A Java type identified by its name, with lazily evaluated links to related
 * types (superclass, interfaces, outer class and callees). The links are
 * by-name constructor parameters so that mutually referential types can be
 * wired together without forcing a cycle at construction time.
 */
class JavaType(
  val name: JavaTypeName,
  _superClass: => JavaType,
  _interfaces: => List[JavaType],
  _outerClass: => JavaType,
  _callees: => List[JavaType]
) extends JavaElement {
  require(name != null)

  lazy val superClass = _superClass
  lazy val interfaces = _interfaces
  lazy val outerClass = _outerClass
  lazy val callees = _callees

  // Identity is determined solely by the type name; hashCode and equals agree.
  override def hashCode: Int = name.hashCode

  // Pattern match replaces the original isInstanceOf/asInstanceOf pair;
  // `null` does not match the typed case, preserving the old null-safety.
  override def equals(other: Any): Boolean = other match {
    case that: JavaType => name == that.name
    case _ => false
  }
}
object JavaType {
  // Sentinel instances. NOTE(review): these are built with null by-name
  // arguments, so forcing superClass/interfaces/outerClass/callees on them
  // yields null -- callers should compare against the sentinels, not traverse them.
  val NO_TYPE = new JavaType(new JavaTypeName("$$$NO_TYPE$$$"), null, null,
    null, null)
  val UNKNOWN_OR_NO_TYPE = new JavaType(new JavaTypeName("$$$UNK_OR_NO_TYPE$$"),
    null, null, null, null)
}
package org.talkingpuffin.snippet
/** Accumulates heading/value display lines; the newest line sits at the head of `msgs`. */
class LineCollector {
  /** Collected lines, most recently added first. */
  var msgs: List[LineCollector.InfoLine] = Nil

  /** Records a heading/value pair by prepending it to the collected lines. */
  def disp(heading: String, value: AnyRef): Unit =
    msgs ::= LineCollector.InfoLine(heading, value)
}

object LineCollector {
  /** A single heading/value display line. */
  case class InfoLine(heading: String, value: AnyRef)
}
| dcbriccetti/talking-puffin | web-mvn/src/main/scala/org/talkingpuffin/snippet/LineCollector.scala | Scala | mit | 282 |
/*
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root for full license information.
*/
package org.apache.spark.util.csharp
import java.io._
import java.nio.file._
import java.nio.file.attribute.PosixFilePermission
import java.nio.file.attribute.PosixFilePermission._
import java.util.{Timer, TimerTask}
import org.apache.commons.compress.archivers.zip.{ZipArchiveEntry, ZipArchiveOutputStream, ZipFile}
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.spark.internal.Logging
import scala.collection.JavaConverters._
import scala.collection.Set
/**
* Utility methods used by SparkCLR.
*/
object Utils extends Logging {

  // The nine POSIX permission bits ordered from most- to least-significant
  // (owner rwx, group rwx, others rwx), matching the layout of an octal mode.
  private val posixFilePermissions = Array(
    OWNER_READ, OWNER_WRITE, OWNER_EXECUTE,
    GROUP_READ, GROUP_WRITE, GROUP_EXECUTE,
    OTHERS_READ, OTHERS_WRITE, OTHERS_EXECUTE
  )

  // Whether the default file system supports POSIX permissions (false e.g. on Windows).
  val supportPosix = FileSystems.getDefault.supportedFileAttributeViews().contains("posix")

  /**
   * Compress all files under given directory into one zip file and drop it to the target directory
   *
   * @param sourceDir source directory to zip
   * @param targetZipFile target zip file
   */
  def zip(sourceDir: File, targetZipFile: File): Unit = {
    var fos: FileOutputStream = null
    var zos: ZipArchiveOutputStream = null
    try {
      fos = new FileOutputStream(targetZipFile)
      zos = new ZipArchiveOutputStream(fos)
      val sourcePath = sourceDir.toPath
      // walk all files recursively; entry names are relative to sourceDir
      FileUtils.listFiles(sourceDir, null, true).asScala.foreach { file =>
        var in: FileInputStream = null
        try {
          val path = file.toPath
          val entry = new ZipArchiveEntry(sourcePath.relativize(path).toString)
          if (supportPosix) {
            // preserve the file's own permissions, additionally forcing
            // 755 for .exe files and at least 644 for everything else
            entry.setUnixMode(permissionsToMode(Files.getPosixFilePermissions(path).asScala)
              | (if (entry.getName.endsWith(".exe")) 0x1ED else 0x1A4))
          } else if (entry.getName.endsWith(".exe")) {
            entry.setUnixMode(0x1ED) // 755
          } else {
            entry.setUnixMode(0x1A4) // 644
          }
          zos.putArchiveEntry(entry)
          in = new FileInputStream(file)
          IOUtils.copy(in, zos)
          zos.closeArchiveEntry()
        } finally {
          IOUtils.closeQuietly(in)
        }
      }
    } finally {
      // closing zos also closes fos; closing fos again is a harmless no-op
      IOUtils.closeQuietly(zos)
      IOUtils.closeQuietly(fos)
    }
  }

  /**
   * Unzip a file to the given directory
   *
   * @param file file to be unzipped
   * @param targetDir target directory
   */
  def unzip(file: File, targetDir: File): Unit = {
    var zipFile: ZipFile = null
    try {
      targetDir.mkdirs()
      zipFile = new ZipFile(file)
      zipFile.getEntries.asScala.foreach { entry =>
        val targetFile = new File(targetDir, entry.getName)
        if (targetFile.exists()) {
          // existing files are deliberately left untouched
          logWarning(s"Target file/directory $targetFile already exists. Skip it for now. " +
            s"Make sure this is expected.")
        } else {
          if (entry.isDirectory) {
            targetFile.mkdirs()
          } else {
            targetFile.getParentFile.mkdirs()
            val input = zipFile.getInputStream(entry)
            val output = new FileOutputStream(targetFile)
            // NOTE(review): if IOUtils.copy throws, these streams are only
            // released when the outer catch fires and the JVM GCs them --
            // consider closing them in a finally block.
            IOUtils.copy(input, output)
            IOUtils.closeQuietly(input)
            IOUtils.closeQuietly(output)
            if(supportPosix) {
              // restore the permissions recorded in the archive entry
              Files.setPosixFilePermissions(
                targetFile.toPath, modeToPermissions(entry.getUnixMode).asJava)
            }
          }
        }
      }
    } catch {
      case e: Exception => logError("exception caught during decompression:" + e)
    } finally {
      ZipFile.closeQuietly(zipFile)
    }
  }

  /**
   * Exits the JVM, trying to do it nicely, otherwise doing it nastily.
   *
   * @param status the exit status, zero for OK, non-zero for error
   * @param maxDelayMillis the maximum delay in milliseconds
   */
  def exit(status: Int, maxDelayMillis: Long) {
    try {
      logInfo(s"Utils.exit() with status: $status, maxDelayMillis: $maxDelayMillis")

      // setup a timer, so if nice exit fails, the nasty exit happens
      val timer = new Timer()
      timer.schedule(new TimerTask() {
        @Override
        def run() {
          Runtime.getRuntime.halt(status)
        }
      }, maxDelayMillis)
      // try to exit nicely (runs shutdown hooks; does not return on success)
      System.exit(status);
    } catch {
      // exit nastily if we have a problem
      case ex: Throwable => Runtime.getRuntime.halt(status)
    } finally {
      // should never get here
      Runtime.getRuntime.halt(status)
    }
  }

  /**
   * Exits the JVM, trying to do it nicely, wait 1 second
   *
   * @param status the exit status, zero for OK, non-zero for error
   */
  def exit(status: Int): Unit = {
    exit(status, 1000)
  }

  /** Lists the entry names contained in the given zip file. */
  private[spark] def listZipFileEntries(file: File): Array[String] = {
    var zipFile: ZipFile = null
    try {
      zipFile = new ZipFile(file)
      zipFile.getEntries.asScala.map(_.getName).toArray
    } finally {
      ZipFile.closeQuietly(zipFile)
    }
  }

  /** Folds a set of POSIX permissions into the corresponding 9-bit octal mode. */
  private[this] def permissionsToMode(permissions: Set[PosixFilePermission]): Int = {
    posixFilePermissions.foldLeft(0) { (mode, perm) =>
      (mode << 1) | (if (permissions.contains(perm)) 1 else 0)
    }
  }

  /** Expands a 9-bit octal mode into the corresponding set of POSIX permissions. */
  private[this] def modeToPermissions(mode: Int): Set[PosixFilePermission] = {
    posixFilePermissions.zipWithIndex
      .filter { case (_, i) => (mode & (0x100 >>> i)) != 0 }
      .map(_._1).toSet
  }
}
| hebinhuang/Mobius | scala/src/main/org/apache/spark/util/csharp/Utils.scala | Scala | mit | 5,489 |
package name.denyago.yasc.integration.httpapi
import org.scalatest.FunSpec
/**
 * Placeholder integration spec for the chat HTTP API.
 * Every example below is registered with `ignore`, so none of them run yet;
 * they document the intended behaviour to be implemented later.
 */
class ChatApiSpec extends FunSpec {
  describe("HTTP API") {
    describe("Sign in") {
      ignore("should accept new users") {
      }
      ignore("should not accept already signed in users") {
      }
    }

    describe("Sign out") {
      ignore("should let already signed in users") {
      }
      ignore("should not let not yet signed in users") {
      }
    }

    describe("Get session") {
      ignore("should return all messages for signed in users") {
      }
      ignore("should not return all messages for not yet signed in users") {
      }
    }

    describe("Post a message") {
      ignore("should accept message from a signed in user") {
      }
      ignore("should not accept message from not yet signed in user") {
      }
    }

    describe("Get chat subscription") {
      ignore("should push new messages from other users when the user is signed in") {
      }
      ignore("should not push new mesages from other users when the user not yet signed in") {
      }
    }
  }
}
| denyago/yet-another-simple-chat | src/test/scala/name/denyago/yasc/integration/httpapi/ChatApiSpec.scala | Scala | mit | 1,099 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit
import better.files.File
import cats.data.{ NonEmptyChain, ValidatedNec }
import cats.syntax.validated._
import nl.knaw.dans.easy.multideposit.parser.Headers.Header
import nl.knaw.dans.lib.string._
package object parser {

  /** One row of the instructions file: its row number plus the header -> value mapping. */
  case class DepositRow(rowNum: Int, content: Map[Header, String])

  type DepositRows = Seq[DepositRow]

  implicit class DatasetRowFind(val row: DepositRow) extends AnyVal {
    /** Looks up `name` in this row, treating blank values as absent. */
    def find(name: Header): Option[String] = row.content.get(name).filterNot(_.isBlank)
  }

  /** Accumulating validation result: either a value or a non-empty chain of ParserErrors. */
  type Validated[T] = ValidatedNec[ParserError, T]

  implicit class ValidatedSyntax[T](val t: T) extends AnyVal {
    /** Lifts any value into a valid [[Validated]]. */
    def toValidated: Validated[T] = t.validNec[ParserError]
  }

  /** Base type for errors raised while parsing the instructions file. */
  private[parser] sealed abstract class ParserError {
    /** Wraps this error as an invalid [[Validated]] result. */
    def toInvalid[T]: Validated[T] = this.invalidNec[T]

    /** Wraps this error as a one-element error chain. */
    def chained: NonEmptyChain[ParserError] = NonEmptyChain.one(this)
  }
  private[parser] case class EmptyInstructionsFileError(file: File) extends ParserError
  private[parser] case class ParseError(row: Int, message: String) extends ParserError
}
| DANS-KNAW/easy-split-multi-deposit | src/main/scala/nl.knaw.dans.easy.multideposit/parser/package.scala | Scala | apache-2.0 | 1,723 |
/**
* Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
*/
package akka.actor
import akka.event.EventHandler
import akka.dispatch._
import akka.config.Supervision._
import akka.util._
import ReflectiveAccess._
import java.net.InetSocketAddress
import java.util.concurrent.atomic.AtomicReference
import java.util.concurrent.{ ScheduledFuture, ConcurrentHashMap, TimeUnit }
import java.util.{ Map => JMap }
import scala.beans.BeanProperty
import scala.collection.immutable.Stack
import scala.annotation.tailrec
private[akka] object ActorRefInternals {

  /**
   * LifeCycles for ActorRefs.
   * These status markers track an ActorRef's progression (see the `_status`
   * field on ActorRef): unstarted -> running -> (being restarted) -> shutdown.
   */
  private[akka] sealed trait StatusType
  object UNSTARTED extends StatusType
  object RUNNING extends StatusType
  object BEING_RESTARTED extends StatusType
  object SHUTDOWN extends StatusType
}
/**
* Abstraction for unification of sender and senderFuture for later reply.
* Can be stored away and used at a later point in time.
*/
abstract class Channel[T] {

  /**
   * Scala API. <p/>
   * Delivers the given message to this channel.
   */
  def !(msg: T): Unit

  /**
   * Java API. <p/>
   * Delivers the given message to this channel; alias for `!`.
   */
  def sendOneWay(msg: T): Unit = this ! msg
}
/**
 * ActorRef is an immutable and serializable handle to an Actor.
 * <p/>
 * Create an ActorRef for an Actor by using the factory method on the Actor object.
 * <p/>
 * Here is an example on how to create an actor with a default constructor.
 * <pre>
 * import Actor._
 *
 * val actor = actorOf[MyActor]
 * actor.start()
 * actor ! message
 * actor.stop()
 * </pre>
 *
 * You can also create and start actors like this:
 * <pre>
 * val actor = actorOf[MyActor].start()
 * </pre>
 *
 * Here is an example on how to create an actor with a non-default constructor.
 * <pre>
 * import Actor._
 *
 * val actor = actorOf(new MyActor(...))
 * actor.start()
 * actor ! message
 * actor.stop()
 * </pre>
 *
 * @author <a href="http://jonasboner.com">Jonas Bonér</a>
 */
trait ActorRef extends ActorRefShared with java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef =>
// Only mutable for RemoteServer in order to maintain identity across nodes
@volatile
protected[akka] var _uuid = newUuid
// Lifecycle state: UNSTARTED -> RUNNING -> (BEING_RESTARTED <-> RUNNING) -> SHUTDOWN.
@volatile
protected[this] var _status: ActorRefInternals.StatusType = ActorRefInternals.UNSTARTED
/**
 * User overridable callback/setting.
 * <p/>
 * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
 * <p/>
 * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote
 * actor in RemoteServer etc. But also as the identifier for persistence, which means
 * that you can use a custom name to be able to retrieve the "correct" persisted state
 * upon restart, remote restart etc.
 */
@BeanProperty
@volatile
var id: String = _uuid.toString
/**
 * User overridable callback/setting.
 * <p/>
 * Defines the default timeout for '!!' and '!!!' invocations,
 * e.g. the timeout for the future returned by the call to '!!' and '!!!'.
 */
@deprecated("Will be replaced by implicit-scoped timeout on all methods that needs it, will default to timeout specified in config", "1.1")
@BeanProperty
@volatile
var timeout: Long = Actor.TIMEOUT
/**
 * User overridable callback/setting.
 * <p/>
 * Defines the default timeout for an initial receive invocation.
 * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
 */
@volatile
var receiveTimeout: Option[Long] = None
/**
 * Akka Java API. <p/>
 * Defines the default timeout for an initial receive invocation.
 * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
 */
def setReceiveTimeout(timeout: Long) = this.receiveTimeout = Some(timeout)
/** Akka Java API. Returns the currently configured receive timeout, if any. */
def getReceiveTimeout(): Option[Long] = receiveTimeout
/**
 * Akka Java API. <p/>
 * A faultHandler defines what should be done when a linked actor signals an error.
 * <p/>
 * Can be one of:
 * <pre>
 * getContext().setFaultHandler(new AllForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
 * </pre>
 * Or:
 * <pre>
 * getContext().setFaultHandler(new OneForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
 * </pre>
 */
def setFaultHandler(handler: FaultHandlingStrategy)
def getFaultHandler(): FaultHandlingStrategy
/**
 * Akka Java API. <p/>
 * A lifeCycle defines whether the actor will be stopped on error (Temporary) or if it can be restarted (Permanent)
 * <p/>
 * Can be one of:
 *
 * import static akka.config.Supervision.*;
 * <pre>
 * getContext().setLifeCycle(permanent());
 * </pre>
 * Or:
 * <pre>
 * getContext().setLifeCycle(temporary());
 * </pre>
 */
def setLifeCycle(lifeCycle: LifeCycle): Unit
def getLifeCycle(): LifeCycle
/**
 * Akka Java API. <p/>
 * The default dispatcher is the <tt>Dispatchers.globalExecutorBasedEventDrivenDispatcher</tt>.
 * This means that all actors will share the same event-driven executor based dispatcher.
 * <p/>
 * You can override it so it fits the specific use-case that the actor is used for.
 * See the <tt>akka.dispatch.Dispatchers</tt> class for the different
 * dispatchers available.
 * <p/>
 * The default is also that all actors that are created and spawned from within this actor
 * is sharing the same dispatcher as its creator.
 */
def setDispatcher(dispatcher: MessageDispatcher) = this.dispatcher = dispatcher
def getDispatcher(): MessageDispatcher = dispatcher
/**
 * Returns on which node this actor lives if None it lives in the local ActorRegistry
 */
@deprecated("Remoting will become fully transparent in the future", "1.1")
def homeAddress: Option[InetSocketAddress]
/**
 * Java API. <p/>
 * Returns the home address, or null when the actor has no home address set.
 */
@deprecated("Remoting will become fully transparent in the future", "1.1")
def getHomeAddress(): InetSocketAddress = homeAddress getOrElse null
/**
 * Holds the hot swapped partial function.
 */
@volatile
protected[akka] var hotswap = Stack[PartialFunction[Any, Unit]]()
/**
 * This is a reference to the message currently being processed by the actor
 */
@volatile
protected[akka] var currentMessage: MessageInvocation = null
/**
 * Comparison only takes uuid into account.
 */
def compareTo(other: ActorRef) = this.uuid compareTo other.uuid
/**
 * Returns the uuid for the actor.
 */
def getUuid() = _uuid
def uuid = _uuid
/**
 * Akka Java API. <p/>
 * The reference sender Actor of the last received message.
 * Is defined if the message was sent from another Actor, else None.
 */
def getSender(): Option[ActorRef] = sender
/**
 * Akka Java API. <p/>
 * The reference sender future of the last received message.
 * Is defined if the message was sent with '!!' or '!!!', else None.
 */
def getSenderFuture(): Option[CompletableFuture[Any]] = senderFuture
/**
 * Is the actor being restarted?
 */
def isBeingRestarted: Boolean = _status == ActorRefInternals.BEING_RESTARTED
/**
 * Is the actor running?
 */
def isRunning: Boolean = _status match {
case ActorRefInternals.BEING_RESTARTED | ActorRefInternals.RUNNING => true
case _ => false
}
/**
 * Is the actor shut down?
 */
def isShutdown: Boolean = _status == ActorRefInternals.SHUTDOWN
/**
 * Is the actor ever started?
 */
def isUnstarted: Boolean = _status == ActorRefInternals.UNSTARTED
/**
 * Is the actor able to handle the message passed in as arguments?
 */
@deprecated("Will be removed without replacement, it's just not reliable in the face of `become` and `unbecome`", "1.1")
def isDefinedAt(message: Any): Boolean = actor.isDefinedAt(message)
/**
 * Only for internal use. UUID is effectively final.
 */
protected[akka] def uuid_=(uid: Uuid) = _uuid = uid
/**
 * Akka Java API. <p/>
 * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
 * <p/>
 * <pre>
 * actor.sendOneWay(message);
 * </pre>
 * <p/>
 */
def sendOneWay(message: AnyRef): Unit = sendOneWay(message, null)
/**
 * Akka Java API. <p/>
 * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
 * <p/>
 * Allows you to pass along the sender of the message.
 * <p/>
 * <pre>
 * actor.sendOneWay(message, context);
 * </pre>
 * <p/>
 */
def sendOneWay(message: AnyRef, sender: ActorRef): Unit = this.!(message)(Option(sender))
/**
 * Akka Java API. <p/>
 * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
 * Uses the default timeout of the Actor (setTimeout()) and omits the sender reference
 */
def sendRequestReply(message: AnyRef): AnyRef = sendRequestReply(message, timeout, null)
/**
 * Akka Java API. <p/>
 * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
 * Uses the default timeout of the Actor (setTimeout())
 */
def sendRequestReply(message: AnyRef, sender: ActorRef): AnyRef = sendRequestReply(message, timeout, sender)
/**
 * Akka Java API. <p/>
 * Sends a message asynchronously and waits on a future for a reply message under the hood.
 * <p/>
 * It waits on the reply either until it receives it or until the timeout expires
 * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics.
 * <p/>
 * <b>NOTE:</b>
 * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'getContext().getSender()' to
 * implement request/response message exchanges.
 * <p/>
 * If you are sending messages using <code>sendRequestReply</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
 * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
 */
def sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef): AnyRef = {
// NOTE(review): the "\\n\\t" sequences below render as a literal backslash-n in the
// exception message; presumably "\n\t" (a real newline) was intended — confirm against upstream.
!!(message, timeout)(Option(sender)).getOrElse(throw new ActorTimeoutException(
"Message [" + message +
"]\\n\\tsent to [" + actorClassName +
"]\\n\\tfrom [" + (if (sender ne null) sender.actorClassName else "nowhere") +
"]\\n\\twith timeout [" + timeout +
"]\\n\\ttimed out."))
.asInstanceOf[AnyRef]
}
/**
 * Akka Java API. <p/>
 * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
 * Uses the Actors default timeout (setTimeout()) and omits the sender
 */
def sendRequestReplyFuture[T <: AnyRef](message: AnyRef): Future[T] = sendRequestReplyFuture(message, timeout, null).asInstanceOf[Future[T]]
/**
 * Akka Java API. <p/>
 * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
 * Uses the Actors default timeout (setTimeout())
 */
def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: ActorRef): Future[T] = sendRequestReplyFuture(message, timeout, sender).asInstanceOf[Future[T]]
/**
 * Akka Java API. <p/>
 * Sends a message asynchronously returns a future holding the eventual reply message.
 * <p/>
 * <b>NOTE:</b>
 * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'getContext().getSender()' to
 * implement request/response message exchanges.
 * <p/>
 * If you are sending messages using <code>sendRequestReplyFuture</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
 * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
 */
def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: ActorRef): Future[T] = !!!(message, timeout)(Option(sender)).asInstanceOf[Future[T]]
/**
 * Akka Java API. <p/>
 * Forwards the message specified to this actor and preserves the original sender of the message
 */
def forward(message: AnyRef, sender: ActorRef): Unit =
if (sender eq null) throw new IllegalArgumentException("The 'sender' argument to 'forward' can't be null")
else forward(message)(Some(sender))
/**
 * Akka Java API. <p/>
 * Use <code>getContext().replyUnsafe(..)</code> to reply with a message to the original sender of the message currently
 * being processed.
 * <p/>
 * Throws an IllegalStateException if unable to determine what to reply to.
 */
def replyUnsafe(message: AnyRef) = reply(message)
/**
 * Akka Java API. <p/>
 * Use <code>getContext().replySafe(..)</code> to reply with a message to the original sender of the message currently
 * being processed.
 * <p/>
 * Returns true if reply was sent, and false if unable to determine what to reply to.
 */
def replySafe(message: AnyRef): Boolean = reply_?(message)
/**
 * Returns the class for the Actor instance that is managed by the ActorRef.
 */
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def actorClass: Class[_ <: Actor]
/**
 * Akka Java API. <p/>
 * Returns the class for the Actor instance that is managed by the ActorRef.
 */
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def getActorClass(): Class[_ <: Actor] = actorClass
/**
 * Returns the class name for the Actor instance that is managed by the ActorRef.
 */
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def actorClassName: String
/**
 * Akka Java API. <p/>
 * Returns the class name for the Actor instance that is managed by the ActorRef.
 */
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def getActorClassName(): String = actorClassName
/**
 * Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
 */
def dispatcher_=(md: MessageDispatcher): Unit
/**
 * Get the dispatcher for this actor.
 */
def dispatcher: MessageDispatcher
/**
 * Starts up the actor and its message queue.
 */
def start(): ActorRef
/**
 * Shuts down the actor its dispatcher and message queue.
 * Alias for 'stop'.
 */
def exit() = stop()
/**
 * Shuts down the actor its dispatcher and message queue.
 */
def stop(): Unit
/**
 * Links an other actor to this actor. Links are unidirectional and mean that the linking actor will
 * receive a notification if the linked actor has crashed.
 * <p/>
 * If the 'trapExit' member field of the 'faultHandler' has been set to contain at least one exception class then it will
 * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
 * defined by the 'faultHandler'.
 */
def link(actorRef: ActorRef): Unit
/**
 * Unlink the actor.
 */
def unlink(actorRef: ActorRef): Unit
/**
 * Atomically start and link an actor.
 */
def startLink(actorRef: ActorRef): Unit
/**
 * Atomically create (from actor class) and start an actor.
 * <p/>
 * To be invoked from within the actor itself.
 */
@deprecated("Will be removed after 1.1, use Actor.actorOf instead", "1.1")
def spawn(clazz: Class[_ <: Actor]): ActorRef
/**
 * Atomically create (from actor class), make it remote and start an actor.
 * <p/>
 * To be invoked from within the actor itself.
 */
@deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
/**
 * Atomically create (from actor class), link and start an actor.
 * <p/>
 * To be invoked from within the actor itself.
 */
@deprecated("Will be removed after 1.1, use Actor.remote.actorOf instead and then link on success", "1.1")
def spawnLink(clazz: Class[_ <: Actor]): ActorRef
/**
 * Atomically create (from actor class), make it remote, link and start an actor.
 * <p/>
 * To be invoked from within the actor itself.
 */
@deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
/**
 * Returns the mailbox size.
 */
def mailboxSize = dispatcher.mailboxSize(this)
/**
 * Akka Java API. <p/>
 * Returns the mailbox size.
 */
def getMailboxSize(): Int = mailboxSize
/**
 * Returns the supervisor, if there is one.
 */
def supervisor: Option[ActorRef]
/**
 * Akka Java API. <p/>
 * Returns the supervisor, if there is one. Returns null when there is no supervisor.
 */
def getSupervisor(): ActorRef = supervisor getOrElse null
/**
 * Returns an unmodifiable Java Map containing the linked actors,
 * please note that the backing map is thread-safe but not immutable
 */
def linkedActors: JMap[Uuid, ActorRef]
/**
 * Java API. <p/>
 * Returns an unmodifiable Java Map containing the linked actors,
 * please note that the backing map is thread-safe but not immutable
 */
def getLinkedActors(): JMap[Uuid, ActorRef] = linkedActors
/**
 * Abstraction for unification of sender and senderFuture for later reply.
 * Prefers the sender future (request/reply) over the plain sender;
 * throws IllegalActorStateException when neither is available.
 */
def channel: Channel[Any] = {
if (senderFuture.isDefined) {
new Channel[Any] {
val future = senderFuture.get
def !(msg: Any) = future completeWithResult msg
}
} else if (sender.isDefined) {
val someSelf = Some(this)
new Channel[Any] {
val client = sender.get
def !(msg: Any) = client.!(msg)(someSelf)
}
} else throw new IllegalActorStateException("No channel available")
}
/**
 * Java API. <p/>
 * Abstraction for unification of sender and senderFuture for later reply
 */
def getChannel: Channel[Any] = channel
/** Dispatcher callback: processes one message invocation with the underlying actor. */
protected[akka] def invoke(messageHandle: MessageInvocation): Unit
/** Enqueues a fire-and-forget message on this actor's mailbox. */
protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit
/** Enqueues a request/reply message and returns the future that will hold the eventual reply. */
protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
message: Any,
timeout: Long,
senderOption: Option[ActorRef],
senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T]
/** Holds the managed Actor instance; the reference is swapped on restart. */
protected[akka] def actorInstance: AtomicReference[Actor]
/** The currently managed Actor instance. */
protected[akka] def actor: Actor = actorInstance.get
/** Sets (or clears, with None) the supervisor of this actor. */
protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit
/** The dispatcher-specific mailbox object attached to this ref. */
protected[akka] def mailbox: AnyRef
protected[akka] def mailbox_=(value: AnyRef): AnyRef
/** Called when a linked actor signals failure; applies the fault-handling strategy. */
protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit
/** Restarts the actor according to the retry budget, or shuts it down when exhausted. */
protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
/** Restarts (or, for Temporary lifecycle, shuts down) all linked actors. */
protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
/** Registers this actor's supervisor on the remote side; returns the supervisor uuid if any. */
protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid]
// Hash and equality are based solely on the uuid, consistent with compareTo above.
override def hashCode: Int = HashCode.hash(HashCode.SEED, uuid)
override def equals(that: Any): Boolean = {
that.isInstanceOf[ActorRef] &&
that.asInstanceOf[ActorRef].uuid == uuid
}
override def toString = "Actor[" + id + ":" + uuid + "]"
}
/**
* Local (serializable) ActorRef that is used when referencing the Actor on its "home" node.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
class LocalActorRef private[akka] (
private[this] val actorFactory: () => Actor,
val homeAddress: Option[InetSocketAddress],
val clientManaged: Boolean = false)
extends ActorRef with ScalaActorRef {
protected[akka] val guard = new ReentrantGuard
@volatile
protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None
@volatile
private[akka] lazy val _linkedActors = new ConcurrentHashMap[Uuid, ActorRef]
@volatile
private[akka] var _supervisor: Option[ActorRef] = None
@volatile
private var maxNrOfRetriesCount: Int = 0
@volatile
private var restartsWithinTimeRangeTimestamp: Long = 0L
@volatile
private var _mailbox: AnyRef = _
@volatile
private[akka] var _dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher
protected[akka] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) }
//If it was started inside "newActor", initialize it
if (isRunning) initializeActorInstance
// used only for deserialization
private[akka] def this(
__uuid: Uuid,
__id: String,
__timeout: Long,
__receiveTimeout: Option[Long],
__lifeCycle: LifeCycle,
__supervisor: Option[ActorRef],
__hotswap: Stack[PartialFunction[Any, Unit]],
__factory: () => Actor,
__homeAddress: Option[InetSocketAddress]) = {
this(__factory, __homeAddress)
_uuid = __uuid
id = __id
timeout = __timeout
receiveTimeout = __receiveTimeout
lifeCycle = __lifeCycle
_supervisor = __supervisor
hotswap = __hotswap
setActorSelfFields(actor, this)
start
}
/**
* Returns whether this actor ref is client-managed remote or not
*/
private[akka] final def isClientManaged_? = clientManaged && homeAddress.isDefined && isRemotingEnabled
// ========= PUBLIC FUNCTIONS =========
/**
* Returns the class for the Actor instance that is managed by the ActorRef.
*/
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def actorClass: Class[_ <: Actor] = actor.getClass.asInstanceOf[Class[_ <: Actor]]
/**
* Returns the class name for the Actor instance that is managed by the ActorRef.
*/
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def actorClassName: String = actorClass.getName
/**
* Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
*/
def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard {
if (!isBeingRestarted) {
if (!isRunning) _dispatcher = md
else throw new ActorInitializationException(
"Can not swap dispatcher for " + toString + " after it has been started")
}
}
/**
* Get the dispatcher for this actor.
*/
def dispatcher: MessageDispatcher = _dispatcher
/**
* Starts up the actor and its message queue.
*/
def start(): ActorRef = guard.withGuard {
if (isShutdown) throw new ActorStartException(
"Can't restart an actor that has been shut down with 'stop' or 'exit'")
if (!isRunning) {
dispatcher.attach(this)
_status = ActorRefInternals.RUNNING
// If we are not currently creating this ActorRef instance
if ((actorInstance ne null) && (actorInstance.get ne null))
initializeActorInstance
if (isClientManaged_?)
Actor.remote.registerClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
checkReceiveTimeout //Schedule the initial Receive timeout
}
this
}
/**
* Shuts down the actor its dispatcher and message queue.
*/
def stop() = guard.withGuard {
if (isRunning) {
receiveTimeout = None
cancelReceiveTimeout
dispatcher.detach(this)
_status = ActorRefInternals.SHUTDOWN
try {
actor.postStop
} finally {
currentMessage = null
Actor.registry.unregister(this)
if (isRemotingEnabled) {
if (isClientManaged_?)
Actor.remote.unregisterClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
Actor.remote.unregister(this)
}
setActorSelfFields(actorInstance.get, null)
}
} //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.")
}
/**
* Links an other actor to this actor. Links are unidirectional and means that a the linking actor will
* receive a notification if the linked actor has crashed.
* <p/>
* If the 'trapExit' member field of the 'faultHandler' has been set to at contain at least one exception class then it will
* 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
* defined by the 'faultHandler'.
* <p/>
* To be invoked from within the actor itself.
*/
def link(actorRef: ActorRef): Unit = guard.withGuard {
val actorRefSupervisor = actorRef.supervisor
val hasSupervisorAlready = actorRefSupervisor.isDefined
if (hasSupervisorAlready && actorRefSupervisor.get.uuid == uuid) return // we already supervise this guy
else if (hasSupervisorAlready) throw new IllegalActorStateException(
"Actor can only have one supervisor [" + actorRef + "], e.g. link(actor) fails")
else {
_linkedActors.put(actorRef.uuid, actorRef)
actorRef.supervisor = Some(this)
}
}
/**
* Unlink the actor.
* <p/>
* To be invoked from within the actor itself.
*/
def unlink(actorRef: ActorRef) = guard.withGuard {
if (_linkedActors.remove(actorRef.uuid) eq null)
throw new IllegalActorStateException("Actor [" + actorRef + "] is not a linked actor, can't unlink")
actorRef.supervisor = None
}
/**
* Atomically start and link an actor.
* <p/>
* To be invoked from within the actor itself.
*/
def startLink(actorRef: ActorRef): Unit = guard.withGuard {
link(actorRef)
actorRef.start()
}
/**
* Atomically create (from actor class) and start an actor.
* <p/>
* To be invoked from within the actor itself.
*/
def spawn(clazz: Class[_ <: Actor]): ActorRef =
Actor.actorOf(clazz).start()
/**
* Atomically create (from actor class), start and make an actor remote.
* <p/>
* To be invoked from within the actor itself.
*/
def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
ensureRemotingEnabled
val ref = Actor.remote.actorOf(clazz, hostname, port)
ref.timeout = timeout
ref.start()
}
/**
* Atomically create (from actor class), start and link an actor.
* <p/>
* To be invoked from within the actor itself.
*/
def spawnLink(clazz: Class[_ <: Actor]): ActorRef = {
val actor = spawn(clazz)
link(actor)
actor.start()
actor
}
/**
* Atomically create (from actor class), start, link and make an actor remote.
* <p/>
* To be invoked from within the actor itself.
*/
def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
ensureRemotingEnabled
val actor = Actor.remote.actorOf(clazz, hostname, port)
actor.timeout = timeout
link(actor)
actor.start()
actor
}
/**
* Returns the mailbox.
*/
def mailbox: AnyRef = _mailbox
protected[akka] def mailbox_=(value: AnyRef): AnyRef = { _mailbox = value; value }
/**
* Returns the supervisor, if there is one.
*/
def supervisor: Option[ActorRef] = _supervisor
// ========= AKKA PROTECTED FUNCTIONS =========
protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = _supervisor = sup
protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
if (isClientManaged_?) {
Actor.remote.send[Any](
message, senderOption, None, homeAddress.get, timeout, true, this, None, ActorType.ScalaActor, None)
} else
dispatcher dispatchMessage new MessageInvocation(this, message, senderOption, None)
protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
message: Any,
timeout: Long,
senderOption: Option[ActorRef],
senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
if (isClientManaged_?) {
val future = Actor.remote.send[T](
message, senderOption, senderFuture, homeAddress.get, timeout, false, this, None, ActorType.ScalaActor, None)
if (future.isDefined) future.get
else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
} else {
val future = if (senderFuture.isDefined) senderFuture else Some(new DefaultCompletableFuture[T](timeout))
dispatcher dispatchMessage new MessageInvocation(
this, message, senderOption, future.asInstanceOf[Some[CompletableFuture[Any]]])
future.get
}
}
/**
* Callback for the dispatcher. This is the single entry point to the user Actor implementation.
*/
protected[akka] def invoke(messageHandle: MessageInvocation): Unit = {
guard.lock.lock
try {
if (!isShutdown) {
currentMessage = messageHandle
try {
try {
cancelReceiveTimeout // FIXME: leave this here?
actor(messageHandle.message)
currentMessage = null // reset current message after successful invocation
} catch {
case e: InterruptedException =>
currentMessage = null // received message while actor is shutting down, ignore
case e =>
handleExceptionInDispatch(e, messageHandle.message)
}
finally {
checkReceiveTimeout // Reschedule receive timeout
}
} catch {
case e =>
EventHandler.error(e, this, messageHandle.message.toString)
throw e
}
}
} finally { guard.lock.unlock }
}
protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable) {
faultHandler match {
case AllForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
restartLinkedActors(reason, maxRetries, within)
case OneForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
dead.restart(reason, maxRetries, within)
case _ =>
if (_supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
else dead.stop()
}
}
private def requestRestartPermission(maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Boolean = {
val denied = if (maxNrOfRetries.isEmpty && withinTimeRange.isEmpty) { //Immortal
false
} else if (withinTimeRange.isEmpty) { // restrict number of restarts
maxNrOfRetriesCount += 1 //Increment number of retries
maxNrOfRetriesCount > maxNrOfRetries.get
} else { // cannot restart more than N within M timerange
maxNrOfRetriesCount += 1 //Increment number of retries
val windowStart = restartsWithinTimeRangeTimestamp
val now = System.currentTimeMillis
val retries = maxNrOfRetriesCount
//We are within the time window if it isn't the first restart, or if the window hasn't closed
val insideWindow = if (windowStart == 0) false
else (now - windowStart) <= withinTimeRange.get
//The actor is dead if it dies X times within the window of restart
val unrestartable = insideWindow && retries > maxNrOfRetries.getOrElse(1)
if (windowStart == 0 || !insideWindow) //(Re-)set the start of the window
restartsWithinTimeRangeTimestamp = now
if (windowStart != 0 && !insideWindow) //Reset number of restarts if window has expired
maxNrOfRetriesCount = 1
unrestartable
}
denied == false //If we weren't denied, we have a go
}
protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) {
def performRestart() {
val failedActor = actorInstance.get
failedActor match {
case p: Proxyable =>
failedActor.preRestart(reason)
failedActor.postRestart(reason)
case _ =>
failedActor.preRestart(reason)
val freshActor = newActor
setActorSelfFields(failedActor, null) // Only null out the references if we could instantiate the new actor
actorInstance.set(freshActor) // Assign it here so if preStart fails, we can null out the sef-refs next call
freshActor.preStart
freshActor.postRestart(reason)
}
}
def tooManyRestarts() {
_supervisor.foreach { sup =>
// can supervisor handle the notification?
val notification = MaximumNumberOfRestartsWithinTimeRangeReached(this, maxNrOfRetries, withinTimeRange, reason)
if (sup.isDefinedAt(notification)) notifySupervisorWithMessage(notification)
}
stop
}
@tailrec
def attemptRestart() {
val success = if (requestRestartPermission(maxNrOfRetries, withinTimeRange)) {
guard.withGuard[Boolean] {
_status = ActorRefInternals.BEING_RESTARTED
lifeCycle match {
case Temporary =>
shutDownTemporaryActor(this)
true
case _ => // either permanent or none where default is permanent
val success = try {
performRestart()
true
} catch {
case e =>
EventHandler.error(e, this, "Exception in restart of Actor [%s]".format(toString))
false // an error or exception here should trigger a retry
}
finally {
currentMessage = null
}
if (success) {
_status = ActorRefInternals.RUNNING
dispatcher.resume(this)
restartLinkedActors(reason, maxNrOfRetries, withinTimeRange)
}
success
}
}
} else {
tooManyRestarts()
true // done
}
if (success) () // alles gut
else attemptRestart()
}
attemptRestart() // recur
}
protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) = {
val i = _linkedActors.values.iterator
while (i.hasNext) {
val actorRef = i.next
actorRef.lifeCycle match {
// either permanent or none where default is permanent
case Temporary => shutDownTemporaryActor(actorRef)
case _ => actorRef.restart(reason, maxNrOfRetries, withinTimeRange)
}
}
}
protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = guard.withGuard {
ensureRemotingEnabled
if (_supervisor.isDefined) {
if (homeAddress.isDefined) Actor.remote.registerSupervisorForActor(this)
Some(_supervisor.get.uuid)
} else None
}
def linkedActors: JMap[Uuid, ActorRef] = java.util.Collections.unmodifiableMap(_linkedActors)
// ========= PRIVATE FUNCTIONS =========
private[this] def newActor: Actor = {
try {
Actor.actorRefInCreation.set(Some(this))
val a = actorFactory()
if (a eq null) throw new ActorInitializationException("Actor instance passed to ActorRef can not be 'null'")
a
} finally {
Actor.actorRefInCreation.set(None)
}
}
private def shutDownTemporaryActor(temporaryActor: ActorRef) {
temporaryActor.stop()
_linkedActors.remove(temporaryActor.uuid) // remove the temporary actor
// if last temporary actor is gone, then unlink me from supervisor
if (_linkedActors.isEmpty) notifySupervisorWithMessage(UnlinkAndStop(this))
true
}
private def handleExceptionInDispatch(reason: Throwable, message: Any) = {
EventHandler.error(reason, this, message.toString)
//Prevent any further messages to be processed until the actor has been restarted
dispatcher.suspend(this)
senderFuture.foreach(_.completeWithException(reason))
if (supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
else {
lifeCycle match {
case Temporary => shutDownTemporaryActor(this)
case _ => dispatcher.resume(this) //Resume processing for this actor
}
}
}
private def notifySupervisorWithMessage(notification: LifeCycleMessage) = {
// FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client
_supervisor.foreach { sup =>
if (sup.isShutdown) { // if supervisor is shut down, game over for all linked actors
//Scoped stop all linked actors, to avoid leaking the 'i' val
{
val i = _linkedActors.values.iterator
while (i.hasNext) {
i.next.stop()
i.remove
}
}
//Stop the actor itself
stop
} else sup ! notification // else notify supervisor
}
}
// Injects this ActorRef into the actor instance's 'self' and 'someSelf'
// fields via reflection, walking up the class hierarchy until the fields
// (declared by the Actor trait) are found. Passing value = null clears both
// fields (someSelf is then set to null rather than Some(null)).
private def setActorSelfFields(actor: Actor, value: ActorRef) {
@tailrec
def lookupAndSetSelfFields(clazz: Class[_], actor: Actor, value: ActorRef): Boolean = {
val success = try {
val selfField = clazz.getDeclaredField("self")
val someSelfField = clazz.getDeclaredField("someSelf")
selfField.setAccessible(true)
someSelfField.setAccessible(true)
selfField.set(actor, value)
someSelfField.set(actor, if (value ne null) Some(value) else null)
true
} catch {
// fields not declared at this level — try the superclass
case e: NoSuchFieldException => false
}
if (success) true
else {
val parent = clazz.getSuperclass
// reached the top of the hierarchy without finding the fields:
// the instance does not mix in the Actor trait
if (parent eq null)
throw new IllegalActorStateException(toString + " is not an Actor since it have not mixed in the 'Actor' trait")
lookupAndSetSelfFields(parent, actor, value)
}
}
lookupAndSetSelfFields(actor.getClass, actor, value)
}
// Runs the actor's preStart callback and then registers it in the global
// actor registry, in that order.
private def initializeActorInstance = {
actor.preStart // run actor preStart
Actor.registry.register(this)
}
// (Re)schedules the one-shot ReceiveTimeout message. Any previously scheduled
// timeout is cancelled first; a new one is only scheduled when a timeout is
// configured and the mailbox is currently empty.
protected[akka] def checkReceiveTimeout = {
cancelReceiveTimeout
if (receiveTimeout.isDefined && dispatcher.mailboxSize(this) <= 0) { //Only reschedule if desired and there are currently no more messages to be processed
_futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS))
}
}
// Cancels any pending ReceiveTimeout task (interrupting it if running) and
// clears the stored handle. No-op when nothing is scheduled.
protected[akka] def cancelReceiveTimeout = {
  _futureTimeout.foreach { pending =>
    pending.cancel(true)
    _futureTimeout = None
  }
}
}
/**
* System messages for RemoteActorRef.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
object RemoteActorSystemMessage {
// Interned marker message instructing a remote actor to stop itself
// (sent by RemoteActorRef.stop).
val Stop = "RemoteActorRef:stop".intern
}
/**
* Remote ActorRef that is used when referencing the Actor on a different node than its "home" node.
* This reference is network-aware (remembers its origin) and immutable.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
private[akka] case class RemoteActorRef private[akka] (
classOrServiceName: String,
val actorClassName: String,
val hostname: String,
val port: Int,
_timeout: Long,
loader: Option[ClassLoader],
val actorType: ActorType = ActorType.ScalaActor)
extends ActorRef with ScalaActorRef {
ensureRemotingEnabled
// the remote node this ref points back to; always defined for a remote ref
val homeAddress = Some(new InetSocketAddress(hostname, port))
//protected def clientManaged = classOrServiceName.isEmpty //If no class or service name, it's client managed
id = classOrServiceName
//id = classOrServiceName.getOrElse("uuid:" + uuid) //If we're a server-managed we want to have classOrServiceName as id, or else, we're a client-managed and we want to have our uuid as id
timeout = _timeout
// a RemoteActorRef is usable immediately after construction
start
// Fire-and-forget: ship the message to the home node (oneWay = true, no future).
def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
Actor.remote.send[Any](message, senderOption, None, homeAddress.get, timeout, true, this, None, actorType, loader)
// Request/reply: ship the message and return the future carrying the eventual answer.
def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
message: Any,
timeout: Long,
senderOption: Option[ActorRef],
senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
val future = Actor.remote.send[T](
message, senderOption, senderFuture,
homeAddress.get, timeout,
false, this, None,
actorType, loader)
// a non-oneway send must hand back a future; anything else is a protocol violation
if (future.isDefined) future.get
else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
}
// Only flips the local status flag — there is no remote handshake on start.
def start: ActorRef = synchronized {
_status = ActorRefInternals.RUNNING
this
}
// Marks the ref shut down locally and tells the remote actor to stop itself.
def stop: Unit = synchronized {
if (_status == ActorRefInternals.RUNNING) {
_status = ActorRefInternals.SHUTDOWN
postMessageToMailbox(RemoteActorSystemMessage.Stop, None)
}
}
protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = None
// ==== NOT SUPPORTED ====
// All operations below require access to the actor instance, which lives on
// the remote node, so they fail fast with UnsupportedOperationException.
@deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
def actorClass: Class[_ <: Actor] = unsupported
def dispatcher_=(md: MessageDispatcher): Unit = unsupported
def dispatcher: MessageDispatcher = unsupported
def link(actorRef: ActorRef): Unit = unsupported
def unlink(actorRef: ActorRef): Unit = unsupported
def startLink(actorRef: ActorRef): Unit = unsupported
def spawn(clazz: Class[_ <: Actor]): ActorRef = unsupported
def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
def spawnLink(clazz: Class[_ <: Actor]): ActorRef = unsupported
def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
def supervisor: Option[ActorRef] = unsupported
def linkedActors: JMap[Uuid, ActorRef] = unsupported
protected[akka] def mailbox: AnyRef = unsupported
protected[akka] def mailbox_=(value: AnyRef): AnyRef = unsupported
protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported
protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported
protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = unsupported
protected[akka] def actorInstance: AtomicReference[Actor] = unsupported
private def unsupported = throw new UnsupportedOperationException("Not supported for RemoteActorRef")
}
/**
* This trait represents the common (external) methods for all ActorRefs
* Needed because implicit conversions aren't applied when instance imports are used
*
* i.e.
* var self: ScalaActorRef = ...
* import self._
* //can't call ActorRef methods here unless they are declared in a common
* //superclass, which ActorRefShared is.
*/
// Minimal common supertype for all ActorRef flavours (see comment above:
// needed so instance-imported refs still expose shared members).
trait ActorRefShared {
/**
* Returns the uuid for the actor.
*/
def uuid: Uuid
}
/**
* This trait represents the Scala Actor API
* There are implicit conversions in ../actor/Implicits.scala
* from ActorRef -> ScalaActorRef and back
*/
trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>

  /**
   * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
   * <p/>
   * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote
   * actor in RemoteServer etc. But also as the identifier for persistence, which means
   * that you can use a custom name to be able to retrieve the "correct" persisted state
   * upon restart, remote restart etc.
   */
  def id: String

  def id_=(id: String): Unit

  /**
   * User overridable callback/setting.
   * <p/>
   * Defines the life-cycle for a supervised actor.
   */
  @volatile
  @BeanProperty
  var lifeCycle: LifeCycle = UndefinedLifeCycle

  /**
   * User overridable callback/setting.
   * <p/>
   * Don't forget to supply a List of exception types to intercept (trapExit)
   * <p/>
   * Can be one of:
   * <pre>
   * faultHandler = AllForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
   * </pre>
   * Or:
   * <pre>
   * faultHandler = OneForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
   * </pre>
   */
  @volatile
  @BeanProperty
  var faultHandler: FaultHandlingStrategy = NoFaultHandlingStrategy

  /**
   * The reference sender Actor of the last received message.
   * Is defined if the message was sent from another Actor, else None.
   */
  def sender: Option[ActorRef] = {
    val msg = currentMessage
    if (msg eq null) None
    else msg.sender
  }

  /**
   * The reference sender future of the last received message.
   * Is defined if the message was sent with '!!' or '!!!', else None.
   */
  def senderFuture(): Option[CompletableFuture[Any]] = {
    val msg = currentMessage
    if (msg eq null) None
    else msg.senderFuture
  }

  /**
   * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
   * <p/>
   * If invoked from within an actor then the actor reference is implicitly passed on as the
   * implicit 'sender' argument, and is then available in the receiving actor via its 'sender'
   * member. If not invoked from within an Actor, no sender is available.
   * <pre>
   * actor ! message
   * </pre>
   */
  def !(message: Any)(implicit sender: Option[ActorRef] = None): Unit = {
    if (isRunning) postMessageToMailbox(message, sender)
    else throw new ActorInitializationException(
      "Actor has not been started, you need to invoke 'actor.start()' before using it")
  }

  /**
   * Sends a message asynchronously and waits on a future for a reply message.
   * <p/>
   * It waits on the reply either until it receives it (in the form of <code>Some(replyMessage)</code>)
   * or until the timeout expires (which will return None). E.g. send-and-receive-eventually semantics.
   * <p/>
   * <b>NOTE:</b>
   * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
   * implement request/response message exchanges.
   * If you are sending messages using <code>!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
   * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
   */
  def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = {
    if (isRunning) {
      val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None)
      val isMessageJoinPoint = if (isTypedActorEnabled) TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future)
      else false
      try {
        future.await
      } catch {
        case e: FutureTimeoutException =>
          // typed-actor join points must propagate the timeout; plain sends fall through to None
          if (isMessageJoinPoint) {
            EventHandler.error(e, this, e.getMessage)
            throw e
          } else None
      }
      future.resultOrException
    } else throw new ActorInitializationException(
      "Actor has not been started, you need to invoke 'actor.start()' before using it")
  }

  /**
   * Sends a message asynchronously, returns a future holding the eventual reply message.
   * <p/>
   * <b>NOTE:</b>
   * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
   * implement request/response message exchanges.
   * If you are sending messages using <code>!!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
   * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
   */
  // FIX: this was declared as a second, parameterless '!!' overload referencing an
  // undeclared 'message' and type parameter 'T' (it could never compile). Restored
  // as the '!!!' operator that the scaladoc above and the 'forward' doc refer to.
  def !!![T](message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Future[T] = {
    if (isRunning) postMessageToMailboxAndCreateFutureResultWithTimeout[T](message, timeout, sender, None)
    else throw new ActorInitializationException(
      "Actor has not been started, you need to invoke 'actor.start()' before using it")
  }

  /**
   * Forwards the message and passes the original sender actor as the sender.
   * <p/>
   * Works with '!', '!!' and '!!!'.
   */
  def forward(message: Any)(implicit sender: Some[ActorRef]) = {
    if (isRunning) {
      // preserve the original reply channel: reuse the sender's future when present
      if (sender.get.senderFuture.isDefined)
        postMessageToMailboxAndCreateFutureResultWithTimeout(message, timeout, sender.get.sender, sender.get.senderFuture)
      else
        postMessageToMailbox(message, sender.get.sender)
    } else throw new ActorInitializationException("Actor has not been started, you need to invoke 'actor.start()' before using it")
  }

  /**
   * Use <code>self.reply(..)</code> to reply with a message to the original sender of the message currently
   * being processed.
   * <p/>
   * Throws an IllegalActorStateException if unable to determine what to reply to.
   */
  def reply(message: Any) = if (!reply_?(message)) throw new IllegalActorStateException(
    "\\n\\tNo sender in scope, can't reply. " +
    "\\n\\tYou have probably: " +
    "\\n\\t\\t1. Sent a message to an Actor from an instance that is NOT an Actor." +
    "\\n\\t\\t2. Invoked a method on an TypedActor from an instance NOT an TypedActor." +
    "\\n\\tElse you might want to use 'reply_?' which returns Boolean(true) if success and Boolean(false) if no sender in scope")

  /**
   * Use <code>reply_?(..)</code> to reply with a message to the original sender of the message currently
   * being processed.
   * <p/>
   * Returns true if reply was sent, and false if unable to determine what to reply to.
   */
  def reply_?(message: Any): Boolean = {
    // prefer the future (synchronous !!/!!! caller) over a plain sender ref
    if (senderFuture.isDefined) {
      senderFuture.get completeWithResult message
      true
    } else if (sender.isDefined) {
      //TODO: optimize away this allocation, perhaps by having implicit self: Option[ActorRef] in signature
      sender.get.!(message)(Some(this))
      true
    } else false
  }

  /**
   * Atomically create (from actor class) and start an actor.
   */
  def spawn[T <: Actor: ClassTag]: ActorRef =
    spawn(classTag[T].runtimeClass.asInstanceOf[Class[_ <: Actor]]) // runtimeClass replaces deprecated 'erasure'

  /**
   * Atomically create (from actor class), start and make an actor remote.
   */
  def spawnRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
    ensureRemotingEnabled
    spawnRemote(classTag[T].runtimeClass.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
  }

  /**
   * Atomically create (from actor class), start and link an actor.
   */
  def spawnLink[T <: Actor: ClassTag]: ActorRef =
    spawnLink(classTag[T].runtimeClass.asInstanceOf[Class[_ <: Actor]])

  /**
   * Atomically create (from actor class), start, link and make an actor remote.
   */
  def spawnLinkRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
    ensureRemotingEnabled
    spawnLinkRemote(classTag[T].runtimeClass.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
  }
}
| felixmulder/scala | test/disabled/presentation/akka/src/akka/actor/ActorRef.scala | Scala | bsd-3-clause | 50,617 |
package funsets
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* This class is a test suite for the methods in object FunSets. To run
* the test suite, you can either:
* - run the "test" command in the SBT console
* - right-click the file in eclipse and chose "Run As" - "JUnit Test"
*/
@RunWith(classOf[JUnitRunner])
class FunSetSuite extends FunSuite {

  // FunSuite reference: http://doc.scalatest.org/1.9.1/index.html#org.scalatest.FunSuite
  // Tests are declared with `test(name) { ... }` and checked via `assert`;
  // `ignore` and `pending` are available for disabled / unfinished tests, and
  // `===` (unlike `==`) reports both operand values when an assertion fails.

  import FunSets._

  test("contains is implemented") {
    // The universal set accepts every element, so it must report 100 as a member.
    assert(contains((_: Int) => true, 100))
  }

  /**
   * Shared fixture values used by several tests below.
   *
   * They live in a trait that each test instantiates, rather than in vals on
   * the suite itself, so that a crashing `singletonSet` fails the individual
   * tests instead of aborting construction of the whole suite.
   */
  trait TestSets {
    val s1 = singletonSet(1)
    val s2 = singletonSet(2)
    val s3 = singletonSet(3)
  }

  test("singletonSet(1) contains 1") {
    new TestSets {
      // The string argument of assert identifies the failing check in the report.
      assert(contains(s1, 1), "Singleton")
    }
  }

  test("union contains all elements of each set") {
    new TestSets {
      val combined = union(s1, s2)
      assert(contains(combined, 1), "Union 1")
      assert(contains(combined, 2), "Union 2")
      assert(!contains(combined, 3), "Union 3")
    }
  }

  test("map all elements to their squares") {
    new TestSets {
      val combined = union(s1, s2)
      val squares = map(combined, n => n * n)
      assert(contains(squares, 1), "1 Square is 1")
      assert(contains(squares, 4), "2 Square is 4")
    }
  }
}
| comprakash/learning-scala | funsets/src/test/scala/funsets/FunSetSuite.scala | Scala | gpl-3.0 | 3,403 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// Optional-integer computation box: total post-reform losses brought forward and deducted.
case class CP263(value: Option[Int]) extends CtBoxIdentifier("Post reform losses brought forward and deducted") with CtOptionalInteger
object CP263 extends Calculated[CP263, ComputationsBoxRetriever] with CtTypeConverters {
  /**
   * Sums CP283b (post-reform losses brought forward) with the chosen CP997.
   * Yields None only when both source boxes are empty, so an absent pair is
   * distinguished from a genuine zero total.
   */
  override def calculate(boxRetriever: ComputationsBoxRetriever): CP263 = {
    val lossesBroughtForward = boxRetriever.cp283b()
    val chosenCp997 = boxRetriever.chooseCp997()
    val total =
      if (lossesBroughtForward.value.isEmpty && chosenCp997.value.isEmpty) None
      else Some(lossesBroughtForward + chosenCp997) // '+' via CtTypeConverters
    CP263(total)
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP263.scala | Scala | apache-2.0 | 1,260 |
package com.github.diegopacheco.sandbox.scala.disruptor.fun
import com.lmax.disruptor.EventFactory
import com.lmax.disruptor.EventHandler
import com.lmax.disruptor.RingBuffer
import java.nio.ByteBuffer
import scala.beans.BeanProperty
import com.lmax.disruptor.EventTranslatorOneArg
import com.lmax.disruptor.dsl.Disruptor
import java.util.concurrent.Executor
import java.util.concurrent.Executors
// Mutable event slot stored in the ring buffer; @BeanProperty exposes the
// getValue/setValue accessors the Disruptor-style handlers use.
case class LongEvent(@BeanProperty var value:Long = 0)
// Factory the Disruptor uses to pre-allocate one LongEvent per ring-buffer slot.
class LongEventFactory extends EventFactory[LongEvent]{
override def newInstance():LongEvent = LongEvent()
}
// Consumer callback: simply prints each event as it is dispatched.
class LongEventHandler extends EventHandler[LongEvent] {
override def onEvent(event:LongEvent, sequence:Long, endOfBatch:Boolean):Unit = println("Event: " + event)
}
// Publishes the long at offset 0 of the given buffer into the ring buffer
// using the claim/write/publish pattern: the sequence is always published in
// 'finally' so a failed write cannot wedge the ring buffer.
class LongEventProducer(var ringBuffer:RingBuffer[LongEvent]){
def onData(bb:ByteBuffer):Unit = {
var sequence:Long = ringBuffer.next()
try{
// mutate the pre-allocated event in the claimed slot
var event:LongEvent = ringBuffer.get(sequence)
event.setValue(bb.getLong(0))
}finally{
ringBuffer.publish(sequence)
}
}
}
// Same as LongEventProducer, but delegates the claim/publish bookkeeping to
// the ring buffer via an EventTranslatorOneArg (the recommended producer API).
class LongEventProducerWithTranslator(var ringBuffer:RingBuffer[LongEvent]){
val TRANSLATOR:EventTranslatorOneArg[LongEvent, ByteBuffer] = new EventTranslatorOneArg[LongEvent, ByteBuffer]() {
def translateTo(event:LongEvent, sequence:Long, bb:ByteBuffer):Unit = event.setValue(bb.getLong(0))
}
def onData(bb:ByteBuffer):Unit = ringBuffer.publishEvent(TRANSLATOR, bb)
}
// Demo entry point: wires factory, handler and producer together, then
// publishes one incrementing long per second, forever (never terminates).
object DisruptorApp extends App {
// Executor that will be used to construct new threads for consumers
val executor:Executor = Executors.newCachedThreadPool()
// The factory for the event
val factory:LongEventFactory = new LongEventFactory()
// Specify the size of the ring buffer, must be power of 2.
val bufferSize:Int = 1024
// Construct the Disruptor
val disruptor:Disruptor[LongEvent] = new Disruptor[LongEvent](factory, bufferSize, executor)
// Connect the handler
disruptor.handleEventsWith(new LongEventHandler())
// Start the Disruptor, starts all threads running
disruptor.start()
// Get the ring buffer from the Disruptor to be used for publishing.
val ringBuffer:RingBuffer[LongEvent] = disruptor.getRingBuffer()
val producer:LongEventProducer = new LongEventProducer(ringBuffer)
val bb:ByteBuffer = ByteBuffer.allocate(8)
var l:Long = 0
// Endless publish loop: writes the counter into the buffer, publishes it,
// sleeps 1s, increments. Intentionally never exits.
while(true){
bb.putLong(0, l);
producer.onData(bb);
Thread.sleep(1000);
l = l + 1
}
} | diegopacheco/scala-playground | lmax-disruptor-fun/src/main/scala/com/github/diegopacheco/sandbox/scala/disruptor/fun/DisruptorApp.scala | Scala | unlicense | 2,621 |
package models.slick.systemmanage
import com.typesafe.slick.driver.oracle.OracleDriver.simple._
import models.systemmanage.System
import scala.slick.lifted._
/**
* Created by hooxin on 15-2-11.
*/
// Slick table mapping for rows of "t_system" onto the System case class.
// Column names mirror the database schema; isleaf is stored as a String flag
// (presumably "y"/"n" or similar — TODO confirm against the schema).
class SystemTable(tag:Tag) extends Table[System](tag,"t_system"){
def id = column[String]("id")
def systemname = column[String]("systemname")
def systemdefine = column[String]("systemdefine")
def picturepath = column[String]("picturepath")
def parentsystemcode = column[String]("parentsystemcode")
def nodeorder = column[Int]("nodeorder")
def isleaf = column[String]("isleaf")
def fullcode = column[String]("fullcode")
// default projection: tuple of all columns <-> System case class
def * = (
id,
systemname,
systemdefine,
picturepath,
parentsystemcode,
nodeorder,
isleaf,
fullcode
) <> (System.tupled,System.unapply)
}
| firefoxmmx2/techsupport_ext4_scala | app/models/slick/systemmanage/SystemTable.scala | Scala | apache-2.0 | 828 |
package blended.streams.file
import java.io.File
import blended.testsupport.BlendedTestSupport
import com.typesafe.config.Config
class DirectorySourceSpec extends AbstractFileSourceSpec {
"The Directory Source should" - {
"not deliver the same file within the poll interval" in {
// Build a poll config pointing at a fresh test directory, matching *.txt only.
val rawCfg : Config = ctCtxt.containerConfig.getConfig("simplePoll")
val pollCfg : FilePollConfig = FilePollConfig(rawCfg, ctCtxt).copy(
sourceDir = BlendedTestSupport.projectTestOutput + "/dirSource",
pattern = Some("^.*txt$")
)
prepareDirectory(pollCfg.sourceDir)
genFile(new File(pollCfg.sourceDir, "test.txt"))
val dirSource : DirectorySource = new DirectorySource(pollCfg)
// first poll sees the file, second poll within the same interval must not
dirSource.nextFile() should be (defined)
dirSource.nextFile() should be (empty)
// after the interval has elapsed the same file becomes visible again
Thread.sleep(pollCfg.interval.toMillis + 20)
dirSource.nextFile() should be (defined)
}
}
}
| woq-blended/blended | blended.streams/src/test/scala/blended/streams/file/DirectorySourceSpec.scala | Scala | apache-2.0 | 929 |
package bundlepricing
import bundlepricing.Discounter.{EmptyCart, PartialResult}
import bundlepricing.data.{Bundle, Dollars, Item, Quantity}
import bundlepricing.util.{NonEmptyMap, NonEmptySet, undiscountedTotal}
import scala.annotation.tailrec
import scalaz.std.iterable
import scalaz.std.list._
import scalaz.syntax.std.boolean._
import scalaz.syntax.std.option._
/**
* @param bundles any available bundles
*/
/**
 * @param bundles any available bundles
 */
// Prices a cart by exhaustively searching over bundle applications: each
// applicable bundle introduces a binary apply/skip choice, so worst case is
// exponential in the number of relevant bundles.
case class Discounter(bundles: Set[Bundle]) {
/**
* Apply the best combination of bundle savings to the cart
* @param cart must contain items, to produce a total
* @return
*/
def total(cart: NonEmptyMap[Item, Quantity]): Dollars = {
// import comparison operators without them shadowing each other
import Dollars.ordering.{mkOrderingOps => dollarsOrderingOps}
import Quantity.ordering.{mkOrderingOps => qtyOrderingOps}
/** Does the cart contain the bundle's items in sufficient quantity? */
def canApplyBundle(cart: Map[Item, Quantity], bundle: Bundle): Boolean =
bundle.items.forall {
case (product, neededQuantity) =>
cart.get(product).map(cartQuantity => cartQuantity >= neededQuantity).getOrElse(false)
}
/** Remove bundle item quantities from a cart */
def applyBundle(cartRemaining: Map[Item, Quantity], bundle: Bundle): Option[Map[Item, Quantity]] =
canApplyBundle(cartRemaining, bundle).option {
bundle.items.foldLeft(cartRemaining) {
case (cart, (product, quantity)) =>
// drop the key entirely when its quantity is fully consumed
if (cart(product) equiv quantity)
cart - product
else
cart.updated(product, cart(product) - quantity)
}
}
/** try applying combinations of bundles until the lowest price combination is determined */
@tailrec def loop(open: List[PartialResult], bestPrice: Dollars): Dollars = {
open match {
case Nil =>
// no more candidate results
bestPrice
case PartialResult(EmptyCart(), _, subtotal) :: tail =>
// no more items in cart. update the bestPrice if ours is better
loop(tail, bestPrice min subtotal)
case PartialResult(cartRemaining, Nil, subtotal) :: tail =>
// no more bundles to apply. add up the remaining items and update the bestPrice if ours better
loop(tail, bestPrice min (subtotal + undiscountedTotal(cartRemaining)(iterable.iterableSubtypeFoldable)))
case PartialResult(cartRemaining, bundle :: moreBundles, subtotal) :: tail =>
// choice of applying this bundle to the cart or not
def useBundleScenario(reducedCart: Map[Item, Quantity]) = // represents applying the bundle
PartialResult(reducedCart, bundle :: moreBundles, subtotal + bundle.price)
def ignoreBundleScenario = // represents not applying the bundle
PartialResult(cartRemaining, moreBundles, subtotal)
loop(
// try applying the bundle if possible, not applying the bundle, and everything else we were going to try
open = applyBundle(cartRemaining, bundle).map(useBundleScenario).orEmpty[List] ++ (ignoreBundleScenario :: tail),
bestPrice = bestPrice
)
}
}
/** ignore bundles for things we aren't buying */
def relevantBundles: Set[Bundle] = bundles.filter(canApplyBundle(cart.toMap, _))
// start with a full cart and the undiscounted total and begin search
loop(
open = List(PartialResult(cart.toMap, relevantBundles.toList, Dollars(0))),
bestPrice = undiscountedTotal(cart.toNel)
)
}
}
// Search-state helpers private to the pricing algorithm.
object Discounter {
/** A partially-evaluated cart */
private case class PartialResult(cartRemaining: Map[Item, Quantity], bundlesRemaining: List[Bundle], subtotal: Dollars)
/** Pattern matching an empty remaining cart */
private object EmptyCart { def unapply(m: Map[Item, Quantity]): Boolean = m.isEmpty }
} | refried/bundle-pricing | src/main/scala/bundlepricing/Discounter.scala | Scala | mit | 3,903 |
/*
* This file is part of Apparat.
*
* Copyright (C) 2010 Joa Ebert
* http://www.joa-ebert.com/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package apparat.bytecode
import apparat.abc.AbcName
/**
 * Describes one exception-handler entry of a bytecode body: the guarded
 * (from, to) range, the handler entry point, the caught exception type and
 * the name of the variable the caught exception is bound to.
 */
class BytecodeExceptionHandler(val from: Marker, val to: Marker, val target: Marker, val typeName: AbcName, val varName: AbcName) {
  override def toString = s"(${from.toString}, ${to.toString}) => $target, ${typeName.toString}, ${varName.toString}"
}
| joa/apparat | apparat-core/src/main/scala/apparat/bytecode/BytecodeExceptionHandler.scala | Scala | lgpl-2.1 | 1,174 |
package avrohugger
package format
package scavro
import format.abstractions.Importer
import avrohugger.input.{DependencyInspector, NestedSchemaExtractor}
import avrohugger.matchers.TypeMatcher
import avrohugger.matchers.custom.CustomNamespaceMatcher
import avrohugger.stores.SchemaStore
import avrohugger.types._
import org.apache.avro.{ Protocol, Schema }
import org.apache.avro.Schema.Field
import org.apache.avro.Schema.Type.RECORD
import treehugger.forest._
import definitions._
import treehuggerDSL._
import scala.collection.JavaConverters._
// Computes the treehugger Import trees needed at the top of a generated
// Scavro source file: Scavro runtime imports, renamed Java model imports
// ("Foo" -> "JFoo") and sibling Scala model imports.
object ScavroImporter extends Importer {
// Rewrites an import of Java generated classes so each type is renamed with
// a "J" prefix, avoiding clashes with the Scala classes of the same name.
def asRenamedImportTree(imported: Import) = {
val packageSym = imported.expr
val typeNames = imported.selectors.map(s => treeToString(s.name))
val renames = typeNames.distinct.sorted.map(typeName => {
val renamedType = "J" + typeName
RENAME(typeName) ==> renamedType
})
IMPORT(packageSym, renames)
}
def getImports(
schemaOrProtocol: Either[Schema, Protocol],
currentNamespace: Option[String],
schemaStore: SchemaStore,
typeMatcher: TypeMatcher): List[Import] = {
// Emits the JavaConverters wildcard import when any (possibly nested) field
// is an Avro array, since array conversion needs asJava/asScala.
def checkJavaConversions(schemaOrProtocol: Either[Schema, Protocol]): Option[Import] = {
// Recursively collects leaf schemas; 'used' guards against cycles in
// recursive record definitions.
def checkForArrays(schema: Schema, used: List[Schema] = List.empty): List[Schema] = {
schema.getType match {
case Schema.Type.MAP =>
checkForArrays(schema.getValueType, used)
case Schema.Type.RECORD =>
getFieldSchemas(schema).flatMap(s => {
if (used.contains(s)) List(s)
else checkForArrays(s, used :+ s)
})
case Schema.Type.UNION =>
// only [null, X] unions (nullable fields) are supported
val types = schema.getTypes().asScala
if (types.length != 2 ||
!types.map(x => x.getType).contains(Schema.Type.NULL) ||
types.filterNot(x => x.getType == Schema.Type.NULL).length != 1) {
sys.error("Unions beyond nullable fields are not supported")
}
else {
val maybeType = types.find(x => x.getType != Schema.Type.NULL)
maybeType match {
case Some(s) => checkForArrays(s, used)
case None => sys.error("There was no type in this union")
}
}
case _ => List(schema)
}
}
val schemas: List[Schema] = schemaOrProtocol match {
case Left(schema) => checkForArrays(schema)
case Right(protocol) => {
protocol.getTypes().asScala.toList
.filter(schema => isRecord(schema))
.flatMap(schema => checkForArrays(schema))
}
}
val hasArrayField: Boolean =
schemas.map(schema => schema.getType).contains(Schema.Type.ARRAY)
val convPackage = RootClass.newClass("scala.collection.JavaConverters")
val javaConvertersImport = IMPORT(convPackage, "_")
if(hasArrayField) Some(javaConvertersImport)
else None
}
// Imports every generated Scavro file needs.
lazy val SchemaClass = RootClass.newClass("org.apache.avro.Schema")
lazy val ScavroPackage = RootClass.newPackage("org.oedura.scavro")
lazy val schemaImport = IMPORT(SchemaClass)
lazy val scavroImport = IMPORT(
ScavroPackage,
"AvroMetadata",
"AvroReader",
"AvroSerializeable")
lazy val baseImports: List[Import] =
List(schemaImport, scavroImport)
lazy val maybeJavaConversionsImport: Option[Import] =
checkJavaConversions(schemaOrProtocol)
// gets all record schemas, including the root schema, which need renaming
def getAllRecordSchemas(topLevelSchemas: List[Schema]): List[Schema] = {
topLevelSchemas
.filter(isRecord)
.flatMap(schema => schema +: getFieldSchemas(schema))
.distinct
}
// gets imported Java model classes, returning them as Import trees
def getJavaRecordImports(
recordSchemas: List[Schema],
namespace: Option[String],
typeMatcher: TypeMatcher): List[Import] = {
def asImportDef(packageName: String, fields: List[Schema]): Import = {
val importedPackageSym = RootClass.newClass(packageName)
val importedTypes =
fields.map(field => DependencyInspector.getReferredTypeName(field))
IMPORT(importedPackageSym, importedTypes)
}
// a schema needs an import only when it is a top-level type declared in a
// different namespace than the file being generated
def requiresImportDef(
schema: Schema,
typeMatcher: TypeMatcher): Boolean = {
def isTopLevel(schema: Schema) = typeMatcher.avroScalaTypes.enum match {
case EnumAsScalaString =>
isRecord(schema)
case JavaEnum | ScalaEnumeration | ScalaCaseObjectEnum =>
(isRecord(schema) || (isEnum(schema)))
}
isTopLevel(schema) && Option(schema.getNamespace) != namespace
}
recordSchemas
.filter(schema => requiresImportDef(schema, typeMatcher))
.groupBy(schema => Option(schema.getNamespace).getOrElse(""))
.toList
.map(group => group match {
case(packageName, fields) => asImportDef(packageName, fields)
})
}
// gets imported Scavro model classes, returning them as Import trees
def getTopLevelImports(
recordSchemas: List[Schema],
namespace: Option[String]): List[Import] = {
recordSchemas
.filter(schema => DependencyInspector.getReferredNamespace(schema).isDefined)
.filter(schema => {
// skip types that, after Scavro namespace renaming, live in this namespace
val renamedNamespace = ScavroNamespaceRenamer.renameNamespace(
DependencyInspector.getReferredNamespace(schema),
Left(schema),
typeMatcher)
renamedNamespace != namespace
})
.groupBy(schema => {
val renamedNamespace = ScavroNamespaceRenamer.renameNamespace(
DependencyInspector.getReferredNamespace(schema),
Left(schema),
typeMatcher)
renamedNamespace.get
})
.toList.map(group => group match {
case(packageName, fields) => {
val importedPackageSym = RootClass.newClass(packageName)
val importedTypes = fields.map(field => DependencyInspector.getReferredTypeName(field))
IMPORT(importedPackageSym, importedTypes)
}
})
}
val topLevelSchemas = getTopLevelSchemas(schemaOrProtocol, schemaStore, typeMatcher)
val allRecordSchemas = getAllRecordSchemas(topLevelSchemas)
val scalaRecordImports = getUserDefinedImports(
allRecordSchemas,
currentNamespace,
typeMatcher)
val javaRecordImports = getJavaRecordImports(
allRecordSchemas,
currentNamespace,
typeMatcher)
val renamedJavaImports = javaRecordImports.map(asRenamedImportTree)
val scalaRecords = getRecordSchemas(topLevelSchemas)
val enumSchemas = getEnumSchemas(topLevelSchemas)
val scalaImports = getTopLevelImports(scalaRecords ++ enumSchemas, currentNamespace)
val recordImports = (scalaImports ++ renamedJavaImports).distinct
// no records at all -> nothing to import, not even the base Scavro imports
if (allRecordSchemas.isEmpty) List.empty
else baseImports ++ recordImports ++ maybeJavaConversionsImport
}
}
| julianpeeters/avrohugger | avrohugger-core/src/main/scala/format/scavro/ScavroImporter.scala | Scala | apache-2.0 | 7,038 |
package com.basdek.mailchimp_v3.helpers
import scala.io.Source
import com.basdek.mailchimp_v3.Config
import com.typesafe.config.ConfigFactory
/**
* This trait helps loading a Config based upon ENV-vars (for test purposes,
* one is free to do it in whatever way one sees fit when using the lib, through
* passing a Config instance to an Operation.)
*/
trait ConfigLoader {

  // Parse the test configuration eagerly and close the underlying resource
  // stream afterwards; the original kept the Source open for the lifetime of
  // the trait instance (a file-handle leak).
  private val conf = {
    val confSource = Source.fromURL(getClass.getResource("/TestConfig.conf"))
    try {
      ConfigFactory.parseReader(confSource.bufferedReader()).resolve()
    } finally {
      confSource.close()
    }
  }

  /**
   * Returns the api endpoint.
   * @return The url for the api endpoint.
   */
  val apiEndpoint : String = conf.getString("mailchimp.api_endpoint")

  /**
   * Returns the api key.
   * @return An api key.
   */
  val apiKey : String = conf.getString("mailchimp.api_key")

  /**
   * Creates a default Config (loading the ENV vars).
   * @return A Config instance.
   */
  def defaultCfg : Config =
    new Config(apiKey = this.apiKey, apiEndpoint = this.apiEndpoint)
}
| basdek/mailchimp_v3 | src/test/scala/com/basdek/mailchimp_v3/helpers/ConfigLoader.scala | Scala | mit | 1,043 |
// Compute the total character count two equivalent ways: foldLeft and its
// symbolic alias `/:` (deprecated in Scala 2.13+), then compare the results.
val stringList = List("A", "b", "c", "d", "TOTO")
// `val`, not `var`: the result is never reassigned.
val size1 = stringList.foldLeft(0)((i, value) => i + value.length)
val size2 = (0 /: stringList) {(i, value) => i + value.length}
if (size1 == size2) {
  println ("function match and the size is " + size1)
} else {
  println ("function doesn't match")
}
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationBuilderFn
import org.elasticsearch.search.aggregations.AggregationBuilders
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder
object GlobalAggregationBuilder {

  import scala.collection.JavaConverters._

  /**
   * Translates a [[GlobalAggregationDefinition]] into the corresponding
   * Elasticsearch builder, attaching sub-aggregations, pipeline
   * aggregations and (when present) metadata.
   */
  def apply(agg: GlobalAggregationDefinition): GlobalAggregationBuilder = {
    val builder = AggregationBuilders.global(agg.name)
    agg.subaggs.foreach(sub => builder.subAggregation(AggregationBuilder(sub)))
    agg.pipelines.foreach(p => builder.subAggregation(PipelineAggregationBuilderFn(p)))
    if (agg.metadata.nonEmpty) builder.setMetaData(agg.metadata.asJava)
    builder
  }
}
package net.sansa_stack.ml.spark.similarity.similarityEstimationModels
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, udf}
class BraunBlanquetModel extends GenericSimilarityEstimatorModel {

  // Braun-Blanquet similarity of two sparse vectors: |A ∩ B| / max(|A|, |B|),
  // computed over the sets of active (non-zero) indices.
  protected val braunBlanquet = udf((a: Vector, b: Vector) => {
    val indicesA = a.toSparse.indices.toSet
    val indicesB = b.toSparse.indices.toSet
    val shared = indicesA.intersect(indicesB).size.toDouble
    shared / math.max(indicesA.size.toDouble, indicesB.size.toDouble)
  })

  override val estimatorName: String = "BraunBlanquetSimilarityEstimator"
  override val estimatorMeasureType: String = "similarity"
  override val similarityEstimation = braunBlanquet

  // Cross-joins the two DataFrames, scores every pair, then filters by the
  // threshold (-1.0 keeps everything).
  override def similarityJoin(dfA: DataFrame, dfB: DataFrame, threshold: Double = -1.0, valueColumn: String = "braunBlanquetSimilarity"): DataFrame = {
    setSimilarityEstimationColumnName(valueColumn)
    val pairs = createCrossJoinDF(dfA, dfB)
    val scored = pairs.withColumn(
      valueColumn,
      similarityEstimation(col("featuresA"), col("featuresB")))
    reduceJoinDf(scored, threshold)
  }

  // Scores every row of dfA against `key` and keeps the k most similar rows.
  override def nearestNeighbors(dfA: DataFrame, key: Vector, k: Int, keyUri: String = "unknown", valueColumn: String = "braunBlanquetSimilarity", keepKeyUriColumn: Boolean = false): DataFrame = {
    setSimilarityEstimationColumnName(valueColumn)
    val candidates = createNnDF(dfA, key, keyUri)
    val scored = candidates.withColumn(
      valueColumn,
      similarityEstimation(col("featuresA"), col("featuresB")))
    reduceNnDf(scored, k, keepKeyUriColumn)
  }
}
| SANSA-Stack/SANSA-RDF | sansa-ml/sansa-ml-spark/src/main/scala/net/sansa_stack/ml/spark/similarity/similarityEstimationModels/BraunBlanquetModel.scala | Scala | apache-2.0 | 1,763 |
package com.geteit.rcouch.views
import java.net.URLEncoder
import Query._
import spray.http.Uri
import play.api.libs.json._
import play.api.libs.json.JsString
import scala.Some
import com.geteit.rcouch.views.Query.BBox
import scala.reflect.macros.Context
/**
* The Query class allows custom view-queries to the Couchbase cluster.
*
* The Query class supports all arguments that can be passed along with a
* Couchbase view query. For example, this makes it possible to change the
* sorting order, query only a range of keys or include the full docs.
*
* By default, the full docs are not included and no reduce job is executed.
*/
case class Query(key: Option[Key] = None,
                 keys: List[Key] = Nil,
                 group: Option[Boolean] = None,
                 groupLevel: Option[Int] = None,
                 limit: Option[Int] = None,
                 skip: Option[Int] = None,
                 startKey: Option[Key] = None,
                 endKey: Option[Key] = None,
                 inclusiveEnd: Option[Boolean] = None,
                 startKeyDocId: Option[String] = None,
                 endKeyDocId: Option[String] = None,
                 onError: Option[OnError] = None,
                 debug: Option[Boolean] = None,
                 bBox: Option[BBox] = None,
                 descending: Option[Boolean] = None,
                 stale: Option[Stale] = None,
                 reduce: Option[Boolean] = None,
                 includeDocs: Boolean = false
                  ) {

  /**
   * Returns the Query object as a string, suitable for the HTTP queries.
   *
   * @return Returns the query object as its string representation
   */
  override def toString: String = httpQuery.toString()

  /**
   * Builds the HTTP query parameters for the Couchbase view engine;
   * parameters whose value is None are omitted.
   *
   * NOTE(review): `includeDocs` is deliberately absent here — presumably it
   * is consumed by the response handling rather than the HTTP request;
   * confirm against the callers before relying on it.
   */
  def httpQuery: Uri.Query = {
    Query.buildQuery(
      ("key", key),
      ("keys", if (keys.isEmpty) None else Some(keys.mkString("[", ",", "]"))),
      ("group", group),
      ("group_level", groupLevel),
      ("limit", limit),
      ("skip", skip),
      ("startkey", startKey),
      ("endkey", endKey),
      ("startkey_docid", startKeyDocId),
      ("endkey_docid", endKeyDocId),
      // was "inclusive_end=": the stray '=' produced a malformed parameter name
      ("inclusive_end", inclusiveEnd),
      ("reduce", reduce),
      ("on_error", onError),
      ("bbox", bBox),
      ("debug", debug),
      ("stale", stale),
      ("descending", descending)
    )
  }
}
object Query {

  /** Base for enum-like string arguments rendered verbatim into the query. */
  sealed trait StrArg {
    protected val str: String
    override def toString: String = str
  }

  /** Behaviour when a node cannot be reached while serving a view query. */
  sealed abstract class OnError(protected val str: String) extends StrArg
  object OnError {
    case object Stop extends OnError("stop")
    case object Continue extends OnError("continue")
  }

  /** Index-freshness options for a view query. */
  sealed abstract class Stale(protected val str: String) extends StrArg
  object Stale {
    case object Ok extends Stale("ok")
    case object False extends Stale("false")
    /** Correctly spelled replacement for the misspelled [[UpdateAdter]]. */
    case object UpdateAfter extends Stale("update_after")
    @deprecated("Misspelled; use Stale.UpdateAfter instead", "0.1")
    case object UpdateAdter extends Stale("update_after")
  }

  /** Bounding box for spatial (GeoCouch) view queries. */
  case class BBox(lowerLeftLong: Double, lowerLeftLat: Double, upperRightLong: Double, upperRightLat: Double) {
    override def toString = lowerLeftLong + "," + lowerLeftLat + "," + upperRightLong + "," + upperRightLat
  }

  // Folds the (name, optional value) pairs into a Uri.Query, dropping Nones.
  private def buildQuery(parts: (String, Option[Any])*): Uri.Query = parts.foldLeft(Uri.Query.Empty: Uri.Query)((q, p) => p match {
    case (k, Some(v)) => (k, v.toString) +: q
    case _ => q
  })
}
/** A view key held as raw JSON; rendered compactly for use in query strings. */
case class Key(json: JsValue) {
  override def toString: String = Json.stringify(json)
}
object Key {
  import scala.language.implicitConversions

  /** Extracts the underlying string from a string-valued [[Key]]. */
  object String {
    def unapply(key: Key): Option[String] = key match {
      case Key(JsString(str)) => Some(str)
      case _ => None
    }
  }

  /** Extracts the underlying boolean from a boolean-valued [[Key]]. */
  object Boolean {
    def unapply(key: Key): Option[Boolean] = key match {
      case Key(JsBoolean(v)) => Some(v)
      case _ => None
    }
  }

  /** JSON (de)serialization for [[Key]]: a Key is just its raw JSON value. */
  implicit val fmt: Format[Key] = new Format[Key]{
    def reads(json: JsValue): JsResult[Key] = JsSuccess(Key(json))
    def writes(o: Key): JsValue = o.json
  }

  // Explicit result types on the implicit conversions: implicits without
  // declared types are fragile to resolve and deprecated in later Scala.
  implicit def value_to_key[A](v: A)(implicit writes: Writes[A]): Key = Key(writes.writes(v))

  /*
   * Creates key from tuple.
   *
   * TODO: reimplement - could probably be better handled as macro, current implementation is not typesafe and very limited
   */
  implicit def product_to_key[A <: Product](v: A): Key = Key(JsArray(
    v.productIterator.map {
      case s: String => JsString(s)
      case c: Char => JsString(c.toString)
      case b: Boolean => JsBoolean(b)
      case i: Int => JsNumber(i)
      case d: Double => JsNumber(d)
      case f: Float => JsNumber(f)
      case _: Unit => Json.obj()
      case elem => throw new IllegalArgumentException(s"Unexpected tuple key item: $elem")
    }.toSeq
  ))
}
| zbsz/reactive-couch | src/main/scala/com/geteit/rcouch/views/Query.scala | Scala | apache-2.0 | 4,583 |
/* ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------*/
// Create a hash-keyed table with numeric ("N") primary key attribute "Id".
Table.create("books_hash", "Id", "N")

val i1 = new Item(
  ("Id", "N", "101"),
  ("Title", "S", "Some Title"))

val i2 = new Item(
  ("Id", "N", "102"),
  ("Title", "S", "Another Title"))

// NOTE(review): i3 reuses Id "101", so it presumably replaces i1 on put —
// confirm against Table.put semantics.
val i3 = new Item(
  ("Id", "N", "101"),
  ("Title", "S", "Tale of Two Databases"))

// Store all three items, then read back whatever is stored under key 101.
Table.put("books_hash")(i1, i2, i3)

Table.get("books_hash", "Id", "101", "N")
| basho-labs/rinamo | tests/com.basho.dynamodb.integ/console/hash.scala | Scala | apache-2.0 | 1,167 |
package teststate.typeclass
/**
 * A natural transformation from effect `F` to effect `G`: converts an
 * `F[A]` into a `G[A]` for any element type `A`. The argument is by-name,
 * so `fa` is not evaluated until the implementation forces it.
 */
trait ~~>[F[_], G[_]] {
  def apply[A](fa: => F[A]): G[A]
}
| japgolly/test-state | core/shared/src/main/scala/teststate/typeclass/NaturalTransformation.scala | Scala | apache-2.0 | 90 |
/**
* Copyright 2013-2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.paypal.cascade.common.properties
import java.io.IOException
import java.util.Properties
import com.paypal.cascade.common.logging.LoggingSugar
/**
* Class specifically for accessing values from build.properties.
* @param propertiesResourcePath the path to the .properties resource file. See
* <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Class.html#getResource%28java.lang.String%29">here</a>
* for more information on how to pass this argument.
*/
class BuildProperties(propertiesResourcePath: String = "/build.properties") extends LoggingSugar {

  // Lazily load the properties on first access. Absent resource or an I/O
  // failure both yield None; an I/O failure is additionally logged.
  private lazy val props: Option[Properties] = {
    val maybeUrl = Option(getClass.getResource(propertiesResourcePath))
    maybeUrl.flatMap { url =>
      try {
        val stream = url.openStream()
        try {
          val loaded = new Properties
          loaded.load(stream)
          Some(loaded)
        } finally {
          stream.close()
        }
      } catch {
        case ioe: IOException =>
          getLogger[BuildProperties].warn(s"Unable to load $propertiesResourcePath", ioe)
          None
      }
    }
  }

  /**
   * Retrieves an optional value from a lazily-loaded `java.util.Properties` object.
   * @param key the key to retrieve
   * @return an optional String value for the given `key`
   */
  def get(key: String): Option[String] =
    for {
      p <- props
      value <- Option(p.getProperty(key))
    } yield value
}
| 2rs2ts/cascade | common/src/main/scala/com/paypal/cascade/common/properties/BuildProperties.scala | Scala | apache-2.0 | 2,005 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3
import org.neo4j.cypher.internal.compiler.v2_3.commands.expressions.Identifier
import org.neo4j.cypher.internal.compiler.v2_3.commands.predicates.HasLabel
import org.neo4j.cypher.internal.compiler.v2_3.commands.values.KeyToken
import org.neo4j.cypher.internal.frontend.v2_3.SyntaxException
/**
* LabelSpec represent parsed label sets before they are turned into either expressions or predicates
*
* They come in three forms
*
* <ul>
* <li>LabelSet.empty denotes that no labels have been parsed</li>
* <li>LabelSet(Some(expr)) denotes that a single set of labels has been parsed</li>
* <li>LabelChoice(labelSets) denotes that multiple sets of labels have been parsed</li>
* </ul>
*
*/
sealed abstract class LabelSpec {

  /** True iff this spec was created without any labels at all. */
  def bare: Boolean

  /** Every [[LabelSet]] contained in this spec. */
  def allSets: Seq[LabelSet]

  /**
   * This spec viewed as a single [[LabelSet]].
   *
   * @throws SyntaxException if this is a LabelChoice
   */
  def asLabelSet: LabelSet

  /** One [[HasLabel]] predicate per label value, applied to `ident`. */
  def toPredicates(ident: Identifier): Seq[HasLabel] =
    for (labelVal <- asLabelSet.labelVals) yield HasLabel(ident, labelVal)

  /**
   * Reduce a LabelChoice to a LabelSet if possible.
   *
   * @return a simplified LabelSpec
   */
  def simplify: LabelSpec = this
}
object LabelSet {
  // The spec representing "no labels given at all".
  val empty = LabelSet(Seq.empty)
}

/** A single (possibly empty) set of parsed labels. */
final case class LabelSet(labelVals: Seq[KeyToken]) extends LabelSpec {
  val bare = labelVals.isEmpty
  def allSets = if (bare) Seq.empty else Seq(this)
  def asLabelSet = this
}
/** Several alternative label sets; cannot be collapsed to one set unless it
  * holds zero or one alternatives. */
final case class LabelChoice(override val allSets: LabelSet*) extends LabelSpec {

  def bare = allSets.isEmpty

  def asLabelSet: LabelSet = throw new SyntaxException("Required single label set or none but found too many")

  // Zero alternatives -> the empty set; exactly one -> that set; else unchanged.
  override def simplify: LabelSpec = allSets match {
    case Seq()       => LabelSet.empty
    case Seq(single) => single
    case _           => this
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/LabelSpec.scala | Scala | apache-2.0 | 2,772 |
package com.daxin.scalapackage
/**
 * Difference between methods and function values:
 * a method is defined with the `def` keyword: def add(x: Int, y: Int): Int = { x + y }
 * a function value is created with `=>`:      val add = (x: Int, y: Int) => { x + y }
 *
 * The biggest practical difference: a function value can be passed as an
 * argument to a method.
 *
 * Created by daxin on 16/3/25.
 */
object SourceStudy {

  def main(args: Array[String]): Unit = {
    val numbers = Vector(1, 2, 3, 4, 5)

    // A function value (note the `=>`): bump odd numbers to the next even one.
    val roundUpToEven = (x: Int) => if (x % 2 == 0) x else x + 1
    println(numbers.map(roundUpToEven))

    numbers.map(_ * 2) // the underscore is a placeholder for the parameter
    println("===============")

    // An anonymous function literal passed directly to map.
    val r1 = numbers.map((x: Int) => if (x % 2 == 0) x else x + 1)
    println("r1:" + r1)

    // Parameter type omitted: inferred from the Vector's element type.
    val r2 = numbers.map(x => if (x % 2 == 0) x else x + 1)
    println("r2:" + r2)

    // Passing a function value into a method that expects one.
    val sum = (x: Int, y: Int) => x + y
    println(addByFunction(sum, 3, 2))
  }

  /**
   * A method (defined with `def`): adds its two arguments.
   *
   * @param x first addend
   * @param y second addend
   * @return the sum
   */
  def add(x: Int, y: Int): Int = x + y

  /**
   * A method that accepts a function of shape `(Int, Int) => Int` and
   * applies it to the remaining two arguments.
   *
   * @param f the binary function to apply
   * @return `f(x, y)`
   */
  def addByFunction(f: (Int, Int) => Int, x: Int, y: Int): Int = f(x, y)
}
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang
import org.apache.spark.sql.types.StructType
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.inference.{InferContext, InferenceWarnings}
import io.deepsense.deeplang.DPortPosition.DPortPosition
trait DataFrame2To1Operation { self: DOperation2To1[DataFrame, DataFrame, DataFrame] =>

  /** Two input ports: the left and the right DataFrame. */
  override def inPortsLayout: Vector[DPortPosition] =
    Vector(DPortPosition.Left, DPortPosition.Right)

  // When both input schemas are known, delegate to inferSchema; otherwise
  // return unspecified DataFrame knowledge with no warnings.
  override protected final def inferKnowledge(
      leftDataFrameKnowledge: DKnowledge[DataFrame],
      rightDataFrameKnowledge: DKnowledge[DataFrame])(
      context: InferContext): (DKnowledge[DataFrame], InferenceWarnings) = {
    (leftDataFrameKnowledge.single.schema, rightDataFrameKnowledge.single.schema) match {
      case (Some(left), Some(right)) =>
        val (outputSchema, warnings) = inferSchema(left, right)
        (DKnowledge(DataFrame.forInference(outputSchema)), warnings)
      case _ =>
        (DKnowledge(DataFrame.forInference()), InferenceWarnings.empty)
    }
  }

  /** Default inference: empty schema, no warnings. Override to specialize. */
  protected def inferSchema(
      leftSchema: StructType,
      rightSchema: StructType): (StructType, InferenceWarnings) = {
    (StructType(Seq.empty), InferenceWarnings.empty)
  }
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/main/scala/io/deepsense/deeplang/DataFrame2To1Operation.scala | Scala | apache-2.0 | 1,902 |
package edu.depauw.csc.scala.animation;
import javax.swing._
import edu.depauw.csc.scala.graphics._
import java.lang.Thread
import java.awt.BorderLayout, BorderLayout._
import java.util.Date
import java.lang.Math
object BouncingBall extends JFrame {

  /** Builds the window, then animates the ball until it leaves the canvas. */
  def main(args: Array[String]): Unit = {
    // `val`, not `var`: these references are never reassigned.
    val canvas: AnimatedCanvas = new AnimatedCanvas()
    setLayout(new BorderLayout())
    add(canvas)
    setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
    setSize(640, 480)
    setVisible(true)

    val start: Date = new Date()
    val ball: Circle = Circle(10, 10, 10, Color.RED)
    canvas.add(ball)

    // Animation loop driven by wall-clock time elapsed since `start`;
    // repaints roughly once per millisecond until the ball passes x = 640.
    while (ball.x < 640) {
      val current: Date = new Date()
      val time = (current.getTime() - start.getTime()) / 1000.0
      animate(time, ball)
      Thread.sleep(1)
      canvas.repaint()
    }
  }

  /**
   * Positions the circle for time `t` (seconds): x advances linearly,
   * y bounces between 0 and 420 via sin².
   */
  def animate(t: Double, c: Circle): Unit = {
    c.y = 420 * (Math.sin(t) * Math.sin(t))
    c.x = t * 20
  }
}
| bhoward/EscalatorOld | ScalaGraphics/src/edu/depauw/csc/scala/animation/BouncingBall.scala | Scala | apache-2.0 | 981 |
package is.hail.types.physical
import is.hail.asm4s.{Code, Value}
import is.hail.expr.ir.EmitCodeBuilder
import is.hail.types.virtual.{TStruct, Type}
import is.hail.utils._
import scala.collection.JavaConverters._
object PCanonicalStruct {
  // Cached empty structs, one per requiredness, so `empty` never allocates.
  private val requiredEmpty = PCanonicalStruct(Array.empty[PField], true)
  private val optionalEmpty = PCanonicalStruct(Array.empty[PField], false)

  /** The (cached) empty struct with the requested requiredness. */
  def empty(required: Boolean = false): PStruct =
    if (required) requiredEmpty else optionalEmpty

  /** Builds a struct from (name, type) pairs, assigning indices in order. */
  def apply(required: Boolean, args: (String, PType)*): PCanonicalStruct = {
    val indexedFields = args
      .iterator
      .zipWithIndex
      .map { case ((name, typ), idx) => PField(name, typ, idx) }
      .toFastIndexedSeq
    PCanonicalStruct(indexedFields, required)
  }

  /** Java-friendly constructor taking parallel name/type lists. */
  def apply(names: java.util.List[String], types: java.util.List[PType], required: Boolean): PCanonicalStruct = {
    val nameArray = names.asScala.toArray
    val typeArray = types.asScala.toArray
    if (nameArray.length != typeArray.length)
      fatal(s"number of names does not match number of types: found ${ nameArray.length } names and ${ typeArray.length } types")
    PCanonicalStruct(required, nameArray.zip(typeArray): _*)
  }

  def apply(args: (String, PType)*): PCanonicalStruct =
    PCanonicalStruct(false, args: _*)

  /** The canonical physical struct for the given type. */
  def canonical(t: Type): PCanonicalStruct = PType.canonical(t).asInstanceOf[PCanonicalStruct]
  def canonical(t: PType): PCanonicalStruct = PType.canonical(t).asInstanceOf[PCanonicalStruct]
}
final case class PCanonicalStruct(fields: IndexedSeq[PField], required: Boolean = false) extends PCanonicalBaseStruct(fields.map(_.typ).toArray) with PStruct {
  // Invariant: each field's stored index equals its position in `fields`.
  assert(fields.zipWithIndex.forall { case (f, i) => f.index == i })

  // Field names must be unique; the error lists every duplicate at once.
  if (!fieldNames.areDistinct()) {
    val duplicates = fieldNames.duplicates()
    fatal(s"cannot create struct with duplicate ${plural(duplicates.size, "field")}: " +
      s"${fieldNames.map(prettyIdentifier).mkString(", ")}", fieldNames.duplicates())
  }

  // Returns `this` unchanged when requiredness already matches (no allocation).
  override def setRequired(required: Boolean): PCanonicalStruct = if(required == this.required) this else PCanonicalStruct(fields, required)

  // Renames fields per `m`; names absent from the map are kept as-is.
  override def rename(m: Map[String, String]): PStruct = {
    val newFieldsBuilder = new BoxedArrayBuilder[(String, PType)]()
    fields.foreach { fd =>
      val n = fd.name
      newFieldsBuilder += (m.getOrElse(n, n) -> fd.typ)
    }
    PCanonicalStruct(required, newFieldsBuilder.result(): _*)
  }

  // Pretty-printer: single-line form when `compact`, indented block otherwise.
  override def _pretty(sb: StringBuilder, indent: Int, compact: Boolean) {
    if (compact) {
      sb.append("PCStruct{")
      fields.foreachBetween(_.pretty(sb, indent, compact))(sb += ',')
      sb += '}'
    } else {
      if (size == 0)
        sb.append("Struct { }")
      else {
        sb.append("Struct {")
        sb += '\n'
        fields.foreachBetween(_.pretty(sb, indent + 4, compact))(sb.append(",\n"))
        sb += '\n'
        sb.append(" " * indent)
        sb += '}'
      }
    }
  }

  // Name-based overloads: resolve the field name to its index, then delegate
  // to the index-based primitives.
  override def loadField(offset: Code[Long], fieldName: String): Code[Long] =
    loadField(offset, fieldIdx(fieldName))

  override def isFieldMissing(cb: EmitCodeBuilder, offset: Code[Long], field: String): Value[Boolean] =
    isFieldMissing(cb, offset, fieldIdx(field))

  override def fieldOffset(offset: Code[Long], fieldName: String): Code[Long] =
    fieldOffset(offset, fieldIdx(fieldName))

  override def setFieldPresent(cb: EmitCodeBuilder, offset: Code[Long], field: String): Unit =
    setFieldPresent(cb, offset, fieldIdx(field))

  override def setFieldMissing(cb: EmitCodeBuilder, offset: Code[Long], field: String): Unit =
    setFieldMissing(cb, offset, fieldIdx(field))

  // Inserts/overwrites fields: an existing name keeps its index with the new
  // type; a new name is appended at the end.
  override def insertFields(fieldsToInsert: TraversableOnce[(String, PType)]): PStruct = {
    val ab = new BoxedArrayBuilder[PField](fields.length)
    var i = 0
    while (i < fields.length) {
      ab += fields(i)
      i += 1
    }
    val it = fieldsToInsert.toIterator
    while (it.hasNext) {
      val (name, typ) = it.next
      if (fieldIdx.contains(name)) {
        val j = fieldIdx(name)
        ab(j) = PField(name, typ, j)
      } else
        ab += PField(name, typ, ab.length)
    }
    PCanonicalStruct(ab.result(), required)
  }

  override def deepRename(t: Type): PType = deepRenameStruct(t.asInstanceOf[TStruct])

  // Recursively renames this struct's fields to match the virtual struct `t`;
  // assumes `t` and `this` have fields at identical indices.
  private def deepRenameStruct(t: TStruct): PStruct = {
    PCanonicalStruct((t.fields, this.fields).zipped.map( (tfield, pfield) => {
      assert(tfield.index == pfield.index)
      PField(tfield.name, pfield.typ.deepRename(tfield.typ), pfield.index)
    }), this.required)
  }

  // Returns `this` when no nested type changed under copiedType, preserving
  // reference identity where possible.
  override def copiedType: PType = {
    val copiedTypes = types.map(_.copiedType)
    if (types.indices.forall(i => types(i).eq(copiedTypes(i))))
      this
    else {
      PCanonicalStruct(copiedTypes.indices.map(i => fields(i).copy(typ = copiedTypes(i))), required)
    }
  }
}
| hail-is/hail | hail/src/main/scala/is/hail/types/physical/PCanonicalStruct.scala | Scala | mit | 4,761 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.sources.{DataSourceRegister, StreamSinkProvider}
import org.apache.spark.sql.streaming.OutputMode
class ConsoleSink(options: Map[String, String]) extends Sink with Logging {
  // Number of rows to display, by default 20 rows
  private val numRowsToShow = options.get("numRows").map(_.toInt).getOrElse(20)

  // Truncate the displayed data if it is too long, by default it is true
  private val isTruncated = options.get("truncate").map(_.toBoolean).getOrElse(true)

  // Track the batch id
  private var lastBatchId = -1L

  /** Prints a header ("Batch: n", or "Rerun batch: n" when replayed) and the
    * batch contents to stdout. Synchronized so concurrent batches interleave
    * cleanly and `lastBatchId` updates are safe. */
  override def addBatch(batchId: Long, data: DataFrame): Unit = synchronized {
    val batchIdStr = if (batchId <= lastBatchId) {
      s"Rerun batch: $batchId"
    } else {
      lastBatchId = batchId
      s"Batch: $batchId"
    }

    // scalastyle:off println
    println("-------------------------------------------")
    println(batchIdStr)
    println("-------------------------------------------")
    // scalastyle:on println
    // ^ was ":off" again, which left the println check disabled for the rest of the file

    data.sparkSession.createDataFrame(
      data.sparkSession.sparkContext.parallelize(data.collectInternal()), data.schema)
      .showInternal(numRowsToShow, isTruncated)
  }
}
/** Registers the "console" streaming sink with Spark's DataSource API. */
class ConsoleSinkProvider extends StreamSinkProvider with DataSourceRegister {
  // partitionColumns and outputMode are accepted but not used: console
  // output is unpartitioned and mode-independent.
  def createSink(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      partitionColumns: Seq[String],
      outputMode: OutputMode): Sink = {
    new ConsoleSink(parameters)
  }

  def shortName(): String = "console"
}
| saturday-shi/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/console.scala | Scala | apache-2.0 | 2,434 |
import awscala._
import scala.collection.JavaConverters._
import com.amazonaws.services.{ identitymanagement => aws }
object IAM {

  /** Builds an IAM client from explicit credentials. */
  def apply(credentials: Credentials): IAM =
    apply(BasicCredentialsProvider(credentials.getAWSAccessKeyId, credentials.getAWSSecretKey))

  /** Builds an IAM client from a provider (defaults to the standard loader). */
  def apply(credentialsProvider: CredentialsProvider = CredentialsLoader.load()): IAM =
    new IAMClient(credentialsProvider)

  /** Builds an IAM client from a raw key id / secret pair. */
  def apply(accessKeyId: String, secretAccessKey: String): IAM =
    apply(BasicCredentialsProvider(accessKeyId, secretAccessKey))
}
/**
* Amazon Identity Management Java client wrapper
* @see [[http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/]]
*/
trait IAM extends aws.AmazonIdentityManagement {
// TODO AccountSummary
// TODO AccountPasswordPolicy
// TODO ServerCertificate
// TODO SigningCertificates
  /** Changes the password of the currently authenticated IAM user. */
  def changePassword(oldPassword: String, newPassword: String): Unit = {
    changePassword(new aws.model.ChangePasswordRequest().withOldPassword(oldPassword).withNewPassword(newPassword))
  }
  // ------------------------------------------
  // Account Aliases
  // ------------------------------------------

  /** The aliases associated with the AWS account. */
  def accountAliases: Seq[String] = listAccountAliases.getAccountAliases.asScala

  /** Registers a new alias for the account. */
  def createAccountAlias(alias: String): Unit = {
    createAccountAlias(new aws.model.CreateAccountAliasRequest().withAccountAlias(alias))
  }

  /** Removes an alias from the account. */
  def deleteAccountAlias(alias: String): Unit = {
    deleteAccountAlias(new aws.model.DeleteAccountAliasRequest().withAccountAlias(alias))
  }
  // ------------------------------------------
  // Groups
  // ------------------------------------------

  /** All IAM groups in the account. */
  def groups: Seq[Group] = listGroups.getGroups.asScala.map(g => Group(g)).toSeq

  /** The groups the given user belongs to. */
  def groups(user: User): Seq[Group] = {
    listGroupsForUser(new aws.model.ListGroupsForUserRequest().withUserName(user.name))
      .getGroups.asScala.map(g => Group(g)).toSeq
  }

  /** Looks up a group by name; None when it does not exist. */
  def group(name: String): Option[Group] = try {
    Some(Group(getGroup(new aws.model.GetGroupRequest(name)).getGroup))
  } catch { case e: aws.model.NoSuchEntityException => None }

  /** Creates a new IAM group. */
  def createGroup(name: String): Group = Group(createGroup(new aws.model.CreateGroupRequest(name)).getGroup)

  /** Moves the group to a new path. */
  def updateGroupPath(group: Group, newPath: String): Unit = {
    updateGroup(new aws.model.UpdateGroupRequest().withGroupName(group.name).withNewPath(newPath))
  }

  /** Renames the group. */
  def updateGroupName(group: Group, newName: String): Unit = {
    updateGroup(new aws.model.UpdateGroupRequest().withGroupName(group.name).withNewGroupName(newName))
  }

  /** Adds the user to the group. */
  def addUserToGroup(group: Group, user: User): Unit = {
    addUserToGroup(new aws.model.AddUserToGroupRequest(group.name, user.name))
  }

  /** Removes the user from the group. */
  def removeUserFromGroup(group: Group, user: User): Unit = {
    removeUserFromGroup(new aws.model.RemoveUserFromGroupRequest()
      .withGroupName(group.name).withUserName(user.name))
  }

  /** Alias of [[deleteGroup]]. */
  def delete(group: Group): Unit = deleteGroup(group)

  /** Deletes the group. */
  def deleteGroup(group: Group): Unit = deleteGroup(new aws.model.DeleteGroupRequest(group.name))
  // ------------------------------------------
  // Group Policies
  // ------------------------------------------

  /** Alias of [[groupPolicyNames]]. */
  def policyNames(group: Group) = groupPolicyNames(group)

  /** Names of the inline policies attached to the group. */
  def groupPolicyNames(group: Group): Seq[String] = {
    listGroupPolicies(new aws.model.ListGroupPoliciesRequest().withGroupName(group.name)).getPolicyNames.asScala.toSeq
  }

  /** Fetches a single inline group policy by name. */
  def groupPolicy(group: Group, policyName: String): GroupPolicy = GroupPolicy(
    group, getGroupPolicy(new aws.model.GetGroupPolicyRequest().withGroupName(group.name).withPolicyName(policyName))
  )

  /** Alias of [[putGroupPolicy]]. */
  def put(policy: GroupPolicy): Unit = putGroupPolicy(policy)

  /** Stores (creates or replaces) the given inline group policy. */
  def putGroupPolicy(policy: GroupPolicy): Unit = putGroupPolicy(policy.group, policy.name, policy.document)

  /** Stores an inline group policy from a structured Policy value. */
  def putGroupPolicy(group: Group, policyName: String, policy: Policy): Unit = {
    putGroupPolicy(group, policyName, policy.toJSON)
  }

  /** Stores an inline group policy from a raw JSON policy document. */
  def putGroupPolicy(group: Group, policyName: String, policyDocument: String): Unit = {
    putGroupPolicy(new aws.model.PutGroupPolicyRequest()
      .withGroupName(group.name)
      .withPolicyName(policyName).withPolicyDocument(policyDocument))
  }

  /** Alias of [[deleteGroupPolicy]]. */
  def delete(policy: GroupPolicy): Unit = deleteGroupPolicy(policy)

  /** Deletes the inline group policy. */
  def deleteGroupPolicy(policy: GroupPolicy): Unit = {
    deleteGroupPolicy(
      new aws.model.DeleteGroupPolicyRequest().withGroupName(policy.group.name).withPolicyName(policy.name)
    )
  }
  // ------------------------------------------
  // Users
  // ------------------------------------------

  /** All IAM users in the account. */
  def users: Seq[User] = listUsers.getUsers.asScala.map(u => User(u)).toSeq

  /** Looks up a user by name; None when it does not exist. */
  def user(name: String): Option[User] = try {
    Option(User(getUser(new aws.model.GetUserRequest().withUserName(name)).getUser))
  } catch { case e: aws.model.NoSuchEntityException => None }

  /** Creates a new IAM user. */
  def createUser(name: String): User = User(createUser(new aws.model.CreateUserRequest(name)).getUser)

  /** Renames the user. */
  def updateUserName(user: User, newName: String): Unit = {
    updateUser(new aws.model.UpdateUserRequest().withUserName(user.name).withNewUserName(newName))
  }

  /** Moves the user to a new path. */
  def updateUserPath(user: User, newPath: String): Unit = {
    updateUser(new aws.model.UpdateUserRequest().withUserName(user.name).withNewPath(newPath))
  }

  /** Alias of [[deleteUser]]. */
  def delete(user: User): Unit = deleteUser(user)

  /** Deletes the user. */
  def deleteUser(user: User): Unit = deleteUser(new aws.model.DeleteUserRequest(user.name))
  // ------------------------------------------
  // User Policies
  // ------------------------------------------

  /** Alias of [[userPolicyNames]]. */
  def policyNames(user: User) = userPolicyNames(user)

  /** Names of the inline policies attached to the user. */
  def userPolicyNames(user: User): Seq[String] = {
    listUserPolicies(new aws.model.ListUserPoliciesRequest().withUserName(user.name)).getPolicyNames.asScala.toSeq
  }

  /** Alias of [[putUserPolicy]]. */
  def put(policy: UserPolicy): Unit = putUserPolicy(policy)

  /** Stores (creates or replaces) the given inline user policy. */
  def putUserPolicy(policy: UserPolicy): Unit = putUserPolicy(policy.user, policy.name, policy.document)

  /** Stores an inline user policy from a raw JSON policy document. */
  def putUserPolicy(user: User, policyName: String, policyDocument: String): Unit = {
    putUserPolicy(new aws.model.PutUserPolicyRequest()
      .withUserName(user.name)
      .withPolicyName(policyName).withPolicyDocument(policyDocument))
  }

  /** Fetches a single inline user policy; None when it does not exist. */
  def userPolicy(user: User, policyName: String): Option[UserPolicy] = try {
    Option(UserPolicy(
      user, getUserPolicy(new aws.model.GetUserPolicyRequest().withUserName(user.name).withPolicyName(policyName))
    ))
  } catch { case e: aws.model.NoSuchEntityException => None }

  /** Alias of [[deleteUserPolicy]]. */
  def delete(policy: UserPolicy): Unit = deleteUserPolicy(policy)

  /** Deletes the inline user policy. */
  def deleteUserPolicy(policy: UserPolicy): Unit = {
    deleteUserPolicy(
      new aws.model.DeleteUserPolicyRequest().withUserName(policy.user.name).withPolicyName(policy.name)
    )
  }
  // ------------------------------------------
  // Access Keys
  // ------------------------------------------

  /** Access keys of the requesting user. */
  def accessKeys: Seq[AccessKey] = listAccessKeys.getAccessKeyMetadata.asScala.map(meta => AccessKey(meta)).toSeq

  /** Access keys that belong to the given user. */
  def accessKeys(user: User): Seq[AccessKey] = {
    listAccessKeys(new aws.model.ListAccessKeysRequest().withUserName(user.name)).getAccessKeyMetadata
      .asScala.map(meta => AccessKey(meta)).toSeq
  }

  /** Creates a new access key for the user. */
  def createAccessKey(user: User): AccessKey = {
    AccessKey(createAccessKey(new aws.model.CreateAccessKeyRequest().withUserName(user.name)).getAccessKey)
  }

  /** Sets the key's status to Active. */
  def activateAccessKey(accessKey: AccessKey): Unit = {
    updateAccessKey(new aws.model.UpdateAccessKeyRequest()
      .withAccessKeyId(accessKey.accessKeyId).withStatus(aws.model.StatusType.Active))
  }

  /** Sets the key's status to Inactive. */
  def inactivateAccessKey(accessKey: AccessKey): Unit = {
    updateAccessKey(new aws.model.UpdateAccessKeyRequest()
      .withAccessKeyId(accessKey.accessKeyId).withStatus(aws.model.StatusType.Inactive))
  }

  /** Alias of [[deleteAccessKey]]. */
  def delete(accessKey: AccessKey) = deleteAccessKey(accessKey)

  /** Deletes the access key. */
  def deleteAccessKey(accessKey: AccessKey): Unit = {
    deleteAccessKey(new aws.model.DeleteAccessKeyRequest(accessKey.userName, accessKey.accessKeyId))
  }
// ------------------------------------------
// Roles
// ------------------------------------------
def roles: Seq[Role] = listRoles.getRoles.asScala.map(r => Role(r)).toSeq
def createRole(name: String, path: String, assumeRolePolicy: Policy): Role = {
createRole(name, path, assumeRolePolicy.toJSON)
}
def createRole(name: String, path: String, assumeRolePolicyDocument: String): Role = {
Role(createRole(new aws.model.CreateRoleRequest()
.withRoleName(name)
.withPath(path)
.withAssumeRolePolicyDocument(assumeRolePolicyDocument)).getRole)
}
def delete(role: Role): Unit = deleteRole(role)
def deleteRole(role: Role): Unit = {
deleteRole(new aws.model.DeleteRoleRequest().withRoleName(role.name))
}
  // ------------------------------------------
  // Role Policies
  // ------------------------------------------
  /** Alias of [[rolePolicyNames]]. */
  def policyNames(role: Role) = rolePolicyNames(role)
  /** Lists the names of the inline policies embedded in the given role. */
  def rolePolicyNames(role: Role): Seq[String] = {
    listRolePolicies(new aws.model.ListRolePoliciesRequest().withRoleName(role.name)).getPolicyNames.asScala.toSeq
  }
  /** Alias of [[putRolePolicy]]. */
  def put(policy: RolePolicy): Unit = putRolePolicy(policy)
  /** Adds or replaces the inline policy described by the given [[RolePolicy]]. */
  def putRolePolicy(policy: RolePolicy): Unit = putRolePolicy(policy.role, policy.name, policy.document)
  /** Adds or replaces an inline policy, serializing the [[Policy]] to JSON. */
  def putRolePolicy(role: Role, policyName: String, policy: Policy): Unit = {
    putRolePolicy(role, policyName, policy.toJSON)
  }
  /** Adds or replaces an inline policy document on the given role. */
  def putRolePolicy(role: Role, policyName: String, policyDocument: String): Unit = {
    putRolePolicy(new aws.model.PutRolePolicyRequest()
      .withRoleName(role.name)
      .withPolicyName(policyName).withPolicyDocument(policyDocument))
  }
  /** Fetches one inline policy of a role.
   *  NOTE(review): unlike [[userPolicy]], this does not catch
   *  NoSuchEntityException, so a missing policy throws rather than
   *  returning an Option — consider aligning the two APIs.
   */
  def rolePolicy(role: Role, policyName: String): RolePolicy = RolePolicy(
    role, getRolePolicy(new aws.model.GetRolePolicyRequest().withRoleName(role.name).withPolicyName(policyName))
  )
  /** Alias of [[deleteRolePolicy]]. */
  def delete(policy: RolePolicy): Unit = deleteRolePolicy(policy)
  /** Removes the given inline policy from its role. */
  def deleteRolePolicy(policy: RolePolicy): Unit = {
    deleteRolePolicy(
      new aws.model.DeleteRolePolicyRequest().withRoleName(policy.role.name).withPolicyName(policy.name)
    )
  }
  // ------------------------------------------
  // Instance Profiles
  // ------------------------------------------
  /** Lists every instance profile in the account. */
  def instanceProfiles: Seq[InstanceProfile] = {
    listInstanceProfiles.getInstanceProfiles.asScala.map(p => InstanceProfile(p)).toSeq
  }
  /** Lists the instance profiles that contain the given role. */
  def instanceProfiles(role: Role): Seq[InstanceProfile] = {
    listInstanceProfilesForRole(new aws.model.ListInstanceProfilesForRoleRequest().withRoleName(role.name))
      .getInstanceProfiles.asScala.map(p => InstanceProfile(p)).toSeq
  }
  /** Creates a new instance profile with the given name and path. */
  def createInstanceProfile(name: String, path: String): InstanceProfile = {
    InstanceProfile(createInstanceProfile(
      new aws.model.CreateInstanceProfileRequest().withInstanceProfileName(name).withPath(path)
    ).getInstanceProfile)
  }
  /** Attaches the role to the instance profile. */
  def addRoleToInstanceProfile(profile: InstanceProfile, role: Role): Unit = {
    addRoleToInstanceProfile(new aws.model.AddRoleToInstanceProfileRequest()
      .withInstanceProfileName(profile.name).withRoleName(role.name))
  }
  /** Detaches the role from the instance profile. */
  def removeRoleFromInstanceProfile(profile: InstanceProfile, role: Role): Unit = {
    removeRoleFromInstanceProfile(new aws.model.RemoveRoleFromInstanceProfileRequest()
      .withInstanceProfileName(profile.name).withRoleName(role.name))
  }
  /** Alias of [[deleteInstanceProfile]]. */
  def delete(profile: InstanceProfile): Unit = deleteInstanceProfile(profile)
  /** Removes the given instance profile. */
  def deleteInstanceProfile(profile: InstanceProfile): Unit = {
    deleteInstanceProfile(
      new aws.model.DeleteInstanceProfileRequest().withInstanceProfileName(profile.name)
    )
  }
  // ------------------------------------------
  // Login Profiles
  // ------------------------------------------
  /** Creates a console login profile (password) for the given user. */
  def createLoginProfile(user: User, password: String): LoginProfile = {
    LoginProfile(
      user,
      createLoginProfile(new aws.model.CreateLoginProfileRequest().withUserName(user.name).withPassword(password)).getLoginProfile
    )
  }
  /** Fetches the user's login profile; None when the user has none. */
  def loginProfile(user: User): Option[LoginProfile] = try {
    Option(LoginProfile(user, getLoginProfile(new aws.model.GetLoginProfileRequest().withUserName(user.name)).getLoginProfile))
  } catch { case e: aws.model.NoSuchEntityException => None }
  /** Replaces the console password of the profile's user. */
  def changeUserPassword(profile: LoginProfile, newPassword: String): Unit = {
    updateLoginProfile(new aws.model.UpdateLoginProfileRequest()
      .withUserName(profile.user.name).withPassword(newPassword))
  }
  /** Alias of [[deleteLoginProfile]]. */
  def delete(profile: LoginProfile): Unit = deleteLoginProfile(profile)
  /** Removes the user's login profile. */
  def deleteLoginProfile(profile: LoginProfile): Unit = {
    deleteLoginProfile(
      new aws.model.DeleteLoginProfileRequest().withUserName(profile.user.name)
    )
  }
  // ------------------------------------------
  // Virtual MFA Devices
  // ------------------------------------------
  /** Lists every virtual MFA device in the account. */
  def virtualMFADevices: Seq[VirtualMFADevice] = {
    listVirtualMFADevices.getVirtualMFADevices.asScala.map(d => VirtualMFADevice(d)).toSeq
  }
  /** Lists the MFA devices enabled for the given user. */
  def virtualMFADevices(user: User): Seq[VirtualMFADevice] = {
    listMFADevices(new aws.model.ListMFADevicesRequest().withUserName(user.name)).getMFADevices.asScala
      .map(d => VirtualMFADevice(user, d)).toSeq
  }
  /** Creates a new virtual MFA device with the given name and path. */
  def createVirtualMFADevice(name: String, path: String): VirtualMFADevice = {
    VirtualMFADevice(createVirtualMFADevice(
      new aws.model.CreateVirtualMFADeviceRequest().withVirtualMFADeviceName(name).withPath(path)
    ).getVirtualMFADevice)
  }
  /** Enables the device for the user using two authentication codes. */
  def enableVirtualMFADevice(device: VirtualMFADevice, user: User, authCode1: String, authCode2: String) = {
    enableMFADevice(
      new aws.model.EnableMFADeviceRequest().withUserName(user.name).withSerialNumber(device.serialNumber)
        .withAuthenticationCode1(authCode1).withAuthenticationCode2(authCode2)
    )
  }
  /** Deactivates the device for the user without deleting the device itself. */
  def disableVirtualMFADevice(device: VirtualMFADevice, user: User): Unit = {
    deactivateMFADevice(
      new aws.model.DeactivateMFADeviceRequest().withSerialNumber(device.serialNumber).withUserName(user.name)
    )
  }
  /** Alias of [[deleteVirtualMFADevice]]. */
  def delete(device: VirtualMFADevice): Unit = deleteVirtualMFADevice(device)
  /** Permanently removes the given virtual MFA device. */
  def deleteVirtualMFADevice(device: VirtualMFADevice): Unit = {
    deleteVirtualMFADevice(new aws.model.DeleteVirtualMFADeviceRequest().withSerialNumber(device.serialNumber))
  }
}
/**
 * Default implementation of the [[IAM]] trait, backed by the AWS Java SDK
 * `AmazonIdentityManagementClient`.
 *
 * @param credentialsProvider source of AWS credentials; defaults to the
 *                            provider resolved by `CredentialsLoader.load()`
 */
class IAMClient(credentialsProvider: CredentialsProvider = CredentialsLoader.load())
  extends aws.AmazonIdentityManagementClient(credentialsProvider)
  with IAM
| hirokikonishi/awscala | aws/iam/src/main/scala/IAM.scala | Scala | apache-2.0 | 14,347 |
import scala.quoted.*
// Compile-time lifters: each entry point interprets an inlined DSL expression
// with a different Symantics instance (string rendering, Int evaluation,
// runtime AST reification).
object Macros {
  /** Renders the DSL expression as a String at compile time. */
  inline def liftString(inline a: DSL): String = ${implStringNum('a)}
  private def implStringNum(a: Expr[DSL])(using Quotes): Expr[String] =
    impl(StringNum, a)
  /** Evaluates the DSL expression to an Int at compile time. */
  inline def liftCompute(inline a: DSL): Int = ${implComputeNum('a)}
  private def implComputeNum(a: Expr[DSL])(using Quotes): Expr[Int] =
    impl(ComputeNum, a)
  /** Reifies the DSL expression into a runtime ASTNum tree. */
  inline def liftAST(inline a: DSL): ASTNum = ${implASTNum('a)}
  private def implASTNum(a: Expr[DSL])(using Quotes): Expr[ASTNum] =
    impl(ASTNum, a)
  // Generic interpreter: pattern-matches the quoted DSL tree and folds it
  // through the given Symantics. `env` maps the ids of variables introduced
  // via freshEnvVar/envVar markers to their already-lifted values.
  private def impl[T: Type](sym: Symantics[T], a: Expr[DSL])(using Quotes): Expr[T] = {
    def lift(e: Expr[DSL])(using env: Map[Int, Expr[T]])(using Quotes): Expr[T] = e match {
      // Literal with a statically known Int argument.
      case '{ LitDSL(${Expr(c)}) } => sym.value(c)
      case '{ ($x: DSL) + ($y: DSL) } => sym.plus(lift(x), lift(y))
      case '{ ($x: DSL) * ($y: DSL) } => sym.times(lift(x), lift(y))
      // Function application: lift the function and the argument separately.
      case '{ ${f}($x: DSL): DSL } => sym.app(liftFun(f), lift(x))
      // Local binding: open the body, substitute a fresh envVar marker for the
      // bound variable, and record its lifted value in the environment.
      case '{ val x: DSL = $value; $bodyFn(x): DSL } =>
        UnsafeExpr.open(bodyFn) { (body1, close) =>
          val (i, nEnvVar) = freshEnvVar()
          lift(close(body1)(nEnvVar))(using env + (i -> lift(value)))
        }
      // A marker introduced above: resolve it from the environment.
      case '{ envVar(${Expr(i)}) } => env(i)
      case _ =>
        import quotes.reflect.*
        report.error("Expected explicit DSL " + e.show, e.asTerm.pos)
        ???
    }
    // Lifts a DSL-level lambda into the target representation's lambda.
    def liftFun(e: Expr[DSL => DSL])(using env: Map[Int, Expr[T]])(using Quotes): Expr[T => T] = e match {
      case '{ (x: DSL) => $bodyFn(x): DSL } =>
        sym.lam((y: Expr[T]) =>
          UnsafeExpr.open(bodyFn) { (body1, close) =>
            val (i, nEnvVar) = freshEnvVar()
            lift(close(body1)(nEnvVar))(using env + (i -> y))
          }
        )
      case _ =>
        import quotes.reflect.*
        report.error("Expected explicit DSL => DSL " + e.show, e.asTerm.pos)
        ???
    }
    lift(a)(using Map.empty)
  }
}
// Low-level helper for opening a quoted lambda: exposes its body together
// with a substitution function for the lambda's bound parameter.
object UnsafeExpr {
  /** Opens `f`, giving `content` the lambda body and a polymorphic closure
   *  that substitutes an arbitrary expression for the lambda's parameter.
   */
  def open[T1, R, X](f: Expr[T1 => R])(content: (Expr[R], [t] => Expr[t] => Expr[T1] => Expr[t]) => X)(using Quotes): X = {
    import quotes.reflect.*
    val (params, bodyExpr) = paramsAndBody[R](f)
    content(bodyExpr, [t] => (e: Expr[t]) => (v: Expr[T1]) => bodyFn[t](e.asTerm, params, List(v.asTerm)).asExpr.asInstanceOf[Expr[t]])
  }
  // Eta-expands the term and destructures the resulting anonymous function
  // into its parameter list and body. The pattern is irrefutable for the
  // shape etaExpand produces.
  private def paramsAndBody[R](using Quotes)(f: Expr[Any]): (List[quotes.reflect.ValDef], Expr[R]) = {
    import quotes.reflect.*
    val Block(List(DefDef("$anonfun", List(TermParamClause(params)), _, Some(body))), Closure(Ident("$anonfun"), None)) = f.asTerm.etaExpand(Symbol.spliceOwner)
    (params, body.asExpr.asInstanceOf[Expr[R]])
  }
  // Substitutes `args` for the identifiers bound by `params` inside `e`,
  // walking the tree bottom-up.
  private def bodyFn[t](using Quotes)(e: quotes.reflect.Term, params: List[quotes.reflect.ValDef], args: List[quotes.reflect.Term]): quotes.reflect.Term = {
    import quotes.reflect.*
    val map = params.map(_.symbol).zip(args).toMap
    new TreeMap {
      override def transformTerm(tree: Term)(owner: Symbol): Term =
        super.transformTerm(tree)(owner) match
          case tree: Ident => map.getOrElse(tree.symbol, tree)
          case tree => tree
    }.transformTerm(e)(Symbol.spliceOwner)
  }
}
// Generates a unique environment id paired with its envVar marker expression.
def freshEnvVar()(using Quotes): (Int, Expr[DSL]) = {
  v += 1
  (v, '{envVar(${Expr(v)})})
}
// Global counter backing freshEnvVar (macro expansion is single-threaded here).
var v = 0
// Marker only ever inspected inside quoted patterns; never executed at
// runtime, hence the ??? body.
def envVar(i: Int): DSL = ???
//
// DSL in which the user writes the code
//
// Surface syntax only: `+` and `*` are never evaluated at runtime — the
// macros above interpret the quoted trees instead, hence the ??? bodies.
trait DSL {
  def + (x: DSL): DSL = ???
  def * (x: DSL): DSL = ???
}
// Integer literal of the DSL.
case class LitDSL(x: Int) extends DSL
//
// Interpretations of the DSL
//
// Tagless-final interpreter interface: each instance maps DSL constructs to
// quoted expressions of a target representation Num at macro-expansion time.
trait Symantics[Num] {
  def value(x: Int)(using Quotes): Expr[Num]
  def plus(x: Expr[Num], y: Expr[Num])(using Quotes): Expr[Num]
  def times(x: Expr[Num], y: Expr[Num])(using Quotes): Expr[Num]
  def app(f: Expr[Num => Num], x: Expr[Num])(using Quotes): Expr[Num]
  def lam(body: Quotes ?=> Expr[Num] => Expr[Num])(using Quotes): Expr[Num => Num]
}
// Interpretation 1: render the expression as its textual form.
object StringNum extends Symantics[String] {
  def value(x: Int)(using Quotes): Expr[String] = Expr(x.toString)
  def plus(x: Expr[String], y: Expr[String])(using Quotes): Expr[String] = '{ s"${$x} + ${$y}" } // '{ x + " + " + y }
  def times(x: Expr[String], y: Expr[String])(using Quotes): Expr[String] = '{ s"${$x} * ${$y}" }
  def app(f: Expr[String => String], x: Expr[String])(using Quotes): Expr[String] = Expr.betaReduce('{ $f($x) })
  def lam(body: Quotes ?=> Expr[String] => Expr[String])(using Quotes): Expr[String => String] = '{ (x: String) => ${body('x)} }
}
// Interpretation 2: evaluate the expression to an Int.
object ComputeNum extends Symantics[Int] {
  def value(x: Int)(using Quotes): Expr[Int] = Expr(x)
  def plus(x: Expr[Int], y: Expr[Int])(using Quotes): Expr[Int] = '{ $x + $y }
  def times(x: Expr[Int], y: Expr[Int])(using Quotes): Expr[Int] = '{ $x * $y }
  def app(f: Expr[Int => Int], x: Expr[Int])(using Quotes): Expr[Int] = '{ $f($x) }
  def lam(body: Quotes ?=> Expr[Int] => Expr[Int])(using Quotes): Expr[Int => Int] = '{ (x: Int) => ${body('x)} }
}
// Interpretation 3: reify the expression into a runtime ASTNum tree.
object ASTNum extends Symantics[ASTNum] {
  def value(x: Int)(using Quotes): Expr[ASTNum] = '{ LitAST(${Expr(x)}) }
  def plus(x: Expr[ASTNum], y: Expr[ASTNum])(using Quotes): Expr[ASTNum] = '{ PlusAST($x, $y) }
  def times(x: Expr[ASTNum], y: Expr[ASTNum])(using Quotes): Expr[ASTNum] = '{ TimesAST($x, $y) }
  def app(f: Expr[ASTNum => ASTNum], x: Expr[ASTNum])(using Quotes): Expr[ASTNum] = '{ AppAST($f, $x) }
  def lam(body: Quotes ?=> Expr[ASTNum] => Expr[ASTNum])(using Quotes): Expr[ASTNum => ASTNum] = '{ (x: ASTNum) => ${body('x)} }
}
// Runtime AST produced by the ASTNum interpretation of the DSL.
trait ASTNum
// Integer literal node.
case class LitAST(x: Int) extends ASTNum
// Addition node.
case class PlusAST(x: ASTNum, y: ASTNum) extends ASTNum
// Multiplication node.
case class TimesAST(x: ASTNum, y: ASTNum) extends ASTNum
// Application of a lifted function. Functions have no useful toString, so a
// fixed placeholder is rendered in the function position.
case class AppAST(x: ASTNum => ASTNum, y: ASTNum) extends ASTNum {
  override def toString: String = "AppAST(<lambda>, " + y + ")"
}
| dotty-staging/dotty | tests/run-macros/quote-matcher-symantics-2/quoted_1.scala | Scala | apache-2.0 | 5,702 |
package im.actor.server.migrations
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import sql.migration.V20151108011300__FillUserSequence
import scala.concurrent.duration._
import scala.concurrent.Future
object FillUserSequenceMigrator extends Migration {

  // Identifier under which this migration is recorded.
  override protected def migrationName: String = "2015-11-11-FillUserSequence"

  // Filling the sequence for every user may take a very long time.
  override protected def migrationTimeout: Duration = 24.hours

  override protected def startMigration()(implicit system: ActorSystem): Future[Unit] = {
    import system.dispatcher
    implicit val materializer = ActorMaterializer()
    val migration = new V20151108011300__FillUserSequence
    // Run the migration asynchronously on the system dispatcher.
    Future(migration.migrate())
  }
}
| EaglesoftZJ/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/migrations/FillUserSequenceMigrator.scala | Scala | agpl-3.0 | 680 |
package debop4s.web.spring
import debop4s.core.utils.Hashs
import debop4s.core.{ToStringHelper, ValueObjectBase}
import scala.beans.BeanProperty
/**
 * Represents the result of an API call: a header carrying status information
 * plus an arbitrary body payload.
 *
 * @author Sunghyouk Bae
 */
class ApiResult(@BeanProperty var header: ApiHeader,
                @BeanProperty var body: Any) extends ValueObjectBase {
  // Empty result: default header and an empty-string body.
  def this() = this(ApiHeader(), "")
  // Result with a default header and the given body.
  def this(body: Any) = this(ApiHeader(), body)
  // NOTE(review): hashCode is overridden here; equals is presumably supplied
  // by ValueObjectBase — confirm the two stay consistent.
  override def hashCode: Int = Hashs.compute(header, body)
  override protected def buildStringHelper: ToStringHelper =
    super.buildStringHelper
      .add("header", header)
      .add("body", body)
}
// Factory methods for ApiResult.
object ApiResult {
  /** Empty result (default header, empty body). */
  def apply(): ApiResult = new ApiResult()
  /** Result with a default header and the given body. */
  def apply(body: Any): ApiResult = new ApiResult(body)
  /** Result carrying only a status code and message, with an empty body. */
  def apply(code: Int, message: String): ApiResult = new ApiResult(ApiHeader(code, message), "")
}
| debop/debop4s | debop4s-web-spring/src/main/scala/debop4s/web/spring/ApiResult.scala | Scala | apache-2.0 | 867 |
package dk.tennisprob.tournament
trait TournamentProbCalc {
  /**
   * Computes, for every player in the draw, the probability of winning the
   * tournament.
   *
   * @param draw    first-round pairs of players
   * @param winProb winProb(player1, player2) — probability that player1 wins
   *                a single match against player2
   * @return map from player name to tournament-winning probability
   */
  def winningProbs(draw: Seq[Tuple2[String, String]], winProb: (String, String) => Double): Map[String, Double]
} | danielkorzekwa/tennis-probability-calculator | src/main/scala/dk/tennisprob/tournament/TournamentProbCalc.scala | Scala | bsd-2-clause | 398 |
package ecommerce.sales.view
import java.sql.Date
import ecommerce.sales.ReservationStatus._
import ecommerce.sales._
import org.joda.time.DateTime.now
import pl.newicom.dddd.messaging.event.OfficeEventMessage
import pl.newicom.dddd.view.sql.Projection
import pl.newicom.dddd.view.sql.Projection.ProjectionAction
import slick.dbio.DBIOAction
import slick.dbio.Effect.Write
import scala.concurrent.ExecutionContext
class ReservationProjection(dao: ReservationDao)(implicit ec: ExecutionContext) extends Projection {

  /** Translates sales office events into updates of the reservation read model. */
  override def consume(eventMessage: OfficeEventMessage): ProjectionAction[Write] =
    eventMessage.event match {
      case ReservationCreated(id, clientId) =>
        dao.createOrUpdate(ReservationView(id.value, clientId, Opened, new Date(now().getMillis)))
      case ReservationConfirmed(id, _, _) =>
        dao.updateStatus(id.value, Confirmed)
      case ReservationCanceled(id) =>
        dao.updateStatus(id.value, Canceled)
      case ReservationClosed(id) =>
        dao.updateStatus(id.value, Closed)
      case ProductReserved(_, _, _) =>
        // TODO handle product reservations in the read model
        DBIOAction.successful(())
    }
} | pawelkaczor/ddd-leaven-akka-v2 | sales/read-back/src/main/scala/ecommerce/sales/view/ReservationProjection.scala | Scala | mit | 1,201 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a basic overview of the dataset's contents without deeper analysis.