| code (string, 5–1M) | repo_name (string, 5–109) | path (string, 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M) |
|---|---|---|---|---|---|
package scodec
package codecs
class DiscriminatorCodecTest extends CodecSuite {
"the discriminator combinators" should {
"support building a codec using typecases" in {
val codec =
discriminated[AnyVal].by(uint8)
.typecase(0, int32)
.typecase(1, bool)
roundtrip(codec, true)
roundtrip(codec, false)
roundtrip(codec, 1)
roundtrip(codec, Int.MaxValue)
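// size bound: 8-bit discriminator plus a 1-bit bool at minimum, or a 32-bit int32 at maximum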
codec.sizeBound shouldBe SizeBound.bounded(9, 40)
}
"support building a codec using partial functions and subtyping" in {
val codec =
discriminated[AnyVal].by(uint8)
.\ (0) { case i: Int => i } (int32)
.\ (1) { case b: Boolean => b } (bool)
roundtrip(codec, true)
roundtrip(codec, false)
roundtrip(codec, 1)
roundtrip(codec, Int.MaxValue)
}
"support building a codec using A => Option[B] and subtyping" in {
val codec =
discriminated[AnyVal].by(uint8)
./ (0) { v => v match { case i: Int => Some(i); case _ => None }} (int32)
./ (1) { v => v match { case b: Boolean => Some(b); case _ => None }} (bool)
roundtrip(codec, true)
roundtrip(codec, false)
roundtrip(codec, 1)
roundtrip(codec, Int.MaxValue)
}
"support building a codec for an enumeration" in {
sealed trait Direction
case object North extends Direction
case object South extends Direction
case object East extends Direction
case object West extends Direction
val codec = mappedEnum(uint8, North -> 1, South -> 2, East -> 3, West -> 4)
roundtrip(codec, North)
roundtrip(codec, South)
roundtrip(codec, East)
roundtrip(codec, West)
}
"support building a codec for an enumeration with preserved reserved values" in {
trait Color
case object Red extends Color
case object Green extends Color
case object Blue extends Color
case class Reserved(value: Int) extends Color
val nonReserved: Codec[Color] = mappedEnum(uint8, Red -> 1, Green -> 2, Blue -> 3)
val reserved: Codec[Reserved] = uint8.widenOpt(Reserved.apply, Reserved.unapply)
val codec: Codec[Color] = choice(nonReserved, reserved.upcast[Color])
roundtrip(codec, Red)
roundtrip(codec, Green)
roundtrip(codec, Blue)
roundtrip(codec, Reserved(255))
roundtrip(codec, Reserved(4))
}
"support building a codec for an enumeration with preserved reserved values, and reserved values are not in the type hierarchy" in {
trait Color
case object Red extends Color
case object Green extends Color
case object Blue extends Color
case class Reserved(value: Int)
val nonReserved: Codec[Color] = mappedEnum(uint8, Red -> 1, Green -> 2, Blue -> 3)
val reserved: Codec[Reserved] = uint8.widenOpt(Reserved.apply, Reserved.unapply)
val codec: Codec[Either[Reserved, Color]] = choice(
nonReserved.xmap[Right[Reserved, Color]](c => Right(c), _.b).upcast[Either[Reserved, Color]],
reserved.xmap[Left[Reserved, Color]](r => Left(r), _.a).upcast[Either[Reserved, Color]]
)
roundtrip(codec, Right(Red))
roundtrip(codec, Right(Green))
roundtrip(codec, Right(Blue))
roundtrip(codec, Left(Reserved(255)))
roundtrip(codec, Left(Reserved(4)))
}
"support building a codec for an ADT" in {
sealed trait Direction
case object Stay extends Direction
case class Go(units: Int) extends Direction
val stayCodec = provide(Stay)
val goCodec = int32.widenOpt[Go](Go.apply, Go.unapply)
val codec =
discriminated[Direction].by(uint8).
typecase(0, stayCodec).
typecase(1, goCodec)
roundtrip(codec, Stay)
roundtrip(codec, Go(42))
}
"support building a codec for recusive ADTs - e.g., trees" in {
sealed trait Tree
case class Node(l: Tree, r: Tree) extends Tree
case class Leaf(n: Int) extends Tree
def treeCodec: Codec[Tree] = lazily {
discriminated[Tree].by(bool)
.| (false) { case l @ Leaf(n) => n } (Leaf.apply) (int32)
.| (true) { case n @ Node(l, r) => (l, r) } { case (l, r) => Node(l, r) } (treeCodec ~ treeCodec)
}
roundtrip(treeCodec, Leaf(42))
roundtrip(treeCodec, Node(Leaf(42), Node(Leaf(1), Leaf(2))))
}
"error when matching discriminator for encoding is not found" in {
val codec =
discriminated[AnyVal].by(uint8)
.typecase(0, bool)
roundtrip(codec, true)
roundtrip(codec, false)
encodeError(codec, 1, new Err.MatchingDiscriminatorNotFound(1))
encodeError(codec, Int.MaxValue, new Err.MatchingDiscriminatorNotFound(Int.MaxValue))
}
"support framing value codecs" in {
sealed trait Direction
case object Stay extends Direction
case class Go(units: Int) extends Direction
case class Annotate(message: String) extends Direction
val stayCodec = provide(Stay)
val goCodec = int32.widenOpt[Go](Go.apply, Go.unapply)
val annotateCodec = ascii.widenOpt[Annotate](Annotate.apply, Annotate.unapply)
val codec =
discriminated[Direction].by(uint8).
typecase(0, stayCodec).
typecase(1, goCodec).
typecase(2, annotateCodec).
framing(new CodecTransformation {
def apply[X](c: Codec[X]) = variableSizeBytes(uint8, c)
})
roundtrip(list(codec), List(Stay, Go(1), Annotate("Hello"), Go(2), Stay))
}
}
}
| alissapajer/scodec | shared/src/test/scala/scodec/codecs/DiscriminatorCodecTest.scala | Scala | bsd-3-clause | 5,550 |
package lila.tournament
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import play.api.libs.json._
import scala.concurrent.duration._
import chess.Clock.{ Config => TournamentClock }
import lila.common.LightUser
import lila.common.PimpedJson._
import lila.game.{ Game, GameRepo, Pov }
import lila.quote.Quote.quoteWriter
import lila.rating.PerfType
import lila.user.User
final class JsonView(
getLightUser: String => Option[LightUser],
cached: Cached,
performance: Performance,
verify: Condition.Verify) {
import JsonView._
import Condition.JSONHandlers._
private case class CachableData(
pairings: JsArray,
featured: Option[JsObject],
podium: Option[JsArray],
next: Option[JsObject])
def apply(
tour: Tournament,
page: Option[Int],
me: Option[User],
playerInfoExt: Option[PlayerInfoExt],
socketVersion: Option[Int]): Fu[JsObject] = for {
data <- cachableData(tour.id)
myInfo <- me ?? { u => PlayerRepo.playerInfo(tour.id, u.id) }
stand <- (myInfo, page) match {
case (_, Some(p)) => standing(tour, p)
case (Some(i), _) => standing(tour, i.page)
case _ => standing(tour, 1)
}
playerInfoJson <- playerInfoExt ?? { pie =>
playerInfo(pie).map(_.some)
}
verdicts <- me match {
case None => fuccess(tour.conditions.accepted)
case Some(user) if myInfo.isDefined => fuccess(tour.conditions.accepted)
case Some(user) => verify(tour.conditions, user)
}
} yield Json.obj(
"id" -> tour.id,
"createdBy" -> tour.createdBy,
"system" -> tour.system.toString.toLowerCase,
"fullName" -> tour.fullName,
"greatPlayer" -> GreatPlayer.wikiUrl(tour.name).map { url =>
Json.obj("name" -> tour.name, "url" -> url)
},
"perf" -> tour.perfType,
"nbPlayers" -> tour.nbPlayers,
"minutes" -> tour.minutes,
"clock" -> tour.clock,
"position" -> tour.position.some.filterNot(_.initial).map(positionJson),
"private" -> tour.`private`.option(true),
"verdicts" -> verdicts,
"variant" -> tour.variant.key,
"isStarted" -> tour.isStarted,
"isFinished" -> tour.isFinished,
"isRecentlyFinished" -> tour.isRecentlyFinished.option(true),
"pairingsClosed" -> tour.pairingsClosed,
"schedule" -> tour.schedule.map(scheduleJson),
"secondsToFinish" -> tour.isStarted.option(tour.secondsToFinish),
"secondsToStart" -> tour.isCreated.option(tour.secondsToStart),
"startsAt" -> formatDate(tour.startsAt),
"pairings" -> data.pairings,
"standing" -> stand,
"me" -> myInfo.map(myInfoJson(me)),
"featured" -> data.featured,
"podium" -> data.podium,
"playerInfo" -> playerInfoJson,
"quote" -> tour.isCreated.option(lila.quote.Quote.one(tour.id)),
"spotlight" -> tour.spotlight,
"socketVersion" -> socketVersion,
"next" -> data.next
).noNull
def standing(tour: Tournament, page: Int): Fu[JsObject] =
if (page == 1) firstPageCache(tour.id)
else computeStanding(tour, page)
def clearCache(id: String) =
firstPageCache.remove(id) >> cachableData.remove(id)
def playerInfo(info: PlayerInfoExt): Fu[JsObject] = for {
ranking <- cached ranking info.tour
pairings <- PairingRepo.finishedByPlayerChronological(info.tour.id, info.user.id)
sheet = info.tour.system.scoringSystem.sheet(info.tour, info.user.id, pairings)
tpr <- performance(info.tour, info.player, pairings)
} yield info match {
case PlayerInfoExt(tour, user, player, povs) =>
val isPlaying = povs.headOption.??(_.game.playable)
val povScores: List[(Pov, Option[Score])] = povs zip {
(isPlaying ?? List(none[Score])) ::: sheet.scores.map(some)
}
Json.obj(
"player" -> Json.obj(
"id" -> user.id,
"name" -> user.username,
"title" -> user.title,
"rank" -> ranking.get(user.id).map(1+),
"rating" -> player.rating,
"provisional" -> player.provisional.option(true),
"withdraw" -> player.withdraw.option(true),
"score" -> player.score,
"ratingDiff" -> player.ratingDiff,
"fire" -> player.fire,
"nb" -> sheetNbs(user.id, sheet, pairings),
"performance" -> tpr
).noNull,
"pairings" -> povScores.map {
case (pov, score) =>
Json.obj(
"id" -> pov.gameId,
"color" -> pov.color.name,
"op" -> gameUserJson(pov.opponent.userId, pov.opponent.rating),
"win" -> pov.win,
"status" -> pov.game.status.id,
"berserk" -> pov.player.berserk.option(true),
"score" -> score.map(sheetScoreJson)
).noNull
}
)
}
private def fetchFeaturedGame(tour: Tournament): Fu[Option[FeaturedGame]] =
tour.featuredId.ifTrue(tour.isStarted) ?? PairingRepo.byId flatMap {
_ ?? { pairing =>
GameRepo game pairing.gameId flatMap {
_ ?? { game =>
cached ranking tour flatMap { ranking =>
PlayerRepo.pairByTourAndUserIds(tour.id, pairing.user1, pairing.user2) map { pairOption =>
for {
players <- pairOption
(p1, p2) = players
rp1 <- RankedPlayer(ranking)(p1)
rp2 <- RankedPlayer(ranking)(p2)
} yield FeaturedGame(game, rp1, rp2)
}
}
}
}
}
}
private def sheetNbs(userId: String, sheet: ScoreSheet, pairings: Pairings) = sheet match {
case s: arena.ScoringSystem.Sheet => Json.obj(
"game" -> s.scores.size,
"berserk" -> pairings.foldLeft(0) {
case (nb, p) => nb + p.berserkOf(userId)
},
"win" -> s.scores.count(_.isWin))
}
private def computeStanding(tour: Tournament, page: Int): Fu[JsObject] = for {
rankedPlayers <- PlayerRepo.bestByTourWithRankByPage(tour.id, 10, page max 1)
sheets <- rankedPlayers.map { p =>
PairingRepo.finishedByPlayerChronological(tour.id, p.player.userId) map { pairings =>
p.player.userId -> tour.system.scoringSystem.sheet(tour, p.player.userId, pairings)
}
}.sequenceFu.map(_.toMap)
} yield Json.obj(
"page" -> page,
"players" -> rankedPlayers.map(playerJson(sheets, tour))
)
private val firstPageCache = lila.memo.AsyncCache[String, JsObject](
name = "tournament.firstPage",
id => TournamentRepo byId id flatten s"No such tournament: $id" flatMap { computeStanding(_, 1) },
timeToLive = 1 second)
private val cachableData = lila.memo.AsyncCache[String, CachableData](
name = "tournament.json.cachable",
id =>
for {
pairings <- PairingRepo.recentByTour(id, 40)
tour <- TournamentRepo byId id
featured <- tour ?? fetchFeaturedGame
podium <- podiumJson(id)
next <- tour.filter(_.isFinished) ?? cached.findNext map2 nextJson
} yield CachableData(
pairings = JsArray(pairings map pairingJson),
featured = featured map featuredJson,
podium = podium,
next = next),
timeToLive = 1 second)
private def nextJson(tour: Tournament) = Json.obj(
"id" -> tour.id,
"name" -> tour.fullName,
"perf" -> tour.perfType,
"nbPlayers" -> tour.nbPlayers,
"finishesAt" -> tour.isStarted.option(tour.finishesAt).map(formatDate),
"startsAt" -> tour.isCreated.option(tour.startsAt).map(formatDate))
private def featuredJson(featured: FeaturedGame) = {
val game = featured.game
def ofPlayer(rp: RankedPlayer, p: lila.game.Player) = {
val light = getLightUser(rp.player.userId)
Json.obj(
"rank" -> rp.rank,
"name" -> light.fold(rp.player.userId)(_.name),
"title" -> light.flatMap(_.title),
"rating" -> rp.player.rating,
"ratingDiff" -> rp.player.ratingDiff,
"berserk" -> p.berserk.option(true)
).noNull
}
Json.obj(
"id" -> game.id,
"fen" -> (chess.format.Forsyth exportBoard game.toChess.board),
"color" -> (game.variant match {
case chess.variant.RacingKings => chess.White
case _ => game.firstColor
}).name,
"lastMove" -> ~game.castleLastMoveTime.lastMoveString,
"white" -> ofPlayer(featured.white, game player chess.White),
"black" -> ofPlayer(featured.black, game player chess.Black))
}
private def myInfoJson(u: Option[User])(i: PlayerInfo) = Json.obj(
"rank" -> i.rank,
"withdraw" -> i.withdraw,
"username" -> u.map(_.titleUsername))
private def gameUserJson(player: lila.game.Player): JsObject =
gameUserJson(player.userId, player.rating)
private def gameUserJson(userId: Option[String], rating: Option[Int]): JsObject = {
val light = userId flatMap getLightUser
Json.obj(
"name" -> light.map(_.name),
"title" -> light.flatMap(_.title),
"rating" -> rating
).noNull
}
private def sheetJson(sheet: ScoreSheet) = sheet match {
case s: arena.ScoringSystem.Sheet =>
val o = Json.obj(
"scores" -> s.scores.reverse.map(arenaSheetScoreJson),
"total" -> s.total)
s.onFire.fold(o + ("fire" -> JsBoolean(true)), o)
}
private def arenaSheetScoreJson(score: arena.ScoringSystem.Score) =
if (score.flag == arena.ScoringSystem.Normal) JsNumber(score.value)
else Json.arr(score.value, score.flag.id)
private def sheetScoreJson(score: Score) = score match {
case s: arena.ScoringSystem.Score => arenaSheetScoreJson(s)
case s => JsNumber(score.value)
}
private def playerJson(sheets: Map[String, ScoreSheet], tour: Tournament)(rankedPlayer: RankedPlayer): JsObject =
playerJson(sheets get rankedPlayer.player.userId, tour, rankedPlayer)
private def playerJson(sheet: Option[ScoreSheet], tour: Tournament, rankedPlayer: RankedPlayer): JsObject = {
val p = rankedPlayer.player
val light = getLightUser(p.userId)
Json.obj(
"rank" -> rankedPlayer.rank,
"name" -> light.fold(p.userId)(_.name),
"title" -> light.flatMap(_.title),
"rating" -> p.rating,
"provisional" -> p.provisional.option(true),
"withdraw" -> p.withdraw.option(true),
"score" -> p.score,
"ratingDiff" -> p.ratingDiff,
"sheet" -> sheet.map(sheetJson)
).noNull
}
private def podiumJson(id: String): Fu[Option[JsArray]] =
TournamentRepo finishedById id flatMap {
_ ?? { tour =>
PlayerRepo.bestByTourWithRank(id, 3).flatMap {
_.map {
case rp@RankedPlayer(_, player) => for {
pairings <- PairingRepo.finishedByPlayerChronological(tour.id, player.userId)
sheet = tour.system.scoringSystem.sheet(tour, player.userId, pairings)
tpr <- performance(tour, player, pairings)
} yield playerJson(sheet.some, tour, rp) ++ Json.obj(
"nb" -> sheetNbs(player.userId, sheet, pairings),
"performance" -> tpr)
}.sequenceFu
} map { l => JsArray(l).some }
}
}
private def pairingUserJson(userId: String) = getLightUser(userId).fold(userId)(_.name)
private def pairingJson(p: Pairing) = Json.obj(
"id" -> p.gameId,
"u" -> Json.arr(pairingUserJson(p.user1), pairingUserJson(p.user2)),
"s" -> (if (p.finished) p.winner match {
case Some(w) if w == p.user1 => 2
case Some(w) => 3
case _ => 1
}
else 0))
}
object JsonView {
private def formatDate(date: DateTime) = ISODateTimeFormat.dateTime print date
private[tournament] def scheduleJson(s: Schedule) = Json.obj(
"freq" -> s.freq.name,
"speed" -> s.speed.name)
private[tournament] implicit val clockWriter: Writes[TournamentClock] = Json.writes[TournamentClock]
private[tournament] def positionJson(s: chess.StartingPosition) = Json.obj(
"eco" -> s.eco,
"name" -> s.name,
"fen" -> s.fen)
private[tournament] implicit val spotlightWrites: OWrites[Spotlight] = OWrites { s =>
Json.obj(
"iconImg" -> s.iconImg,
"iconFont" -> s.iconFont
).noNull
}
private[tournament] implicit val perfTypeWrites: OWrites[PerfType] = OWrites { pt =>
Json.obj(
"icon" -> pt.iconChar.toString,
"name" -> pt.name)
}
}
| clarkerubber/lila | modules/tournament/src/main/JsonView.scala | Scala | agpl-3.0 | 12,402 |
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.annotation._
object IfExpr1 {
def foo(): Int = {
var a = 1
var b = 2
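// The condition's side effect bumps a to 2, so a == b, the else branch runs (b becomes 4),
// and the returned a == 2 satisfies the postcondition.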
if({a = a + 1; a != b})
a = a + 3
else
b = a + b
a
} ensuring(_ == 2)
}
| epfl-lara/stainless | frontends/benchmarks/imperative/valid/IfExpr1.scala | Scala | apache-2.0 | 238 |
// #Sireum
/*
Copyright (c) 2017, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
object Option {
@pure def some[T](value: T): Option[T] = {
return Some(value)
}
@pure def none[T](): Option[T] = {
return None()
}
}
@datatype trait Option[T] {
@pure def isEmpty: B
@pure def nonEmpty: B
@pure def map[T2](f: T => T2 @pure): Option[T2]
@pure def flatMap[T2](f: T => Option[T2] @pure): Option[T2]
@pure def forall(f: T => B @pure): B
@pure def exists(f: T => B @pure): B
@pure def get: T
@pure def getOrElse(default: => T): T
@pure def getOrElseEager(default: T): T
@pure def toIS: IS[Z, T]
def foreach[V](f: T => V): Unit
}
@datatype class None[T] extends Option[T] {
@pure def isEmpty: B = {
Contract(Ensures(Res))
return T
}
@pure def nonEmpty: B = {
Contract(Ensures(!Res[B]))
return F
}
@pure def map[T2](f: T => T2 @pure): Option[T2] = {
Contract(Ensures(Res == None[T2]()))
return None[T2]()
}
@pure def flatMap[T2](f: T => Option[T2] @pure): Option[T2] = {
Contract(Ensures(Res == None[T2]()))
return None[T2]()
}
@pure def forall(f: T => B @pure): B = {
Contract(Ensures(Res))
return T
}
@pure def exists(f: T => B @pure): B = {
Contract(Ensures(!Res[B]))
return F
}
@pure def getOrElse(default: => T): T = {
Contract(Ensures(Res == default))
return default
}
@pure def getOrElseEager(default: T): T = {
Contract(Ensures(Res == default))
return default
}
@pure def get: T = {
Contract(Requires(F))
halt("Invalid 'None' operation 'get'.")
}
@pure def toIS: IS[Z, T] = {
Contract(Ensures(Res[ISZ[T]].size == 0))
return IS[Z, T]()
}
def foreach[V](f: T => V): Unit = {}
}
@datatype class Some[T](value: T) extends Option[T] {
@pure def isEmpty: B = {
Contract(Ensures(!Res[B]))
return F
}
@pure def nonEmpty: B = {
Contract(Ensures(Res))
return T
}
@pure def map[T2](f: T => T2 @pure): Option[T2] = {
Contract(Ensures(Res == Some(f(value))))
return Some(f(value))
}
@pure def flatMap[T2](f: T => Option[T2] @pure): Option[T2] = {
Contract(Ensures(Res == f(value)))
return f(value)
}
@pure def forall(f: T => B @pure): B = {
Contract(Ensures(Res == f(value)))
return f(value)
}
@pure def exists(f: T => B @pure): B = {
Contract(Ensures(Res == f(value)))
return f(value)
}
@pure def getOrElse(default: => T): T = {
Contract(Ensures(Res == value))
return value
}
@pure def getOrElseEager(default: T): T = {
Contract(Ensures(Res == value))
return value
}
@pure def get: T = {
Contract(Ensures(Res == value))
return value
}
@pure def toIS: IS[Z, T] = {
Contract(Ensures(Res == ISZ(value)))
return ISZ(value)
}
def foreach[V](f: T => V): Unit = {
f(value)
}
}
| sireum/v3-runtime | library/shared/src/main/scala/org/sireum/Option.scala | Scala | bsd-2-clause | 4,177 |
package mesosphere.marathon
package raml
import mesosphere.marathon.state.{AbsolutePathId, AppDefinition, PathId, Timestamp, Group => CoreGroup, VersionInfo => CoreVersionInfo}
object GroupConversion {
def apply(groupUpdate: GroupUpdate, current: CoreGroup, timestamp: Timestamp): UpdateGroupStructureOp =
UpdateGroupStructureOp(groupUpdate, current, timestamp)
}
case class UpdateGroupStructureOp(
groupUpdate: GroupUpdate,
current: CoreGroup,
timestamp: Timestamp
) {
def apply(cf: App => AppDefinition): CoreGroup =
UpdateGroupStructureOp.execute(groupUpdate, current, timestamp)(cf)
}
object UpdateGroupStructureOp {
import Normalization._
private def requireGroupPath(groupUpdate: GroupUpdate): PathId = {
groupUpdate.id
.map(PathId(_))
.getOrElse(
// validation should catch this..
throw SerializationFailedException("No group id was given!")
)
}
private def normalizeAppDefinition(version: Timestamp): Normalization[AppDefinition] =
Normalization { app =>
app.copy(versionInfo = CoreVersionInfo.OnlyVersion(version))
}
private def normalizeApp(pathNormalization: Normalization[String]): Normalization[App] =
Normalization { app =>
app.copy(id = app.id.normalize(pathNormalization), dependencies = app.dependencies.map(_.normalize(pathNormalization)))
}
/**
* Creates a new [[state.Group]] from a [[GroupUpdate]], performing both normalization and conversion.
*/
private def createGroup(groupUpdate: GroupUpdate, gid: AbsolutePathId, version: Timestamp)(implicit
cf: App => AppDefinition
): CoreGroup = {
val pathNormalization: Normalization[String] = Normalization(PathId(_).canonicalPath(gid).toString)
implicit val appNormalization: Normalization[App] = normalizeApp(pathNormalization)
implicit val appDefinitionNormalization: Normalization[AppDefinition] = normalizeAppDefinition(version)
val appsById: Map[AbsolutePathId, AppDefinition] = groupUpdate.apps
.getOrElse(Set.empty)
.iterator
.map { currentApp =>
val app = cf(currentApp.normalize).normalize
app.id -> app
}
.toMap
val groupsById: Map[AbsolutePathId, CoreGroup] = groupUpdate.groups
.getOrElse(Seq.empty)
.iterator
.map { currentGroup =>
// TODO: tailrec needed
val id = requireGroupPath(currentGroup).canonicalPath(gid)
val group = createGroup(currentGroup, id, version)
group.id -> group
}
.toMap
CoreGroup(
id = gid,
apps = appsById,
pods = Map.empty,
groupsById = groupsById,
dependencies = groupUpdate.dependencies.fold(Set.empty[AbsolutePathId])(_.map(PathId(_).canonicalPath(gid))),
version = version,
enforceRole = groupUpdate.enforceRole
)
}
/**
* Main entrypoint for a group structure update operation.
* Implements create-or-update-or-delete for a group tree or subtree.
* Performs both normalization and conversion from RAML model to state model.
*
* Note - GroupUpdate should be normalized already and default enforceRole applied as appropriate
*/
private def execute(groupUpdate: GroupUpdate, current: CoreGroup, timestamp: Timestamp)(implicit
cf: App => AppDefinition
): CoreGroup = {
require(groupUpdate.scaleBy.isEmpty, "For a structural update, no scale should be given.")
require(groupUpdate.version.isEmpty, "For a structural update, no version should be given.")
assert(groupUpdate.enforceRole.isDefined, s"BUG! The group normalization should have set enforceRole for ${groupUpdate.id}.")
implicit val pathNormalization: Normalization[PathId] = Normalization(_.canonicalPath(current.id))
implicit val appNormalization: Normalization[AppDefinition] = normalizeAppDefinition(timestamp)
val effectiveGroups: Map[AbsolutePathId, CoreGroup] = groupUpdate.groups.fold(current.groupsById) { updates =>
updates.iterator.map { groupUpdate =>
val gid = requireGroupPath(groupUpdate).canonicalPath(current.id)
val newGroup = current.groupsById
.get(gid)
.map { group =>
execute(groupUpdate, group, timestamp) // TODO: tailrec needed
}
.getOrElse(createGroup(groupUpdate, gid, timestamp))
newGroup.id -> newGroup
}.toMap
}
val effectiveApps: Map[AbsolutePathId, AppDefinition] = {
groupUpdate.apps
.map(_.map(cf))
.getOrElse(current.apps.values)
.iterator
.map { currentApp =>
val app = currentApp.normalize
app.id -> app
}
.toMap
}
val effectiveDependencies = groupUpdate.dependencies.fold(current.dependencies)(_.map(PathId(_).canonicalPath(current.id)))
CoreGroup(
id = current.id,
apps = effectiveApps,
pods = current.pods,
groupsById = effectiveGroups,
dependencies = effectiveDependencies,
version = timestamp,
enforceRole = groupUpdate.enforceRole
)
}
}
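// Illustrative sketch (not part of the original source): applying a structural update,
// assuming `update: GroupUpdate`, `root: CoreGroup`, `now: Timestamp` and a RAML-to-state
// conversion `ramlToApp: App => AppDefinition` are in scope:
//   val updated: CoreGroup = GroupConversion(update, root, now)(ramlToApp)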
| mesosphere/marathon | src/main/scala/mesosphere/marathon/raml/GroupConversion.scala | Scala | apache-2.0 | 5,053 |
package games.utils
import scala.collection.mutable.Map
import scala.collection.mutable.ArrayBuffer
import games.math.{ Vector2f, Vector3f, Vector4f, Matrix3f }
object SimpleOBJParser {
class TexInfo(var path: String) {
var blendu: Boolean = true
var blendv: Boolean = true
var bumpMultiplier: Option[Float] = None
var boost: Option[Float] = None
var colorCorrection: Option[Boolean] = None
var clamp: Boolean = false
var channel: Option[String] = None
var modifierBase: Float = 0f
var modifierGain: Float = 1f
var offset: Vector3f = new Vector3f(0, 0, 0)
var resize: Vector3f = new Vector3f(1, 1, 1)
var turbulence: Vector3f = new Vector3f(0, 0, 0)
var resolution: Option[Int] = None
override def toString(): String = "TexInfo(path=\\"" + path + "\\")"
}
class Material(val name: String) {
var ambientColor: Option[Vector3f] = None
var diffuseColor: Option[Vector3f] = None
var specularColor: Option[Vector3f] = None
var specularCoef: Option[Float] = None
var sharpness: Float = 60f
var refractionIndex: Option[Float] = None
var transparency: Option[Float] = None
var illuminationModelIndex: Option[Int] = None
var ambientColorTexture: Option[TexInfo] = None
var diffuseColorTexture: Option[TexInfo] = None
var specularColorTexture: Option[TexInfo] = None
var specularCoefTexture: Option[TexInfo] = None
var bumpMapTexture: Option[TexInfo] = None
var displacementMapTexture: Option[TexInfo] = None
var decalTexture: Option[TexInfo] = None
override def toString(): String = "Material(name=\\"" + name + "\\")"
}
private def onOff(value: String): Boolean = value.toLowerCase() match {
case "1" | "on" => true
case "0" | "off" => false
case _ => throw new RuntimeException("Unknown value \\"" + value + "\\"")
}
// From http://en.wikipedia.org/wiki/CIE_1931_color_space#Construction_of_the_CIE_XYZ_color_space_from_the_Wright.E2.80.93Guild_data
private val cieToRgbMatrix = new Matrix3f(
0.41847f, -0.15866f, -0.082835f,
-0.091169f, 0.25243f, 0.015708f,
0.00092090f, -0.0025498f, 0.17860f)
private def cieToRgb(cie: Vector3f): Vector3f = cieToRgbMatrix * cie
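// Added note: the matrix rows follow the Wright-Guild XYZ-to-RGB data referenced above, so
// (assuming a row-major constructor and standard matrix-vector multiplication) the
// equal-energy white point (1, 1, 1) maps to roughly equal R, G and B components (about 0.177 each).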
private def parseFloat(s: String) = try { Some(s.toFloat) } catch { case _: Throwable => None }
private def parseTex(tokens: Array[String]): TexInfo = {
val texInfo = new TexInfo("<undefined>")
var currentShift = 1 // First token is the command name
while (currentShift < tokens.size) {
val remaining = tokens.size - currentShift // tokens left to process, including the current one
tokens(currentShift).toLowerCase() match {
case "-blendu" if (remaining >= 2) =>
texInfo.blendu = onOff(tokens(currentShift + 1))
currentShift += 2
case "-blendv" if (remaining >= 2) =>
texInfo.blendv = onOff(tokens(currentShift + 1))
currentShift += 2
case "-bm" if (remaining >= 2) =>
texInfo.bumpMultiplier = Some(tokens(currentShift + 1).toFloat)
currentShift += 2
case "-boost" if (remaining >= 2) =>
texInfo.boost = Some(tokens(currentShift + 1).toFloat)
currentShift += 2
case "-cc" if (remaining >= 2) =>
texInfo.colorCorrection = Some(onOff(tokens(currentShift + 1)))
currentShift += 2
case "-clamp" if (remaining >= 2) =>
texInfo.clamp = onOff(tokens(currentShift + 1))
currentShift += 2
case "-imfchan" if (remaining >= 2) =>
texInfo.channel = Some(tokens(currentShift + 1).toLowerCase())
currentShift += 2
case "-mm" if (remaining >= 3) =>
texInfo.modifierBase = tokens(currentShift + 1).toFloat
texInfo.modifierGain = tokens(currentShift + 2).toFloat
currentShift += 3
case "-o" if (remaining >= 2) =>
val x = tokens(currentShift + 1).toFloat
(if (remaining >= 3) parseFloat(tokens(currentShift + 2)) else None) match {
case None =>
texInfo.offset = new Vector3f(x, 0, 0)
currentShift += 2
case Some(y) => (if (remaining >= 4) parseFloat(tokens(currentShift + 3)) else None) match {
case None =>
texInfo.offset = new Vector3f(x, y, 0)
currentShift += 3
case Some(z) =>
texInfo.offset = new Vector3f(x, y, z)
currentShift += 4
}
}
case "-s" if (remaining >= 2) =>
val x = tokens(currentShift + 1).toFloat
(if (remaining >= 3) parseFloat(tokens(currentShift + 2)) else None) match {
case None =>
texInfo.resize = new Vector3f(x, 1, 1)
currentShift += 2
case Some(y) => (if (remaining >= 4) parseFloat(tokens(currentShift + 3)) else None) match {
case None =>
texInfo.resize = new Vector3f(x, y, 1)
currentShift += 3
case Some(z) =>
texInfo.resize = new Vector3f(x, y, z)
currentShift += 4
}
}
case "-t" if (remaining >= 2) =>
val x = tokens(currentShift + 1).toFloat
(if (remaining >= 3) parseFloat(tokens(currentShift + 2)) else None) match {
case None =>
texInfo.turbulence = new Vector3f(x, 0, 0)
currentShift += 2
case Some(y) => (if (remaining >= 4) parseFloat(tokens(currentShift + 3)) else None) match {
case None =>
texInfo.turbulence = new Vector3f(x, y, 0)
currentShift += 3
case Some(z) =>
texInfo.turbulence = new Vector3f(x, y, z)
currentShift += 4
}
}
case "-texres" if (remaining >= 2) =>
texInfo.resolution = Some(tokens(currentShift + 1).toInt)
currentShift += 2
case _ =>
texInfo.path = tokens(currentShift)
currentShift += 1
}
}
texInfo
}
def parseMTL(mtlFile: String): scala.collection.Map[String, Material] = {
val mats: Map[String, Material] = Map()
var curMat: Option[Material] = None
def mat(): Material = curMat.getOrElse(throw new RuntimeException("No material currently selected"))
def flushCurMat(): Unit = for (cur <- curMat) {
mats += (cur.name -> cur)
curMat = None
}
for (currentLine <- mtlFile.lines) {
val index = currentLine.indexOf("#")
val line = if (index < 0) currentLine else currentLine.substring(0, index).trim()
val tokens = line.split(" ", -1)
(tokens(0).toLowerCase(), if (tokens.size >= 2) Some(tokens(1).toLowerCase()) else None) match {
case ("newmtl", _) if (tokens.size >= 2) =>
flushCurMat()
val matName = tokens(1)
val newMat = new Material(matName)
curMat = Some(newMat)
case ("ka", Some("spectral")) => println("Spectral Ka not supported")
case ("ka", Some("xyz")) if (tokens.size >= 5) =>
val x = tokens(2).toFloat
val y = tokens(3).toFloat
val z = tokens(4).toFloat
val cieXYZ = new Vector3f(x, y, z)
mat().ambientColor = Some(cieToRgb(cieXYZ))
case ("ka", _) if (tokens.size >= 4) =>
val r = tokens(1).toFloat
val g = tokens(2).toFloat
val b = tokens(3).toFloat
val rgb = new Vector3f(r, g, b)
mat().ambientColor = Some(rgb)
case ("kd", Some("spectral")) => println("Spectral Kd not supported")
case ("kd", Some("xyz")) if (tokens.size >= 5) =>
val x = tokens(2).toFloat
val y = tokens(3).toFloat
val z = tokens(4).toFloat
val cieXYZ = new Vector3f(x, y, z)
mat().diffuseColor = Some(cieToRgb(cieXYZ))
case ("kd", _) if (tokens.size >= 4) =>
val r = tokens(1).toFloat
val g = tokens(2).toFloat
val b = tokens(3).toFloat
val rgb = new Vector3f(r, g, b)
mat().diffuseColor = Some(rgb)
case ("ks", Some("spectral")) => println("Spectral Ks not supported")
case ("ks", Some("xyz")) if (tokens.size >= 5) =>
val x = tokens(2).toFloat
val y = tokens(3).toFloat
val z = tokens(4).toFloat
val cieXYZ = new Vector3f(x, y, z)
mat().specularColor = Some(cieToRgb(cieXYZ))
case ("ks", _) if (tokens.size >= 4) =>
val r = tokens(1).toFloat
val g = tokens(2).toFloat
val b = tokens(3).toFloat
val rgb = new Vector3f(r, g, b)
mat().specularColor = Some(rgb)
case ("tf", _) => println("Transmission filter not supported")
case ("illum", _) if (tokens.size >= 2) =>
val illum = tokens(1).toInt
mat().illuminationModelIndex = Some(illum)
case ("d", _) | ("tr", _) if (tokens.size >= 2) =>
val tr = tokens(1).toFloat
mat().transparency = Some(tr)
case ("ns", _) if (tokens.size >= 2) =>
val n = tokens(1).toFloat
mat().specularCoef = Some(n)
case ("sharpness", _) if (tokens.size >= 2) =>
val sharp = tokens(1).toFloat
mat().sharpness = sharp
case ("ni", _) if (tokens.size >= 2) =>
val indexOfRefraction = tokens(1).toFloat
mat().refractionIndex = Some(indexOfRefraction)
case ("map_ka", _) if (tokens.size >= 2) =>
val texInfo = parseTex(tokens)
mat().ambientColorTexture = Some(texInfo)
case ("map_kd", _) if (tokens.size >= 2) =>
val texInfo = parseTex(tokens)
mat().diffuseColorTexture = Some(texInfo)
case ("map_ks", _) if (tokens.size >= 2) =>
val texInfo = parseTex(tokens)
mat().specularColorTexture = Some(texInfo)
case ("map_ns", _) if (tokens.size >= 2) =>
val texInfo = parseTex(tokens)
mat().specularCoefTexture = Some(texInfo)
case ("", _) => // Empty line (probably a comment), ignore
case (arg, _) => println("Unknown or invalid MTL command \\"" + arg + "\\", ignoring the line")
}
}
flushCurMat()
mats
}
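// Illustrative sketch (not part of the original source): parseMTL takes the raw MTL file
// content as a String and returns the parsed materials keyed by name, e.g.
//   val mats = SimpleOBJParser.parseMTL("newmtl red\nKd 0.8 0.1 0.1\nNs 96.0")
//   mats("red").diffuseColor // Some(Vector3f(0.8, 0.1, 0.1))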
case class OBJVertex(position: Int, texture: Option[Int], normal: Option[Int])
type OBJFace = Array[OBJVertex]
class OBJObjectGroupPart(val material: Option[Material]) {
val faces: ArrayBuffer[OBJFace] = new ArrayBuffer[OBJFace]()
override def toString(): String = material match {
case Some(mat) => "ObjectGroupPart(material=\\"" + mat.name + "\\")"
case None => "ObjectGroupPart(no material)"
}
}
class OBJObjectGroup(val name: String) {
var smooth: Boolean = false
val parts: ArrayBuffer[OBJObjectGroupPart] = new ArrayBuffer[OBJObjectGroupPart]()
override def toString(): String = "ObjectGroup(name=\\"" + name + "\\")"
}
class OBJObject(val name: String) {
val vertices: ArrayBuffer[Vector4f] = new ArrayBuffer[Vector4f]()
val texCoordinates: ArrayBuffer[Vector3f] = new ArrayBuffer[Vector3f]()
val normals: ArrayBuffer[Vector3f] = new ArrayBuffer[Vector3f]()
val parameterVertices: ArrayBuffer[Vector3f] = new ArrayBuffer[Vector3f]()
val groups: ArrayBuffer[OBJObjectGroup] = ArrayBuffer[OBJObjectGroup]()
override def toString(): String = "Object(name=\\"" + name + "\\")"
}
def parseOBJ(objFile: String, mtlFiles: scala.collection.Map[String, String]): scala.collection.Map[String, OBJObject] = {
val objs: Map[String, OBJObject] = Map()
var curObjGroupPart: Option[OBJObjectGroupPart] = None
var curObjGroup: Option[OBJObjectGroup] = None
var curObj: Option[OBJObject] = None
val availableMats: Map[String, Material] = Map()
def objGroupPart(): OBJObjectGroupPart = curObjGroupPart.getOrElse(throw new RuntimeException("No material currently selected for object"))
def objGroup(): OBJObjectGroup = curObjGroup.getOrElse(throw new RuntimeException("No group currently selected for object"))
def obj(): OBJObject = curObj.getOrElse(throw new RuntimeException("No object currently selected"))
def flushCurObjGroupPart(): Unit = for (cur <- curObjGroupPart) {
if (!objGroup().parts.contains(cur)) objGroup().parts += cur
curObjGroupPart = None
}
def flushCurObjGroup(): Unit = for (cur <- curObjGroup) {
flushCurObjGroupPart()
if (!obj().groups.contains(cur)) obj().groups += cur
curObjGroup = None
}
def flushCurObj(): Unit = for (cur <- curObj) {
flushCurObjGroup()
if (!objs.contains(cur.name)) objs += (cur.name -> cur)
curObj = None
}
def getObjGroupPart(material: Option[Material]): OBJObjectGroupPart = {
val existingPart = objGroup().parts.find { _.material == material }
existingPart.getOrElse(new OBJObjectGroupPart(material))
}
def getObjGroup(name: String): OBJObjectGroup = {
val existingGroup = obj().groups.find { _.name == name }
existingGroup.getOrElse(new OBJObjectGroup(name))
}
def getObj(name: String): OBJObject = {
val existingObj = objs.get(name)
existingObj.getOrElse(new OBJObject(name))
}
for (currentLine <- objFile.lines) {
val index = currentLine.indexOf("#")
val line = if (index < 0) currentLine else currentLine.substring(0, index).trim()
val tokens = line.split(" ", -1)
tokens(0).toLowerCase() match {
// Vertex data
case "v" if (tokens.size >= 4) =>
val x = tokens(1).toFloat
val y = tokens(2).toFloat
val z = tokens(3).toFloat
val w = if (tokens.size >= 5) tokens(4).toFloat else 1.0f
val pos = new Vector4f(x, y, z, w)
obj().vertices += pos
case "vp" if (tokens.size >= 2) =>
val u = tokens(1).toFloat
val v = if (tokens.size >= 3) tokens(2).toFloat else 1.0f
val w = if (tokens.size >= 4) tokens(3).toFloat else 1.0f
val param = new Vector3f(u, v, w)
obj().parameterVertices += param
case "vn" if (tokens.size >= 4) =>
val x = tokens(1).toFloat
val y = tokens(2).toFloat
val z = tokens(3).toFloat
val norm = new Vector3f(x, y, z)
obj().normals += norm
case "vt" if (tokens.size >= 2) =>
val u = tokens(1).toFloat
val v = if (tokens.size >= 3) tokens(2).toFloat else 0.0f
val w = if (tokens.size >= 4) tokens(3).toFloat else 0.0f
val coord = new Vector3f(u, v, w)
obj().texCoordinates += coord
// Free-form curve/surface attributes
case "cstype" => println("Type of curve not supported")
case "deg" => println("Degree for curves and surfaces not supported")
case "bmat" => println("Basis matrices not supported")
case "step" => println("Step size for surces and surfaces not supported")
// Elements
case "p" => println("Point element not supported")
case "l" => println("Line element not supported")
case "f" =>
val face = new Array[OBJVertex](tokens.size - 1)
def strToInt(str: String): Option[Int] = {
if (str == "") None
else Some(str.toInt)
}
for (currentToken <- 1 until tokens.size) {
val indices = tokens(currentToken).split("/")
val vertex = indices.length match {
case 1 => OBJVertex(indices(0).toInt, None, None)
case 2 => OBJVertex(indices(0).toInt, strToInt(indices(1)), None)
case 3 => OBJVertex(indices(0).toInt, strToInt(indices(1)), strToInt(indices(2)))
case _ => throw new RuntimeException("Malformed vertex data \\"" + tokens(currentToken) + "\\"")
}
face(currentToken - 1) = vertex
}
objGroupPart().faces += face
case "curv" => println("Curve element not supported")
case "curv2" => println("2D curve element not supported")
case "surf" => println("Surface element not supported")
// Special curve and point
case "parm" => println("Parameter not supported")
case "trim" => println("Trimming not supported")
case "hole" => println("Hole not supported")
case "scrv" => println("Curve sequence not supported")
case "sp" => println("Special point not supported")
case "end" => println("End not supported")
// Connectivity
case "con" => println("Connectivity not supported")
// Grouping
case "g" if (tokens.size >= 2) =>
flushCurObjGroup()
val groupName = tokens(1)
val newObjGroup = getObjGroup(groupName)
curObjGroup = Some(newObjGroup)
val newObjGroupPart = getObjGroupPart(None)
curObjGroupPart = Some(newObjGroupPart)
case "s" if (tokens.size >= 2) =>
val smooth = onOff(tokens(1))
objGroup().smooth = smooth
case "mg" => println("Merging group not supported")
case "o" if (tokens.size >= 2) =>
flushCurObj()
val objName = tokens(1)
val newObj = getObj(objName)
curObj = Some(newObj)
val newObjGroup = getObjGroup("default")
curObjGroup = Some(newObjGroup)
val newObjGroupPart = getObjGroupPart(None)
curObjGroupPart = Some(newObjGroupPart)
// Display/render attributes
case "bevel" => println("Bevel not supported")
case "c_interp" => println("Color interopolation not supported")
case "d_interp" => println("Dissolve interpolation not supported")
case "lod" => println("Level of detail not supported")
case "maplib" => println("Library mapping not supported")
case "usemap" => println("Use mapping not supported")
case "usemtl" if (tokens.size >= 2) =>
flushCurObjGroupPart()
val selectedMatName = tokens(1)
val selectedMat = availableMats(selectedMatName)
val newSubObj = getObjGroupPart(Some(selectedMat))
curObjGroupPart = Some(newSubObj)
case "mtllib" if (tokens.size >= 2) =>
val mtlFile = mtlFiles(tokens(1))
availableMats ++= parseMTL(mtlFile)
case "shadow_obj" => println("Shadow object not supported")
case "trace_obj" => println("Tracing object not supported")
case "ctech" => println("Curve approximation not supported")
case "stech" => println("Surface approximation not supported")
// Curve and surface operation
case "bsp" => println("B-spline patch not supported")
case "bzp" => println("Bezier patch not supported")
case "cdc" => println("Cardinal curve not supported")
case "cdp" => println("Cardinal patch not supported")
case "res" => println("Reference and display not supported")
// Misc
case "" => // Empty line (probably a comment), ignore
case arg => println("Unknown or invalid OBJ command \\"" + arg + "\\", ignoring the line")
}
}
flushCurObj()
objs
}
class VertexData(val position: Vector3f, val texture: Option[Vector2f], val normal: Option[Vector3f])
type Tri = (Int, Int, Int) // The three indices of the vertices of the triangle
class SubTriMesh(val material: Option[Material], val tris: Array[Tri]) {
override def toString(): String = material.fold { "SubTriMesh(no material)" } { mat => s"""SubTriMesh(material="${mat.name}")""" }
}
class TriMesh(val name: String, val vertices: Array[Vector3f], val texCoordinates: Option[Array[Vector2f]],
val normals: Option[Array[Vector3f]], val submeshes: Array[SubTriMesh]) {
override def toString(): String = """TriMesh(name="${name}")"""
}
def convOBJObjectToTriMesh(obj: OBJObject): TriMesh = {
val subs = new ArrayBuffer[SubTriMesh]()
val vertices = new ArrayBuffer[Vector3f]()
val texCoordinates = new ArrayBuffer[Vector2f]()
val normals = new ArrayBuffer[Vector3f]()
def bufferIndexOfVertex(vertexData: VertexData): Int = {
val vertex = vertexData.position
val texCoordinate = vertexData.texture
val normal = vertexData.normal
val index = (texCoordinate, normal) match {
case (Some(tex), Some(norm)) =>
for (i <- 0 until vertices.size) {
if (vertices(i) == vertex && texCoordinates(i) == tex && normals(i) == norm) return i
}
// No matching vertex data found, add it at the end
vertices += vertex
texCoordinates += tex
normals += norm
vertices.size - 1 // return index of the new vertex
case (None, Some(norm)) =>
for (i <- 0 until vertices.size) {
if (vertices(i) == vertex && normals(i) == norm) return i
}
// No matching vertex data found, add it at the end
vertices += vertex
normals += norm
vertices.size - 1 // return index of the new vertex
case (Some(tex), None) =>
for (i <- 0 until vertices.size) {
if (vertices(i) == vertex && texCoordinates(i) == tex) return i
}
// No matching vertex data found, add it at the end
vertices += vertex
texCoordinates += tex
vertices.size - 1 // return index of the new vertex
case (None, None) =>
for (i <- 0 until vertices.size) {
if (vertices(i) == vertex) return i
}
// No matching vertex data found, add it at the end
vertices += vertex
vertices.size - 1 // return index of the new vertex
}
val formatErr = "The vertex data format is not uniform accross the vertices"
if (texCoordinates.size > 0 && texCoordinates.size != vertices.size) throw new RuntimeException(formatErr)
if (normals.size > 0 && normals.size != vertices.size) throw new RuntimeException(formatErr)
index
}
obj.groups.foreach { group =>
group.parts.filter { _.faces.size > 0 }.foreach { part =>
val trisIndices = new ArrayBuffer[Tri]()
def addTri(v0: OBJVertex, v1: OBJVertex, v2: OBJVertex): Unit = {
def dataFromFileIndices(v: OBJVertex): VertexData = {
// Data in OBJ files are indexed from 1 (instead of 0)
val indexV = v.position - 1
val optIndexT = v.texture.map{_ - 1}
val optIndexN = v.normal.map{_ - 1}
val ova = obj.vertices(indexV)
val ov = new Vector3f(ova.x, ova.y, ova.z)
val ot = optIndexT.map { t => val ota = obj.texCoordinates(t); new Vector2f(ota.x, ota.y) }
val on = optIndexN.map { n => obj.normals(n) }
new VertexData(ov, ot, on)
}
val v0Data = dataFromFileIndices(v0)
val v1Data = dataFromFileIndices(v1)
val v2Data = dataFromFileIndices(v2)
val v0Index = bufferIndexOfVertex(v0Data)
val v1Index = bufferIndexOfVertex(v1Data)
val v2Index = bufferIndexOfVertex(v2Data)
val newTri: Tri = (v0Index, v1Index, v2Index)
trisIndices += newTri
}
part.faces.foreach { face =>
face.size match {
case 3 =>
val v0 = face(0)
val v1 = face(1)
val v2 = face(2)
addTri(v0, v1, v2)
case 4 =>
val v0 = face(0)
val v1 = face(1)
val v2 = face(2)
val v3 = face(3)
addTri(v0, v1, v3)
addTri(v1, v2, v3)
case _ => throw new RuntimeException("Only faces composed of 3 of 4 vertices are supported")
}
}
val newSub = new SubTriMesh(part.material, trisIndices.toArray)
subs += newSub
}
}
new TriMesh(obj.name, vertices.toArray, if (texCoordinates.size > 0) Some(texCoordinates.toArray) else None, if (normals.size > 0) Some(normals.toArray) else None, subs.toArray)
}
}
| joelross/scalajs-games | demo/shared/src/main/scala/games/utils/SimpleOBJParser.scala | Scala | bsd-3-clause | 24,316 |
//############################################################################
// Enumerations
//############################################################################
object Test1 {
object WeekDays extends Enumeration {
val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
}
def isWorkingDay(d: WeekDays.Value) =
! (d == WeekDays.Sat || d == WeekDays.Sun);
def run: Int = {
val it = WeekDays.values filter (isWorkingDay);
it.toList.length
}
}
object Test2 {
object ThreadState extends Enumeration {
val New = Value("NEW");
val Runnable = Value("RUNNABLE");
val Blocked = Value("BLOCKED");
val Waiting = Value("WAITING");
val TimedWaiting = Value("TIMED_WAITING");
val Terminated = Value("TERMINATED");
}
def run: Int = {
val it = for (s <- ThreadState.values; if s.id != 0) yield s;
it.toList.length
}
}
object Test3 {
object Direction extends Enumeration {
val North = Value("North")
val South = Value("South")
val East = Value("East")
val West = Value("West")
}
def run: Int = {
val it = for (d <- Direction.values; if d.toString() startsWith "N") yield d;
it.toList.length
}
}
object Test4 {
object Direction extends Enumeration {
val North = Value("North")
val South = Value("South")
val East = Value("East")
val West = Value("West")
}
def run: Int = {
val dir = Direction.withName("North")
assert(dir.toString == "North")
try {
Direction.withName("Nord")
assert(false)
} catch {
case e: Exception => /* do nothing */
}
0
}
}
object Test5 {
object D1 extends Enumeration(0) {
val North, South, East, West = Value;
}
object D2 extends Enumeration(-2) {
val North, South, East, West = Value;
}
object WeekDays extends Enumeration {
val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
}
def run(): Unit = {
val s1 = D1.ValueSet(D1.North, D1.East)
val s2 = D2.North + D2.East
println(s1)
println(s2)
println(s1 + D1.West)
println(s2 + D2.West)
println(s1.toBitMask.map(_.toBinaryString).toList)
println(s2.toBitMask.map(_.toBinaryString).toList)
println(D1.ValueSet.fromBitMask(s1.toBitMask))
println(D2.ValueSet.fromBitMask(s2.toBitMask))
val r: WeekDays.ValueSet = WeekDays.values.range(WeekDays.Tue, WeekDays.Sat)
println(r)
}
}
object SerializationTest {
object Types extends Enumeration { val X, Y = Value }
class A extends java.io.Serializable { val types = Types.values }
class B extends java.io.Serializable { val types = Set(Types.X, Types.Y) }
def serialize(obj: AnyRef) = {
val baos = new java.io.ByteArrayOutputStream()
val oos = new java.io.ObjectOutputStream(baos)
oos.writeObject(obj)
oos.close()
val bais = new java.io.ByteArrayInputStream(baos.toByteArray)
val ois = new java.io.ObjectInputStream(bais)
val prime = ois.readObject()
ois.close()
prime
}
def run(): Unit = {
/* This is no longer possible with the proxy-based serialization for collections: */
//serialize(new B())
serialize(new A())
}
}
//############################################################################
// Test code
object Test {
def check_success(name: String, closure: => Int, expected: Int): Unit = {
Console.print("test " + name);
try {
val actual: Int = closure;
if (actual == expected) {
Console.print(" was successful");
} else {
Console.print(" failed: expected "+ expected +", found "+ actual);
}
} catch {
case exception: Throwable => {
Console.print(" raised exception " + exception);
exception.printStackTrace();
}
}
Console.println()
}
def main(args: Array[String]): Unit = {
check_success("Test1", Test1.run, 5);
check_success("Test2", Test2.run, 5);
check_success("Test3", Test3.run, 1);
check_success("Test4", Test4.run, 0);
Console.println()
Test5.run()
Console.println()
SerializationTest.run()
}
}
//############################################################################
| scala/scala | test/files/run/enums.scala | Scala | apache-2.0 | 4,167 |
object Test extends App {
// #2235
new A2235 with B2235
}
// only one overloaded alternative is allowed to have defaults
class A {
def foo(a: Int = 0) = a
def foo(b: String = "another") = b
}
class B {
def foo(a: Int) = a
def bar(u: String = "ldksj") = u
}
class C extends B {
override def foo(a: Int = 1092) = a
def foo(b: String = "lskdfj")
def bar(i: Int = 129083) = i
}
// #2235
trait A2235 { def f(x: Int = 1) = x }
trait B2235 { def f(x: String = "1") = x }
| yusuke2255/dotty | tests/untried/neg/names-defaults-neg-ref.scala | Scala | bsd-3-clause | 487 |
package io.finch.endpoint
import cats.effect.Sync
import com.twitter.io.{Buf, Reader}
import io.finch._
import io.finch.internal._
import io.finch.items._
import java.nio.charset.{Charset, StandardCharsets}
import scala.reflect.ClassTag
private[finch] abstract class FullBody[F[_], A] extends Endpoint[F, A] {
protected def F: Sync[F]
protected def missing: F[Output[A]]
protected def present(contentType: String, content: Buf, cs: Charset): F[Output[A]]
final def apply(input: Input): EndpointResult[F, A] =
if (input.request.isChunked) EndpointResult.NotMatched[F]
else {
val output = F.suspend {
val contentLength = input.request.contentLengthOrNull
if (contentLength == null || contentLength == "0") missing
else present(
input.request.mediaTypeOrEmpty,
input.request.content,
input.request.charsetOrUtf8
)
}
EndpointResult.Matched(input, Trace.empty, output)
}
final override def item: RequestItem = items.BodyItem
}
private[finch] object FullBody {
trait PreparedBody[F[_], A, B] { _: FullBody[F, B] =>
protected def prepare(a: A): B
}
trait Required[F[_], A] extends PreparedBody[F, A, A] { _: FullBody[F, A] =>
protected def prepare(a: A): A = a
protected def missing: F[Output[A]] = F.raiseError(Error.NotPresent(items.BodyItem))
}
trait Optional[F[_], A] extends PreparedBody[F, A, Option[A]] { _: FullBody[F, Option[A]] =>
protected def prepare(a: A): Option[A] = Some(a)
protected def missing: F[Output[Option[A]]] = F.pure(Output.None)
}
}
private[finch] abstract class Body[F[_], A, B, CT](implicit
dd: Decode.Dispatchable[A, CT],
ct: ClassTag[A],
protected val F: Sync[F]
) extends FullBody[F, B] with FullBody.PreparedBody[F, A, B] {
protected def present(contentType: String, content: Buf, cs: Charset): F[Output[B]] =
dd(contentType, content, cs) match {
case Right(s) => F.pure(Output.payload(prepare(s)))
case Left(e) => F.raiseError(Error.NotParsed(items.BodyItem, ct, e))
}
final override def toString: String = "body"
}
private[finch] abstract class BinaryBody[F[_], A](implicit protected val F: Sync[F])
extends FullBody[F, A] with FullBody.PreparedBody[F, Array[Byte], A] {
protected def present(contentType: String, content: Buf, cs: Charset): F[Output[A]] =
F.pure(Output.payload(prepare(content.asByteArray)))
final override def toString: String = "binaryBody"
}
private[finch] abstract class StringBody[F[_], A](implicit protected val F: Sync[F])
extends FullBody[F, A]
with FullBody.PreparedBody[F, String, A] {
protected def present(contentType: String, content: Buf, cs: Charset): F[Output[A]] =
F.pure(Output.payload(prepare(content.asString(cs))))
final override def toString: String = "stringBody"
}
private[finch] abstract class ChunkedBody[F[_], S[_[_], _], A] extends Endpoint[F, S[F, A]] {
protected def F: Sync[F]
protected def prepare(r: Reader[Buf], cs: Charset): Output[S[F, A]]
final def apply(input: Input): EndpointResult[F, S[F, A]] =
if (!input.request.isChunked) EndpointResult.NotMatched[F]
else EndpointResult.Matched(
input,
Trace.empty,
F.delay(prepare(input.request.reader, input.request.charsetOrUtf8))
)
final override def item: RequestItem = items.BodyItem
}
private[finch] final class BinaryBodyStream[F[_], S[_[_], _]](implicit
LR: LiftReader[S, F],
protected val F: Sync[F]
) extends ChunkedBody[F, S, Array[Byte]] with (Buf => Array[Byte]) {
def apply(buf: Buf): Array[Byte] = buf.asByteArray
protected def prepare(r: Reader[Buf], cs: Charset): Output[S[F, Array[Byte]]] =
Output.payload(LR(r, this))
override def toString: String = "binaryBodyStream"
}
private[finch] final class StringBodyStream[F[_], S[_[_], _]](implicit
LR: LiftReader[S, F],
protected val F: Sync[F]
) extends ChunkedBody[F, S, String] with (Buf => String) {
def apply(buf: Buf): String = buf.asString(StandardCharsets.UTF_8)
protected def prepare(r: Reader[Buf], cs: Charset): Output[S[F, String]] = cs match {
case StandardCharsets.UTF_8 => Output.payload(LR(r, this))
case _ => Output.payload(LR(r, _.asString(cs)))
}
override def toString: String = "stringBodyStream"
}
private[finch] final class BodyStream[F[_], S[_[_], _], A, CT <: String](implicit
protected val F: Sync[F],
LR: LiftReader[S, F],
A: DecodeStream.Aux[S, F, A, CT]
) extends ChunkedBody[F, S, A] {
protected def prepare(r: Reader[Buf], cs: Charset): Output[S[F, A]] =
Output.payload(A(LR(r), cs))
override def toString: String = "bodyStream"
}
| ImLiar/finch | core/src/main/scala/io/finch/endpoint/body.scala | Scala | apache-2.0 | 4,655 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core.javac
import javax.lang.model.`type`.{ ArrayType, DeclaredType }
import scala.collection.JavaConverters._
import scala.collection.breakOut
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import com.sun.source.tree.{ IdentifierTree, MemberSelectTree, Scope, Tree }
import com.sun.source.util.TreePath
import javax.lang.model.`type`.TypeMirror
import javax.lang.model.element._
import javax.lang.model.util.ElementFilter
import org.ensime.api.{ BasicTypeInfo => _, _ }
import org.ensime.core.CompletionUtil
import org.ensime.model.BasicTypeInfo
trait JavaCompletionsAtPoint { requires: JavaCompiler =>
import CompletionUtil._
def askCompletionsAtPoint(info: SourceFileInfo,
offset: Int,
maxResultsArg: Int,
caseSens: Boolean): CompletionInfoList = {
val maxResults = if (maxResultsArg == 0) Int.MaxValue else maxResultsArg
val s = createJavaFileObject(info).getCharContent(false).toString
val preceding = s.slice(Math.max(0, offset - 100), offset)
val defaultPrefix = JavaIdentRegexp.findFirstMatchIn(preceding) match {
case Some(m) => m.group(1)
case _ => ""
}
val constructing = ConstructingRegexp.findFirstMatchIn(preceding).isDefined
val indexAfterTarget = Math.max(0, offset - defaultPrefix.length - 1)
val precedingChar = s(indexAfterTarget)
val isMemberAccess = precedingChar == '.'
val candidates: List[CompletionInfo] =
(if (ImportSubtypeRegexp.findFirstMatchIn(preceding).isDefined) {
// Erase the trailing partial subtype (it breaks type resolution).
val patched = s.substring(0, indexAfterTarget) + " " + s.substring(
indexAfterTarget + defaultPrefix.length + 1
);
(pathToPoint(SourceFileInfo(info.file, Some(patched), None),
indexAfterTarget - 1) map {
case (c: Compilation, path: TreePath) => {
memberCandidates(c, path.getLeaf, defaultPrefix, true, caseSens)
}
})
} else if (ImportRegexp.findFirstMatchIn(preceding).isDefined) {
(pathToPoint(info, indexAfterTarget) flatMap {
case (c: Compilation, path: TreePath) => {
getEnclosingMemberSelectTree(path).map { m =>
packageMemberCandidates(c, m, defaultPrefix, caseSens)
}
}
})
} else if (isMemberAccess) {
// Erase the trailing partial member (it breaks type resolution).
val patched = s.substring(0, indexAfterTarget) + ".wait()" + s
.substring(indexAfterTarget + defaultPrefix.length + 1);
(pathToPoint(SourceFileInfo(info.file, Some(patched), None),
indexAfterTarget + 1) flatMap {
case (c: Compilation, path: TreePath) => {
getEnclosingMemberSelectTree(path).map { m =>
memberCandidates(c,
m.getExpression(),
defaultPrefix,
false,
caseSens)
}
}
})
} else {
// Kick off an index search if the name looks like a type.
val typeSearch =
if (TypeNameRegex.findFirstMatchIn(defaultPrefix).isDefined) {
Some(
fetchTypeSearchCompletions(defaultPrefix, maxResults, indexer)
)
} else None
(scopeForPoint(info, indexAfterTarget) map {
case (c: Compilation, s: Scope) => {
scopeMemberCandidates(c, s, defaultPrefix, caseSens, constructing)
}
}) map { scopeCandidates =>
val typeSearchResult =
typeSearch.flatMap(Await.result(_, Duration.Inf)).getOrElse(Nil)
scopeCandidates ++ typeSearchResult
}
}).getOrElse(Nil)
CompletionInfoList(defaultPrefix,
candidates
.sortWith({ (c1, c2) =>
c1.relevance > c2.relevance ||
(c1.relevance == c2.relevance &&
c1.name.length < c2.name.length)
})
.take(maxResults))
}
private def getEnclosingMemberSelectTree(
path: TreePath
): Option[MemberSelectTree] = {
var p = path
while (p != null) {
p.getLeaf match {
case m: MemberSelectTree => return Some(m)
case _ => {}
}
p = p.getParentPath
}
None
}
private def selectedPackageName(m: MemberSelectTree): String = {
val name = m.getIdentifier.toString
m.getExpression match {
case m: MemberSelectTree => selectedPackageName(m) + "." + name
case i: IdentifierTree => i.getName.toString() + "." + name
case _ => name
}
}
private def packageMemberCandidates(
compilation: Compilation,
select: MemberSelectTree,
prefix: String,
caseSense: Boolean
): List[CompletionInfo] = {
val pkg = selectedPackageName(select)
val candidates = (Option(compilation.elements.getPackageElement(pkg)) map {
p: PackageElement =>
p.getEnclosedElements().asScala.flatMap { e =>
filterElement(compilation, e, prefix, caseSense, true, false)
}
}).getOrElse(Nil)
candidates.toList
}
private def filterElement(c: Compilation,
e: Element,
prefix: String,
caseSense: Boolean,
typesOnly: Boolean,
constructors: Boolean,
baseRelevance: Int = 0): List[CompletionInfo] = {
val s = e.getSimpleName.toString
    // reward case-sensitive matches
val relevance =
if (s.startsWith(prefix)) baseRelevance + 50 else baseRelevance
if (matchesPrefix(s, prefix, matchEntire = false, caseSens = caseSense) && !s
.contains("$") && !s.contains("<init>")) {
e match {
case e: ExecutableElement if !typesOnly =>
List(methodInfo(e, relevance + 5))
case e: VariableElement if !typesOnly =>
List(fieldInfo(e, relevance + 10))
case e: TypeElement =>
if (constructors) constructorInfos(c, e, relevance + 5)
else List(typeInfo(e, relevance))
case _ => Nil
}
} else Nil
}
private def scopeMemberCandidates(
compilation: Compilation,
scope: Scope,
prefix: String,
caseSense: Boolean,
constructing: Boolean
): List[CompletionInfo] = {
val candidates = ArrayBuffer[CompletionInfo]()
// Note Scope#getLocalElements does not include fields / members of
// enclosing classes. Need to add those manually.
//
def addTypeMembers(tel: TypeElement, relevance: Int): Unit =
for (el <- compilation.elements.getAllMembers(tel).asScala) {
for (info <- filterElement(compilation,
el,
prefix,
caseSense,
false,
constructing,
relevance)) {
candidates += info
}
}
var relevance = 0
for (tel <- Option(scope.getEnclosingClass())) {
addTypeMembers(tel, relevance)
var t = tel.getEnclosingElement()
while (t != null) {
t match {
case tel: TypeElement => addTypeMembers(tel, relevance)
case _ =>
}
t = t.getEnclosingElement()
relevance -= 10
}
}
relevance = 0
var s = scope
while (s != null) {
for (el <- s.getLocalElements().asScala) {
for (info <- filterElement(compilation,
el,
prefix,
caseSense,
false,
constructing,
relevance)) {
candidates += info
}
}
s = s.getEnclosingScope()
relevance -= 10
}
candidates.toList
}
private def memberCandidates(
c: Compilation,
target: Tree,
prefix: String,
importing: Boolean,
caseSense: Boolean
): List[CompletionInfo] =
typeElement(c, target).toList.flatMap {
case tel: TypeElement =>
val path = c.trees.getPath(c.compilationUnit, target)
val scope = c.trees.getScope(path)
val isAccessible: Element => Boolean = c.trees
.isAccessible(scope, _, c.types.getDeclaredType(tel))
c.elements
.getAllMembers(tel)
.asScala
.filter(isAccessible)
.flatMap { el =>
filterElement(c, el, prefix, caseSense, importing, false)
}(breakOut)
case e =>
log.warn("Unrecognized type element " + e)
List.empty
}
private def methodName(
e: ExecutableElement
)(formatType: TypeMirror => String): String = {
val params = e.getParameters.asScala.map { param =>
val paramType = formatType(param.asType())
val paramName = param.getSimpleName
s"$paramType $paramName"
}.mkString("(", ", ", ")")
val returns = formatType(e.getReturnType)
val identifierName = e.getSimpleName
s"$returns $identifierName$params"
}
private def fullMethodName(t: ExecutableElement): String =
methodName(t)(_.toString())
private def shortMethodName(t: ExecutableElement): String =
methodName(t)(_.toString.split("\\.").last)
private def typeMirrorToTypeInfo(t: TypeMirror): TypeInfo =
BasicTypeInfo(t.toString, DeclaredAs.Class, t.toString)
private def methodInfo(e: ExecutableElement,
relevance: Int): CompletionInfo = {
val params = e.getParameters.asScala.map { param =>
param.getSimpleName.toString ->
typeMirrorToTypeInfo(param.asType())
}
val typeInfo = ArrowTypeInfo(
shortMethodName(e),
fullMethodName(e),
typeMirrorToTypeInfo(e.getReturnType),
ParamSectionInfo(
params,
isImplicit = false
) :: Nil,
Nil
)
CompletionInfo(
Some(typeInfo),
e.getSimpleName.toString,
relevance,
None
)
}
private def fieldInfo(e: VariableElement, relevance: Int): CompletionInfo = {
val t = e.asType
CompletionInfo(
Some(BasicTypeInfo(renderShortType(t), DeclaredAs.Field, t.toString)),
e.getSimpleName.toString,
relevance,
None
)
}
private def renderShortType(t: TypeMirror): String = t match {
case t: ArrayType => renderShortType(t.getComponentType) + "[]"
case t: DeclaredType =>
t.asElement.getSimpleName + (t.getTypeArguments.asScala match {
case Seq() => ""
case args => args.map(renderShortType).mkString("<", ",", ">")
})
case _ => t.toString
}
private def typeInfo(e: TypeElement, relevance: Int): CompletionInfo = {
val s = e.getSimpleName.toString
CompletionInfo(
None,
s,
relevance,
None
)
}
private def constructorInfos(compilation: Compilation,
e: TypeElement,
relevance: Int): List[CompletionInfo] = {
val s = e.getSimpleName.toString
ElementFilter
.constructorsIn(compilation.elements.getAllMembers(e))
.asScala
.map(methodInfo(_, relevance))
.map { m =>
m.copy(name = s)
}
.toList
}
}
|
jozi-k/ensime-server
|
core/src/main/scala/org/ensime/core/javac/JavaCompletionsAtPoint.scala
|
Scala
|
gpl-3.0
| 11,904
|
//package io.eels.component.hive.dialect
//
//import java.io.{BufferedReader, InputStream, InputStreamReader}
//
//import com.github.tototoshi.csv.{CSVWriter, DefaultCSVFormat}
//import com.sksamuel.exts.Logging
//import com.typesafe.scalalogging.slf4j.StrictLogging
//import io.eels.component.hive.{HiveDialect, HiveWriter, Predicate}
//import io.eels.schema.Schema
//import io.eels.{InternalRow, SourceReader}
//import org.apache.hadoop.fs.{FileSystem, Path}
//
//object TextHiveDialect extends HiveDialect with Logging {
//
//  val delimiter = '\u0001'
//
// private def lineIterator(in: InputStream): Iterator[String] = {
// val buff = new BufferedReader(new InputStreamReader(in))
// Iterator.continually(buff.readLine).takeWhile(_ != null)
// }
//
// // todo add case insensitivity
// override def writer(schema: Schema, path: Path)
// (implicit fs: FileSystem): HiveWriter = new HiveWriter {
// logger.debug(s"Creating text writer for $path with delimiter=${TextHiveDialect.delimiter}")
//
// val out = fs.create(path, false)
// val csv = CSVWriter.open(out)(new DefaultCSVFormat {
// override val delimiter: Char = TextHiveDialect.delimiter
//    override val lineTerminator: String = "\n"
// })
//
// override def write(row: InternalRow): Unit = {
// // builds a map of the column names to the row values (by using the source schema), then generates
// // a new sequence of values ordered by the columns in the target schema
//      val map = schema.fieldNames.zip(row).map { case (columnName, value) => columnName -> value }.toMap
// val seq = schema.fieldNames.map(map.apply)
// csv.writeRow(seq)
// }
//
// override def close(): Unit = {
// csv.close()
// out.close()
// }
// }
//
// override def reader(path: Path, tableSchema: Schema, requestedSchema: Schema, predicate: Option[Predicate])
// (implicit fs: FileSystem): SourceReader = new SourceReader {
// val in = fs.open(path)
// val iter = lineIterator(in)
// override def close(): Unit = in.close()
// override def iterator: Iterator[InternalRow] = new Iterator[InternalRow] {
// override def hasNext: Boolean = iter.hasNext
// override def next(): InternalRow = iter.next.split(delimiter).padTo(tableSchema.size, null).toSeq
// }
// }
//}
|
eel-lib/eel
|
eel-hive/src/main/scala/io/eels/component/hive/dialect/TextHiveDialect.scala
|
Scala
|
mit
| 2,346
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.religions.Amaunator
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, RequiresAllOfFeat}
trait BelovedOfAmaunator
extends FeatRequisiteImpl with ForgottenRealmsReligionNonWarforged with BelovedLevelBase
with RequiresAllOfFeat with Amaunator with AmaunatorFeatBase { self: DeityFeat =>
override def allOfFeats: Seq[Feat] = List(DeityFeat.ChildOfAmaunator)
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/BelovedOfAmaunator.scala
|
Scala
|
apache-2.0
| 1,123
|
import scala.quoted.*
object Macro {
def impl[A : Type](using Quotes): Expr[A] = {
import quotes.reflect.*
TypeRepr.of[A].asType match
case '[tpe] => '{ (a: tpe) => ???}
'{???}
}
}
|
dotty-staging/dotty
|
tests/pos-macros/i8865.scala
|
Scala
|
apache-2.0
| 203
|
/*
* Copyright (c) 2014 François Cabrol.
*
* This file is part of MURAL.
*
* MURAL is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MURAL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MURAL. If not, see <http://www.gnu.org/licenses/>.
*/
package com.cabrol.francois.mural.generator.rulebased.parameters
import com.cabrol.francois.libjamu.musictheory.entity.note.Note
import com.cabrol.francois.mural.generator.rulebased.method.GenerationMethod
object Direction extends Enumeration {
type Direction = Value
val up, down, both = Value
}
case class RangeInt(min: Int, max: Int) extends Range(min, max, 1)
case class PhraseParameters(duration: RangeInt = RangeInt(0, 5), gap: RangeInt = RangeInt(0, 3))
class Parameters(val method: GenerationMethod,
                 val parentNotes: List[Note],
                 val numBeatsPerBar: Int,
                 val numBars: Int,
                 val ambitus: Ambitus,
                 val harmonicProgression: HarmonicProgression,
                 val percentageOfSilence: Int,
                 val numOfNotesAtTheSameTimeUnit: Int,
                 val varianceDirection: Direction.Direction,
                 val variance: Int,
                 val rhythmicDensity: Density.DensityVal,
                 val variation: Int,
                 val percentageNotesInChords: Int,
                 val phrase: PhraseParameters = PhraseParameters()) {
require(Range(0, 101).contains(percentageOfSilence), "The percentage of silence needs to be between 0 and 100")
require(Range(0, 101).contains(percentageNotesInChords), "The percentage of notes in chords needs to be between 0 and 100")
  require(numBeatsPerBar >= 1, "Number of beats per bar needs to be >= 1")
  require(numBars >= 1, "Number of bars needs to be >= 1")
require(numOfNotesAtTheSameTimeUnit >= 1, "Number of notes at the same time unit needs to be >= 1")
  def sequenceLength: Int = numBars * numBeatsPerBar
}
|
francoiscabrol/MURAL
|
src/main/scala/com/cabrol/francois/mural/generator/rulebased/parameters/Parameters.scala
|
Scala
|
gpl-3.0
| 2,449
|
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.Preference.Shards
import com.sksamuel.elastic4s.SuggestMode.{Missing, Popular}
import com.sksamuel.elastic4s.analyzers.{SnowballAnalyzer, StandardAnalyzer, WhitespaceAnalyzer}
import org.elasticsearch.common.geo.GeoDistance
import org.elasticsearch.common.unit.DistanceUnit
import org.elasticsearch.index.query.MatchQueryBuilder.{Operator, ZeroTermsQuery}
import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type
import org.elasticsearch.index.query.{MatchQueryBuilder, RegexpFlag, SimpleQueryStringFlag}
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval
import org.elasticsearch.search.aggregations.bucket.terms.Terms
import org.elasticsearch.search.sort.SortOrder
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, OneInstancePerTest}
/** @author Stephen Samuel */
class SearchDslTest extends FlatSpec with MockitoSugar with JsonSugar with OneInstancePerTest {
  "the search dsl" should "accept wildcards for index and types" in {
val req = search in "*" types "*" limit 10
req.show should matchJsonResource("/json/search/search_test1.json")
}
it should "accept sequences for indexes" in {
val req = search in("twitter", "other") types "*" limit 5 query "coldplay"
req.show should matchJsonResource("/json/search/search_test2.json")
}
it should "accept sequences for type" in {
val req = search in "*" types("users", "tweets") from 5 query "sammy"
req.show should matchJsonResource("/json/search/search_test3.json")
}
it should "use limit and and offset when specified" in {
val req = search in "*" types("users", "tweets") limit 6 from 9 query "coldplay"
req.show should matchJsonResource("/json/search/search_test4.json")
}
it should "use terminateAfter when specified" in {
val req = search in "*" types("users", "tweets") terminateAfter 5 query "coldplay"
req.show should matchJsonResource("/json/search/search_test_terminate_after.json")
}
it should "use fetchSource when specified" in {
val req = search in "*" types("users", "tweets") fetchSource false query "coldplay"
req.show should matchJsonResource("/json/search/search_test_fetch_source.json")
}
it should "use preference when specified" in {
val req = search in "*" types("users", "tweets") query "coldplay" preference Preference.PrimaryFirst
req.show should matchJsonResource("/json/search/search_preference_primary_first.json")
}
it should "use custom preference when specified" in {
val req = search in "*" types("users", "tweets") query "coldplay" preference Preference.Custom("custom")
req.show should matchJsonResource("/json/search/search_preference_custom.json")
}
it should "generate json for a raw query" in {
val req = search in "*" types("users", "tweets") limit 5 rawQuery {
"""{ "prefix": { "bands": { "prefix": "coldplay", "boost": 5.0, "rewrite": "yes" } } }"""
} searchType SearchType.Scan
req.show should matchJsonResource("/json/search/search_test5.json")
}
it should "generate json for a prefix query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
prefixQuery("bands" -> "coldplay") boost 5 rewrite "yes"
} searchType SearchType.Scan
req.show should matchJsonResource("/json/search/search_test5.json")
}
it should "generate json for a term query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
termQuery("singer", "chris martin") boost 1.6
} searchType SearchType.DfsQueryAndFetch
req.show should matchJsonResource("/json/search/search_term.json")
}
it should "generate json for a range query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
rangeQuery("coldplay") includeLower true includeUpper true from 4 to 10 boost 1.2
} searchType SearchType.QueryThenFetch
req.show should matchJsonResource("/json/search/search_range.json")
}
it should "generate json for a wildcard query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
wildcardQuery("name", "*coldplay") boost 7.6 rewrite "no"
}
req.show should matchJsonResource("/json/search/search_wildcard.json")
}
it should "generate json for a string query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
stringQuery("coldplay") allowLeadingWildcard true analyzeWildcard true analyzer WhitespaceAnalyzer autoGeneratePhraseQueries true defaultField "name" boost 6.5 enablePositionIncrements true fuzzyMaxExpansions 4 fuzzyPrefixLength 3 lenient true phraseSlop 10 tieBreaker 0.5 operator "OR" rewrite "writer"
} searchType SearchType.DfsQueryThenFetch
req.show should matchJsonResource("/json/search/search_string.json")
}
it should "generate json for a regex query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
regexQuery("drummmer" -> "will*") boost 4 flags RegexpFlag.INTERSECTION rewrite "rewrite-to"
} searchType SearchType.DfsQueryAndFetch
req.show should matchJsonResource("/json/search/search_regex.json")
}
it should "generate json for a min score" in {
val req = search in "*" types("users", "tweets") query "coldplay" minScore 0.5
req.show should matchJsonResource("/json/search/search_minscore.json")
}
it should "generate json for an index boost" in {
val req = search in "*" types("users", "tweets") query "coldplay" indexBoost("index1" -> 1.4, "index2" -> 1.3)
req.show should matchJsonResource("/json/search/search_indexboost.json")
}
it should "generate json for a bpoosting query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
boostingQuery positive {
stringQuery("coldplay")
} negative {
stringQuery("jethro tull")
} negativeBoost 5.6 positiveBoost 7.6
} searchType SearchType.DfsQueryAndFetch
req.show should matchJsonResource("/json/search/search_boosting.json")
}
it should "generate json for a id query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
idsQuery("1", "2", "3") boost 1.6 types("a", "b")
}
req.show should matchJsonResource("/json/search/search_id.json")
}
it should "generate json for a match query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
matchQuery("name", "coldplay")
.cutoffFrequency(3.4)
.fuzzyTranspositions(true)
.maxExpansions(4)
.operator(MatchQueryBuilder.Operator.AND)
.zeroTermsQuery(ZeroTermsQuery.ALL)
.slop(3)
.setLenient(true)
.minimumShouldMatch("75%")
.fuzziness(2f)
.prefixLength(4)
.analyzer(SnowballAnalyzer)
} searchType SearchType.QueryThenFetch
req.show should matchJsonResource("/json/search/search_match.json")
}
it should "generate json for a match query with default as or" in {
val req = search in "*" types("users", "tweets") limit 5 query {
matchQuery("drummmer" -> "will") boost 4 operator "OR"
}
req.show should matchJsonResource("/json/search/search_match_or.json")
}
it should "generate json for a fuzzy query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
fuzzyQuery("drummmer", "will") boost 4 maxExpansions 10 prefixLength 10 transpositions true
} searchType SearchType.QueryThenFetch
req.show should matchJsonResource("/json/search/search_fuzzy.json")
}
it should "generate json for a filtered query" in {
val req = search in "music" types "bands" query {
filteredQuery query {
"coldplay"
} filter {
termQuery("location", "uk")
} boost 1.2
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_query_filteredquery.json")
}
it should "generate json for a match all query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
matchAllQuery boost 4
} searchType SearchType.QueryAndFetch
req.show should matchJsonResource("/json/search/search_match_all.json")
}
it should "generate json for a hasChild query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
hasChildQuery("sometype") query {
"coldplay"
} boost 1.2 scoreMode "type"
} searchType SearchType.QueryThenFetch
req.show should matchJsonResource("/json/search/search_haschild_query.json")
}
it should "generate json for a hasParent query" in {
val req = search in "*" types("users", "tweets") limit 5 query {
hasParentQuery("sometype") query {
"coldplay"
} boost 1.2 scoreType "type"
} searchType SearchType.QueryThenFetch preference new Preference.Custom("custompref")
req.show should matchJsonResource("/json/search/search_hasparent_query.json")
}
it should "generate json for a boolean compound query" in {
val req = search in "*" types("bands", "artists") limit 5 query {
bool {
must(
regexQuery("drummmer" -> "will*") boost 5 flags RegexpFlag.ANYSTRING,
termQuery("singer" -> "chris")
) should termQuery("bassist" -> "berryman") not termQuery("singer" -> "anderson")
}
} preference Preference.Local
req.show should matchJsonResource("/json/search/search_boolean.json")
}
it should "generate json for a boolean query" in {
val req = search in "space" -> "planets" limit 5 query {
bool {
must(
regexQuery("drummmer" -> "will*") boost 5,
termQuery("singer" -> "chris")
) should {
termQuery("bassist" -> "berryman")
} not {
termQuery("singer" -> "anderson")
}
} boost 2.4 minimumShouldMatch 2 adjustPureNegative false disableCoord true queryName "booly"
} preference Preference.Local
req.show should matchJsonResource("/json/search/search_boolean2.json")
}
it should "generate json for a match phrase query" in {
val req = search("*").types("bands", "artists").limit(5).query {
matchPhraseQuery("name", "coldplay")
.cutoffFrequency(3.4)
.fuzzyTranspositions(true)
.maxExpansions(4)
.operator(MatchQueryBuilder.Operator.AND)
.zeroTermsQuery(ZeroTermsQuery.ALL)
.slop(3)
.operator("AND")
.minimumShouldMatch("75%")
.fuzziness(2f)
.boost(15)
.setLenient(true)
.prefixLength(4)
.analyzer(SnowballAnalyzer)
} preference Preference.OnlyNode("a")
req.show should matchJsonResource("/json/search/search_match_phrase.json")
}
it should "generate json for a match phrase prefix query" in {
val req = search("*").types("bands", "artists").limit(5).query {
matchPhrasePrefixQuery("name", "coldplay")
.cutoffFrequency(3.4)
.fuzzyTranspositions(true)
.maxExpansions(4)
.operator(MatchQueryBuilder.Operator.AND)
.zeroTermsQuery(ZeroTermsQuery.ALL)
.slop(3)
.operator("AND")
.minimumShouldMatch("75%")
.setLenient(true)
.fuzziness(2f)
.prefixLength(4)
.analyzer(SnowballAnalyzer)
} preference Preference.OnlyNode("a")
req.show should matchJsonResource("/json/search/search_match_phrase_prefix.json")
}
it should "generate json for term post filter" in {
val req = search in "music" types "bands" postFilter {
termQuery("singer", "chris martin") queryName "namey"
} preference Preference.Shards("a")
req.show should matchJsonResource("/json/search/search_term_filter.json")
}
it should "generate json for terms lookup filter" in {
val req = search in "music" types "bands" postFilter {
termsQuery("user", "val", "vallllll").queryName("namey")
}
req.show should matchJsonResource("/json/search/search_terms_lookup_filter.json")
}
it should "generate json for regex query" in {
val req = search in "music" types "bands" postFilter {
regexQuery("singer", "chris martin")
} preference Preference.PreferNode("a")
req.show should matchJsonResource("/json/search/search_regex_query.json")
}
it should "generate json for prefix query" in {
val req = search in "music" types "bands" postFilter {
prefixQuery("singer", "chris martin")
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_prefix_query.json")
}
it should "generate json for has child filter with filter" in {
val req = search in "music" types "bands" postFilter {
hasChildQuery("singer").query {
termQuery("name", "chris")
}.minChildren(2).maxChildren(4).shortCircuitCutoff(3).boost(2.3).queryName("namey")
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_haschild_filter.json")
}
it should "generate json for has parent filter with filter" in {
val req = search in "music" types "bands" postFilter {
hasParentQuery("singer").query {
termQuery("name", "chris")
}.boost(2.3).scoreType("scoreType").queryName("spidername")
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_hasparent_filter.json")
}
it should "generate json for nested filter with filter" in {
val req = search in "music" types "bands" postFilter {
nestedQuery("singer").query {
termQuery("name", "chris")
} queryName "namey"
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_nested_filter.json")
}
it should "generate json for has child filter with query" in {
val req = search in "music" types "bands" postFilter {
hasChildQuery("singer") query {
termQuery("name", "chris")
} queryName "namey"
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_haschild_filter_query.json")
}
it should "generate json for has parent filter with query" in {
val req = search in "music" types "bands" postFilter {
hasParentQuery("singer") query {
termQuery("name", "chris")
} queryName "namey"
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_hasparent_filter_query.json")
}
it should "generate json for nested filter with query" in {
val req = search in "music" types "bands" postFilter {
nestedQuery("singer") query {
termQuery("name", "chris")
} queryName "namey"
} preference Preference.Primary
req.show should matchJsonResource("/json/search/search_nested_filter_query.json")
}
it should "generate json for id filter" in {
val req = search in "music" types "bands" postFilter {
idsQuery("a", "b", "c").types("x", "y", "z")
} preference Preference.PrimaryFirst
req.show should matchJsonResource("/json/search/search_id_filter.json")
}
it should "generate json for type filter" in {
val req = search in "music" types "bands" postFilter {
typeQuery("sometype")
} preference new Shards("5", "7")
req.show should matchJsonResource("/json/search/search_type_filter.json")
}
it should "generate json for type range filter" in {
val req = search in "music" types "bands" postFilter {
rangeQuery("released") includeLower true includeUpper true gte "2010-01-01" lte "2012-12-12"
} preference new Shards("5", "7")
req.show should matchJsonResource("/json/search/search_range_filter.json")
}
it should "generate json for missing filter" in {
val req = search in "music" types "bands" postFilter {
missingQuery("producer") existence true queryName "named" includeNull true
} preference Preference.PrimaryFirst
req.show should matchJsonResource("/json/search/search_missing_filter.json")
}
it should "generate json for field sort" in {
val req = search in "music" types "bands" sort {
fieldSort("singer") missing "no-singer" order SortOrder.DESC mode MultiMode.Avg nestedPath "nest"
}
req.show should matchJsonResource("/json/search/search_sort_field.json")
}
it should "generate json for nested field sort" in {
val req = search in "music" types "bands" sort {
fieldSort("singer.weight") ignoreUnmapped true order SortOrder.DESC mode MultiMode.Sum nestedFilter {
termQuery("singer.name", "coldplay")
}
}
req.show should matchJsonResource("/json/search/search_sort_nested_field.json")
}
it should "generate correct json for score sort" in {
val req = search in "music" types "bands" sort {
scoreSort().missing("213").order(SortOrder.ASC)
}
req.show should matchJsonResource("/json/search/search_sort_score.json")
}
it should "generate correct json for script sort" in {
val req = search in "music" types "bands" sort {
scriptSort("document.score") typed "number" lang "java" order SortOrder.DESC nestedPath "a.b.c" sortMode "min"
} preference new Preference.Custom("custom-node")
req.show should matchJsonResource("/json/search/search_sort_script.json")
}
it should "generate correct json for script sort with params" in {
val req = search in "music" types "bands" sort {
scriptSort("doc.score") typed "number" order SortOrder.DESC params Map("param1" -> "value1", "param2" -> "value2")
} preference new Preference.Custom("custom-node")
req.show should matchJsonResource("/json/search/search_sort_script_params.json")
}
it should "generate correct json for geo sort" in {
val req = search in "music" types "bands" sort {
      geo sort "location" geohash "ABCDEFG" missing "567.8889" order SortOrder.DESC mode MultiMode.Sum point(56.6, 78.8) nested "nested-path" mode MultiMode.Max geoDistance GeoDistance.ARC
}
req.show should matchJsonResource("/json/search/search_sort_geo.json")
}
it should "generate correct json for multiple sorts" in {
val req = search in "music" types "bands" sort(
scriptSort("document.score") as "java" order SortOrder.ASC,
scoreSort().order(SortOrder.DESC),
fieldSort("dancer") order SortOrder.DESC
)
req.show should matchJsonResource("/json/search/search_sort_multiple.json")
}
it should "generate json for field sort with score tracking enabled" in {
val req = search in "music" types "bands" trackScores true sort {
fieldSort("singer") order SortOrder.DESC
}
req.show should matchJsonResource("/json/search/search_sort_track_scores.json")
}
it should "generate correct json for geo bounding box filter" in {
val req = search in "music" types "bands" postFilter {
geoBoxQuery("box") left 40.6 top 56.5 right 45.5 bottom 12.55
}
req.show should matchJsonResource("/json/search/search_filter_geo_boundingbox.json")
}
it should "generate correct json for geo bounding box filter2" in {
val req = search in "music" types "bands" postFilter {
geoBoxQuery("box") left 40.6 top 56.5 bottom 12.55 right 45.5
}
req.show should matchJsonResource("/json/search/search_filter_geo_boundingbox.json")
}
it should "generate correct json for geo bounding box filter3" in {
val req = search in "music" types "bands" postFilter {
geoBoxQuery("box") top 56.5 left 40.6 right 45.5 bottom 12.55
}
req.show should matchJsonResource("/json/search/search_filter_geo_boundingbox.json")
}
it should "generate correct json for dismax query" in {
val req = search in "music" types "bands" query {
dismax boost 4.5 query "coldplay" query "london" tieBreaker 1.2
}
req.show should matchJsonResource("/json/search/search_query_dismax.json")
}
it should "generate correct json for common terms query" in {
val req = search in "music" types "bands" query {
commonQuery("name") text "some text here" analyzer WhitespaceAnalyzer boost 12.3 cutoffFrequency 14.4 highFreqOperator "AND" lowFreqOperator "OR" lowFreqMinimumShouldMatch 3 highFreqMinimumShouldMatch 2
}
req.show should matchJsonResource("/json/search/search_query_commonterms.json")
}
it should "generate correct json for constant score query" in {
val req = search in "music" types "bands" query {
constantScoreQuery {
termQuery("name", "sammy")
} boost 14.5
}
req.show should matchJsonResource("/json/search/search_query_constantscore.json")
}
it should "generate correct json for terms query" in {
val req = search in "music" types "bands" query {
termsQuery("name", "chris", "will", "johnny", "guy") boost 1.2 minimumShouldMatch 4 disableCoord true
}
req.show should matchJsonResource("/json/search/search_query_terms.json")
}
it should "generate correct json for multi match query" in {
val req = search in "music" types "bands" query {
      multiMatchQuery("this is my query") fields("name", "location", "genre") analyzer WhitespaceAnalyzer boost 3.4 cutoffFrequency 1.7 fuzziness "something" prefixLength 4 minimumShouldMatch 2 tieBreaker 4.5 zeroTermsQuery MatchQueryBuilder.ZeroTermsQuery.ALL fuzzyRewrite "some-rewrite" maxExpansions 4 lenient true prefixLength 4 operator Operator.AND matchType Type.CROSS_FIELDS
}
req.show should matchJsonResource("/json/search/search_query_multi_match.json")
}
it should "generate correct json for multi match query with minimum should match text clause" in {
val req = search in "music" types "bands" query {
multiMatchQuery("this is my query") fields("name", "location", "genre") minimumShouldMatch "2<-1 5<80%" matchType "best_fields"
}
req.show should matchJsonResource("/json/search/search_query_multi_match_minimum_should_match.json")
}
it should "generate correct json for geo distance filter" in {
val req = search in "music" types "bands" postFilter {
bool(
should(
geoDistanceQuery("distance") point(10.5d, 35.0d) geoDistance GeoDistance
.FACTOR geohash "geo1234" distance "120mi"
) not (
geoDistanceQuery("distance") lat 45.4d lon 76.6d distance(45, DistanceUnit.YARD)
)
)
}
req.show should matchJsonResource("/json/search/search_filter_geo_distance.json")
}
it should "generate correct json for a rescore query" in {
val req = search in "music" types "bands" rescore {
rescore("coldplay").originalQueryWeight(1.4).rescoreQueryWeight(5.4).scoreMode("modey").window(14)
}
req.show should matchJsonResource("/json/search/search_rescore.json")
}
it should "generate correct json for function score query" in {
val req = search in "music" types "bands" query {
functionScoreQuery("coldplay").boost(1.4).maxBoost(1.9).scoreMode("multiply").boostMode("max").scorers(
randomScore(1234).weight(1.2),
scriptScore("some script here").weight(0.5),
gaussianScore("field1", "1m", "2m").filter(termQuery("band", "coldplay")),
fieldFactorScore("field2").factor(1.2).filter(termQuery("band", "taylor swift"))
)
}
req.show should matchJsonResource("/json/search/search_function_score.json")
}
it should "generate correct json for geo polygon filter" in {
val req = search in "music" types "bands" postFilter {
geoPolygonQuery("distance") point(10, 10) point(20, 20) point(30, 30) point "123456"
}
req.show should matchJsonResource("/json/search/search_filter_geo_polygon.json")
}
it should "generate correct json for a boolean filter" in {
val req = search in "music" types "bands" postFilter {
bool {
must {
termQuery("name", "sammy")
} should {
termQuery("location", "oxford")
} not {
termQuery("type", "rap")
}
}
}
req.show should matchJsonResource("/json/search/search_filter_bool.json")
}
it should "generate correct json for datehistogram aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation datehistogram "years" field "date" interval DateHistogramInterval.YEAR minDocCount 0
}
req.show should matchJsonResource("/json/search/search_aggregations_datehistogram.json")
}
it should "generate correct json for range aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation range "range_agg" field "score" range(10.0, 15.0)
}
req.show should matchJsonResource("/json/search/search_aggregations_range.json")
}
it should "generate correct json for date range aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation daterange "daterange_agg" field "date" range("now-1Y", "now")
}
req.show should matchJsonResource("/json/search/search_aggregations_daterange.json")
}
it should "generate correct json for date range aggregation with unbounded from" in {
val req = search in "music" types "bands" aggs {
aggregation daterange "daterange_agg" field "date" unboundedFrom("key", "now-1Y")
}
req.show should matchJsonResource("/json/search/search_aggregations_daterange_from.json")
}
it should "generate correct json for date range aggregation with unbounded to" in {
val req = search in "music" types "bands" aggs {
aggregation daterange "daterange_agg" field "date" unboundedTo("key", "now")
}
req.show should matchJsonResource("/json/search/search_aggregations_daterange_to.json")
}
it should "generate correct json for histogram aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation histogram "score_histogram" field "score" interval 2
}
req.show should matchJsonResource("/json/search/search_aggregations_histogram.json")
}
it should "generate correct json for filter aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation filter "my_filter_agg" filter {
bool {
must {
termQuery("name", "sammy")
} should {
termQuery("location", "oxford")
} not {
termQuery("type", "rap")
}
}
}
}
req.show should matchJsonResource("/json/search/search_aggregations_filter.json")
}
it should "generate correct json for terms aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation terms "my_terms_agg" field "keyword" size 10 order Terms.Order.count(false)
}
req.show should matchJsonResource("/json/search/search_aggregations_terms.json")
}
it should "generate correct json for top hits aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation terms "top-tags" field "tags" size 3 order Terms.Order.count(false) aggregations (
aggregation topHits "top_tag_hits" size 1 sort {
fieldSort("last_activity_date") order SortOrder.DESC
} fetchSource(Array("title"), Array.empty)
)
}
req.show should matchJsonResource("/json/search/search_aggregations_top_hits.json")
}
it should "generate correct json for geobounds aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation geobounds "geo_agg" field "geo_point" wrapLongitude true
}
req.show should matchJsonResource("/json/search/search_aggregations_geobounds.json")
}
it should "generate correct json for geodistance aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation geodistance "geo_agg" field "geo_point" point(45.0, 27.0) geoDistance GeoDistance.ARC range(1.0, 1.0)
}
req.show should matchJsonResource("/json/search/search_aggregations_geodistance.json")
}
it should "generate correct json for sub aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation datehistogram "days" field "date" interval DateHistogramInterval.DAY aggs(
aggregation terms "keywords" field "keyword" size 5,
aggregation terms "countries" field "country")
}
req.show should matchJsonResource("/json/search/search_aggregations_datehistogram_subs.json")
}
it should "generate correct json for min aggregation" in {
val req = search in "school" types "student" aggs {
aggregation min "grades_min" field "grade" script {
script("doc['grade'].value").lang("lua").param("apple", "bad")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_min.json")
}
it should "generate correct json for max aggregation" in {
val req = search in "school" types "student" aggs {
aggregation max "grades_max" field "grade" script {
script("doc['grade'].value").lang("lua")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_max.json")
}
it should "generate correct json for sum aggregation" in {
val req = search in "school" types "student" aggs {
aggregation sum "grades_sum" field "grade" script {
script("doc['grade'].value").lang("lua") params Map("classsize" -> "30", "room" -> "101A")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_sum.json")
}
it should "generate correct json for avg aggregation" in {
val req = search in "school" types "student" aggs {
aggregation avg "grades_avg" field "grade" script {
script("doc['grade'].value").lang("lua")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_avg.json")
}
it should "generate correct json for stats aggregation" in {
val req = search in "school" types "student" aggs {
aggregation stats "grades_stats" field "grade" script {
script("doc['grade'].value").lang("lua")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_stats.json")
}
it should "generate correct json for extendedstats aggregation" in {
val req = search in "school" types "student" aggs {
aggregation extendedstats "grades_extendedstats" field "grade" script {
script("doc['grade'].value").lang("lua")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_extendedstats.json")
}
it should "generate correct json for percentiles aggregation" in {
val req = search in "school" types "student" aggs {
aggregation percentiles "grades_percentiles" field "grade" percents(95, 99, 99.9) compression 200
}
req.show should matchJsonResource("/json/search/search_aggregations_percentiles.json")
}
it should "generate correct json for percentileranks aggregation" in {
val req = search in "school" types "student" aggs {
aggregation percentileranks "grades_percentileranks" field "grade" percents(95, 99, 99.9) compression 200
}
req.show should matchJsonResource("/json/search/search_aggregations_percentileranks.json")
}
it should "generate correct json for value count aggregation" in {
val req = search in "school" types "student" aggs {
aggregation count "grades_count" field "grade" script {
script("doc['grade'].value").lang("lua")
}
}
req.show should matchJsonResource("/json/search/search_aggregations_count.json")
}
it should "generate correct json for cardinality aggregation" in {
val req = search in "school" types "student" aggs {
aggregation cardinality "grades_cardinality" field "grade" rehash true precisionThreshold 40000
}
req.show should matchJsonResource("/json/search/search_aggregations_cardinality.json")
}
it should "generate correct json for nested aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation nested "nested_agg" path "nested_obj" aggs {
aggregation terms "my_nested_terms_agg" field "keyword"
}
}
req.show should matchJsonResource("/json/search/search_aggregations_nested.json")
}
it should "generate correct json for highlighting" in {
val req = search in "music" types "bands" highlighting(
options tagSchema TagSchema.Styled boundaryChars "\\\\b" boundaryMaxScan 4 order HighlightOrder
.Score preTags "<b>" postTags "</b>" encoder HighlightEncoder.Html,
"name" fragmentSize 100 numberOfFragments 3 fragmentOffset 4,
"type" numberOfFragments 100 fragmentSize 44 highlighterType "some-type"
)
req.show should matchJsonResource("/json/search/search_highlighting.json")
}
it should "generate correct json for multiple suggestions" in {
val req = search in "music" types "bands" query "coldplay" suggestions(
term suggestion "my-suggestion-1" text "clocks by culdpaly" field "names" maxEdits 4 mode Popular shardSize 2 accuracy 0.6,
term suggestion "my-suggestion-2" text "aqualuck by jethro toll" field "names" size 5 mode Missing minDocFreq 0.2 prefixLength 3,
term suggestion "my-suggestion-3" text "bountiful day by u22" field "names" analyzer StandardAnalyzer maxInspections 3 stringDistance "levenstein",
term suggestion "my-suggestion-4" text "whatever some text" field "names" maxTermFreq 0.5 minWordLength 5 mode SuggestMode
.Always
)
req.show should matchJsonResource("/json/search/search_suggestions_multiple.json")
}
// for backwards compatibility default suggester is the term suggester
it should "generate correct json for suggestions" in {
val req = search in "music" types "bands" query termQuery("name", "coldplay") suggestions(
term suggestion "suggestion-1" text "clocks by culdpaly" field "name" maxEdits 2,
term suggestion "suggestion-2" text "aqualuck by jethro toll" field "name"
)
req.show should matchJsonResource("/json/search/search_suggestions.json")
}
it should "generate correct json for script fields" in {
val req =
search in "sesportfolio" types "positions" query matchAllQuery scriptfields(
scriptField("balance") script "portfolioscript" lang "native" params Map("fieldName" -> "rate_of_return"),
scriptField("date") script "doc['date'].value" lang "groovy"
)
req.show should matchJsonResource("/json/search/search_script_field_poc.json")
}
it should "generate correct json for suggestions of multiple suggesters" in {
val req = search in "music" types "bands" query termQuery("name", "coldplay") suggestions(
term suggestion "suggestion-term" text "culdpaly" field "name" maxEdits 2,
phrase suggestion "suggestion-phrase" text "aqualuck by jethro toll" field "name",
completion suggestion "suggestion-completion" text "cold" field "ac"
)
req.show should matchJsonResource("/json/search/search_suggestions_multiple_suggesters.json")
}
it should "generate correct json for context queries" in {
val req = search in "music" types "bands" suggestions (
completion suggestion "my-suggestion-1" text "wildcats by ratatat" field "colors" context("genre", "electronic")
)
req.show should matchJsonResource("/json/search/search_suggestions_context.json")
}
it should "generate correct json for context queries with an Iterable argument" in {
val req = search in "music" types "bands" suggestions (
completion suggestion "my-suggestion-1" text "wildcats by ratatat" field "colors" context("genre", Seq(
"electronic",
"alternative rock"))
)
req.show should matchJsonResource("/json/search/search_suggestions_context_multiple.json")
}
it should "generate correct json for nested query" in {
val req = search in "music" types "bands" query {
nestedQuery("obj1") query {
constantScoreQuery {
termQuery("name", "sammy")
}
} scoreMode "avg" boost 14.5 queryName "namey"
}
req.show should matchJsonResource("/json/search/search_query_nested.json")
}
it should "generate correct json for a SpanTermQueryDefinition" in {
val req = search in "*" types("users", "tweets") query {
spanTermQuery("name", "coldplay").boost(123)
}
req.show should matchJsonResource("/json/search/search_query_span_term.json")
}
it should "generate correct json for a geo distance range filter" in {
val req = search in "*" types("users", "tweets") postFilter {
geoDistanceQuery("postcode").geohash("hash123").queryName("myfilter")
}
req.show should matchJsonResource("/json/search/search_filter_geo_range.json")
}
it should "generate correct json for a simple string query" in {
val req = search in "*" types("users", "tweets") query {
simpleStringQuery("coldplay")
.analyzer("whitespace")
.defaultOperator("AND")
.field("name")
.flags(SimpleQueryStringFlag.AND, SimpleQueryStringFlag.OR, SimpleQueryStringFlag.NOT)
}
req.show should matchJsonResource("/json/search/search_simple_string_query.json")
}
it should "generate correct json for default filtered query" in {
val req = filteredQuery filter termQuery("singer", "lemmy")
req.builder.toString should matchJsonResource("/json/search/search_default_query.json")
}
it should "generate correct json for global aggregation" in {
val req = search in "music" types "bands" aggs {
aggregation global "global_agg"
}
req.show should matchJsonResource("/json/search/search_aggregations_global.json")
}
it should "generate json for ignored field type sort" in {
val req = search in "music" types "bands" sort {
fieldSort("singer.weight") unmappedType "long" order SortOrder.DESC
}
req.show should matchJsonResource("/json/search/search_sort_unmapped_field_type.json")
}
}
|
k4200/elastic4s
|
elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/SearchDslTest.scala
|
Scala
|
apache-2.0
| 37,683
|
package org.jetbrains.plugins.scala.lang.psi
import com.intellij.psi._
import org.jetbrains.plugins.scala.editor.documentationProvider.ScalaDocumentationProvider
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.Parameter
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.psi.types.{ScSubstitutor, ScType}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil
import org.jetbrains.plugins.scala.project.{ProjectContext, ProjectContextOwner}
import org.jetbrains.plugins.scala.util.ScalaUtils
/**
* User: Alexander Podkhalyuzin
* Date: 12.08.2009
*/
object PresentationUtil {
def presentationString(owner: ProjectContextOwner): String = {
implicit val project = owner.projectContext
presentationString(owner.asInstanceOf[Any])(project)
}
def presentationString(obj: Any)
(implicit project: ProjectContext): String = presentationString(obj, ScSubstitutor.empty)
def presentationString(obj: Any, substitutor: ScSubstitutor)
(implicit project: ProjectContext): String = {
val res: String = obj match {
case clauses: ScParameters => clauses.clauses.map(presentationString(_, substitutor)).mkString("")
case clause: ScParameterClause =>
val buffer = new StringBuilder("")
buffer.append("(")
if (clause.isImplicit) buffer.append("implicit ")
buffer.append(clause.parameters.map(presentationString(_, substitutor)).mkString(", "))
buffer.append(")")
buffer.toString()
case param: ScParameter => ScalaDocumentationProvider.parseParameter(param)(presentationString(_, substitutor))
case param: Parameter =>
val builder = new StringBuilder
builder.append(param.name)
builder.append(": " + presentationString(param.paramType, substitutor))
if (param.isRepeated) builder.append("*")
if (param.isDefault) builder.append(" = _")
builder.toString()
case tp: ScType => substitutor.subst(tp).presentableText
case tp: PsiEllipsisType =>
presentationString(tp.getComponentType, substitutor) + "*"
case tp: PsiType =>
presentationString(tp.toScType(), substitutor)
case tp: ScTypeParamClause =>
tp.typeParameters.map(t => presentationString(t, substitutor)).mkString("[", ", ", "]")
case param: ScTypeParam =>
var paramText = param.name
if (param.isContravariant) paramText = "-" + paramText
else if (param.isCovariant) paramText = "+" + paramText
val stdTypes = param.projectContext.stdTypes
param.lowerBound foreach {
case stdTypes.Nothing =>
case tp: ScType => paramText = paramText + " >: " + presentationString(tp, substitutor)
}
param.upperBound foreach {
case stdTypes.Any =>
case tp: ScType => paramText = paramText + " <: " + presentationString(tp, substitutor)
}
param.viewBound foreach {
(tp: ScType) => paramText = paramText + " <% " + presentationString(tp, substitutor)
}
param.contextBound foreach {
(tp: ScType) => paramText = paramText + " : " + presentationString(ScTypeUtil.stripTypeArgs(substitutor.subst(tp)), substitutor)
}
paramText
case param: PsiTypeParameter =>
var paramText = param.name
//todo: possibly add supers and extends?
paramText
case params: PsiParameterList =>
params.getParameters.map(presentationString(_, substitutor)).mkString("(", ", ", ")")
case param: PsiParameter =>
val buffer: StringBuilder = new StringBuilder("")
val list = param.getModifierList
if (list == null) return ""
val lastSize = buffer.length
for (a <- list.getAnnotations) {
if (lastSize != buffer.length) buffer.append(" ")
val element = a.getNameReferenceElement
if (element != null) buffer.append("@").append(element.getText)
}
if (lastSize != buffer.length) buffer.append(" ")
val name = param.name
if (name != null) {
buffer.append(name)
}
buffer.append(": ")
buffer.append(presentationString(param.getType, substitutor)) //todo: create param type, java.lang.Object => Any
buffer.toString()
case fun: ScFunction =>
val buffer: StringBuilder = new StringBuilder("")
fun.getParent match {
case _: ScTemplateBody if fun.containingClass != null =>
val qual = fun.containingClass.qualifiedName
if (qual != null) {
buffer.append(qual).append(".")
}
case _ =>
}
buffer.append(fun.name)
fun.typeParametersClause match {case Some(tpc) => buffer.append(presentationString(tpc)) case _ =>}
buffer.append(presentationString(fun.paramClauses, substitutor)).append(": ")
buffer.append(presentationString(fun.returnType.getOrAny, substitutor))
buffer.toString()
case elem: PsiElement => elem.getText
case null => ""
case _ => obj.toString
}
res.replace(ScalaUtils.typeParameter, "T")
}
}
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/PresentationUtil.scala
|
Scala
|
apache-2.0
| 5,484
|
package com.github.diegopacheco.sandbox.scala.akka.dispatcher
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import akka.actor.actorRef2Scala
/**
* Dispatchers:
* - Dispatcher -> shared pool - Event based: ThreadPool or ForkJoin
 * - PinnedDispatcher -> dedicated per actor, 1 thread pool per actor
* - BalancingDispatcher -> shared and re-distributed work: busy or idle
 * - CallingThreadDispatcher -> current thread - doesn't create new threads
*
*/
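// A minimal usage sketch (not part of the original file) showing how an actor could be bound to
// one of the dispatchers listed above. The dispatcher id "my-pinned-dispatcher" and its config
// block are illustrative assumptions, not something defined in this project.
//
//   // in application.conf:
//   //   my-pinned-dispatcher {
//   //     executor = "thread-pool-executor"
//   //     type = PinnedDispatcher
//   //   }
//
//   val system = ActorSystem("pingpong")
//   val pong   = system.actorOf(Props[PongActor], "pong")
//   val ping   = system.actorOf(Props(new PingActor(pong)).withDispatcher("my-pinned-dispatcher"), "ping")
//   ping ! Start   // kicks off the ping-pong exchange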
trait Message { var content: String = "" }
case object Start extends Message
case object Stop extends Message
case object Ping extends Message
case object Pong extends Message
class PingActor(val pong:ActorRef) extends Actor {
def receive = {
case Start => println("ping") ; pong ! Ping
case Pong => println("ping") ; sender ! Ping
case Stop =>
println("STOP")
sender ! Stop
context.system.stop(self)
}
}
class PongActor extends Actor {
def receive = {
case Ping => println("pong") ; sender ! Pong
case Stop =>
val ref:ActorRef = self
context.system.stop(ref)
}
}
|
diegopacheco/scala-playground
|
scala_11_akka_23_full_playground/src/main/scala/com/github/diegopacheco/sandbox/scala/akka/dispatcher/DispatcherApp.scala
|
Scala
|
unlicense
| 1,319
|
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.javaapi.message
import java.nio.ByteBuffer
import kafka.common.ErrorMapping
import org.apache.log4j.Logger
import kafka.message._
class ByteBufferMessageSet(private val buffer: ByteBuffer,
private val initialOffset: Long = 0L,
private val errorCode: Int = ErrorMapping.NoError) extends MessageSet {
private val logger = Logger.getLogger(getClass())
val underlying: kafka.message.ByteBufferMessageSet = new kafka.message.ByteBufferMessageSet(buffer,
initialOffset,
errorCode)
def this(buffer: ByteBuffer) = this(buffer, 0L, ErrorMapping.NoError)
def this(compressionCodec: CompressionCodec, messages: java.util.List[Message]) {
this(compressionCodec match {
case NoCompressionCodec =>
val buffer = ByteBuffer.allocate(MessageSet.messageSetSize(messages))
val messageIterator = messages.iterator
while(messageIterator.hasNext) {
val message = messageIterator.next
message.serializeTo(buffer)
}
buffer.rewind
buffer
case _ =>
import scala.collection.JavaConversions._
val message = CompressionUtils.compress(asBuffer(messages), compressionCodec)
val buffer = ByteBuffer.allocate(message.serializedSize)
message.serializeTo(buffer)
buffer.rewind
buffer
}, 0L, ErrorMapping.NoError)
}
def this(messages: java.util.List[Message]) {
this(NoCompressionCodec, messages)
}
def validBytes: Long = underlying.validBytes
def serialized():ByteBuffer = underlying.serialized
def getInitialOffset = initialOffset
def getBuffer = buffer
def getErrorCode = errorCode
override def iterator: java.util.Iterator[MessageAndOffset] = new java.util.Iterator[MessageAndOffset] {
val underlyingIterator = underlying.iterator
override def hasNext(): Boolean = {
underlyingIterator.hasNext
}
override def next(): MessageAndOffset = {
underlyingIterator.next
}
override def remove = throw new UnsupportedOperationException("remove API on MessageSet is not supported")
}
override def toString: String = underlying.toString
def sizeInBytes: Long = underlying.sizeInBytes
override def equals(other: Any): Boolean = {
other match {
case that: ByteBufferMessageSet =>
(that canEqual this) && errorCode == that.errorCode && buffer.equals(that.buffer) && initialOffset == that.initialOffset
case _ => false
}
}
def canEqual(other: Any): Boolean = other.isInstanceOf[ByteBufferMessageSet]
override def hashCode: Int = 31 * (17 + errorCode) + buffer.hashCode + initialOffset.hashCode
}
|
tcrayford/hafka
|
kafka/core/src/main/scala/kafka/javaapi/message/ByteBufferMessageSet.scala
|
Scala
|
bsd-3-clause
| 3,456
|
package com.arcusys.valamis.updaters.version310.certificateHistory
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.{SlickProfile, TypeMapper}
import com.arcusys.valamis.updaters.common.model.PeriodTypes
import com.arcusys.valamis.updaters.version310.model.certificate.CertificateStatuses
import org.joda.time.DateTime
trait CertificateHistoryTableComponent
extends TypeMapper { self: SlickProfile =>
import driver.api._
class UserStatusHistoryTable(tag: Tag) extends Table[UserStatusHistory](tag, tblName("CERT_STATE_HSTRY")) {
implicit val ValidPeriodTypeMapper = enumerationIdMapper(CertificateStatuses)
val certificateId = column[Long]("CERTIFICATE_ID")
val userId = column[Long]("USER_ID")
val date = column[DateTime]("DATE")
val isDeleted = column[Boolean]("IS_DELETED")
val status = column[CertificateStatuses.Value]("STATUS")
override def * = (
certificateId,
userId,
status,
date,
isDeleted) <> (UserStatusHistory.tupled, UserStatusHistory.unapply)
}
class CertificateHistoryTable(tag: Tag) extends Table[CertificateHistory](tag, tblName("CERTIFICATE_HSTRY")) {
implicit val ValidPeriodTypeMapper = enumerationIdMapper(PeriodTypes)
val certificateId = column[Long]("CERTIFICATE_ID")
val date = column[DateTime]("DATE")
val isDeleted = column[Boolean]("IS_DELETED")
val title = column[String]("TITLE")
val isPermanent = column[Boolean]("IS_PERMANENT")
val companyId = column[Long]("COMPANY_ID")
val validPeriodType = column[PeriodTypes.Value]("PERIOD_TPE")
val validPeriod = column[Int]("VALID_PERIOD")
val isPublished = column[Boolean]("IS_PUBLISHED")
val scope = column[Option[Long]]("SCOPE")
override def * = (
certificateId,
date,
isDeleted,
title,
isPermanent,
companyId,
validPeriodType,
validPeriod,
isPublished,
scope) <> (CertificateHistory.tupled, CertificateHistory.unapply)
}
lazy val certificateHistoryTQ = TableQuery[CertificateHistoryTable]
lazy val userStatusHistoryTQ = TableQuery[UserStatusHistoryTable]
}
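// A minimal usage sketch (not part of the original updater): it shows how a service
// mixing in CertificateHistoryTableComponent could query the tables defined above.
// The trait and method names below are illustrative assumptions, not Valamis API.
trait CertificateHistoryQueries extends CertificateHistoryTableComponent { self: SlickProfile =>
  import driver.api._
  // All history rows for one certificate, newest first.
  def historyByCertificate(certificateId: Long): DBIO[Seq[CertificateHistory]] =
    certificateHistoryTQ
      .filter(_.certificateId === certificateId)
      .sortBy(_.date.desc)
      .result
}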
| arcusys/Valamis | valamis-updaters/src/main/scala/com/arcusys/valamis/updaters/version310/certificateHistory/CertificateHistoryTableComponent.scala | Scala | gpl-3.0 | 2,184 |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jayway.awaitility.scala
import org.junit._
import org.junit.Assert._
import com.jayway.awaitility.Awaitility._
import java.util.concurrent.TimeUnit.MILLISECONDS
import com.jayway.awaitility.core.ConditionTimeoutException
@Test
class AwaitilitySupportTest extends AwaitilitySupport {
@Test
def functionAsCondition() = {
val c1 = new Counter()
val c2 = new Counter()
await until { c1.count() + c2.count() > 3 }
await until { isDone() }
await until isDone
}
@Test(expected = classOf[ConditionTimeoutException])
def timeout() = {
await atMost(500, MILLISECONDS) until { 2 == 1 }
}
@Test
def awaitWithAlias() = {
try {
await("scala") atMost(500, MILLISECONDS) until { 2 == 1 }
fail("Expected timeout exception")
} catch {
case e : ConditionTimeoutException =>
assertEquals("Condition with alias 'scala' didn't complete within 500 milliseconds because condition was not fulfilled.", e getMessage)
}
}
class Counter {
var value = 0
def count() = {
value = value + 1
value
}
}
def isDone() : Boolean = true
var c = 0
def count() = {
c = c + 1
c
}
}
| tkrueger/awaitility | awaitility-scala/src/test/scala/com/jayway/awaitility/scala/AwaitilitySupportTest.scala | Scala | apache-2.0 | 1,825 |
package io.scalajs.nodejs
package zlib
import scala.scalajs.js
/**
* Decompress a raw deflate stream.
* @author lawrence.daniels@gmail.com
*/
@js.native
trait InflateRaw extends CompressionAlgorithm
| scalajs-io/nodejs | app/common/src/main/scala/io/scalajs/nodejs/zlib/InflateRaw.scala | Scala | apache-2.0 | 206 |
package chandu0101.scalajs.react.components.reactbootstrap
import japgolly.scalajs.react.ReactComponentU_
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.React
import scala.scalajs.js
/**
 * Created by chandrasekharkode on 11/29/14.
 *
 * This is a wrapper for react-bootstrap; you need to include the react-bootstrap js source file.
 * Warning: it is not currently supported.
 */
object ReactBootstrap extends js.Object {
def Button: js.Dynamic = js.native
def Accordion: js.Dynamic = js.native
def Affix: js.Dynamic = js.native
def Alert: js.Dynamic = js.native
def Badge: js.Dynamic = js.native
def ButtonGroup: js.Dynamic = js.native
def ButtonToolbar: js.Dynamic = js.native
def Carousel: js.Dynamic = js.native
def Col: js.Dynamic = js.native
def DropdownButton: js.Dynamic = js.native
def DropdownMenu: js.Dynamic = js.native
def Glyphicon: js.Dynamic = js.native
def Grid: js.Dynamic = js.native
def Input: js.Dynamic = js.native
def Interpolate : js.Dynamic = js.native
def Jumbotron : js.Dynamic = js.native
def Label : js.Dynamic = js.native
def ListGroup : js.Dynamic = js.native
def ListGroupItem : js.Dynamic = js.native
def MenuItem : js.Dynamic = js.native
def Modal : js.Dynamic = js.native
def ModalTrigger : js.Dynamic = js.native
def Nav : js.Dynamic = js.native
def NavItem : js.Dynamic = js.native
def Navbar : js.Dynamic = js.native
def OverlayTrigger : js.Dynamic = js.native
def PageHeader : js.Dynamic = js.native
def PageItem : js.Dynamic = js.native
def Pager : js.Dynamic = js.native
def Panel : js.Dynamic = js.native
def PanelGroup : js.Dynamic = js.native
def Popover : js.Dynamic = js.native
def ProgressBar : js.Dynamic = js.native
def Row : js.Dynamic = js.native
def SplitButton : js.Dynamic = js.native
def propTypes : js.Dynamic = js.native
def SubNav : js.Dynamic = js.native
def TabPane : js.Dynamic = js.native
def TabbedArea : js.Dynamic = js.native
def Table : js.Dynamic = js.native
def Tooltip : js.Dynamic = js.native
def Well : js.Dynamic = js.native
}
object bootStrap {
case class Button(bsSize: js.UndefOr[String] = js.undefined, onClick : js.UndefOr[js.Function0[Unit]] = js.undefined,
navDropdown: js.UndefOr[Boolean] = js.undefined, block: js.UndefOr[Boolean] = js.undefined, disabled: js.UndefOr[Boolean] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, componentClass: js.UndefOr[String] = js.undefined, navItem: js.UndefOr[Boolean] = js.undefined, bsClass: js.UndefOr[String] = js.undefined, active: js.UndefOr[Boolean] = js.undefined ,hrefB : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
active.foreach(v => p.updateDynamic("active")(v))
hrefB.foreach(v => p.updateDynamic("href")(v))
navItem.foreach(v => p.updateDynamic("navItem")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
navDropdown.foreach(v => p.updateDynamic("navDropdown")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
block.foreach(v => p.updateDynamic("block")(v))
        onClick.foreach(v => p.updateDynamic("onClick")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
componentClass.foreach(v => p.updateDynamic("componentClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Button)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Accordion() {
def toJs: js.Object = {
val p = js.Dynamic.literal()
p
}
def apply(): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Accordion)
f(toJs).asInstanceOf[ReactComponentU_]
}
}
case class Affix(offset: js.UndefOr[Double] = js.undefined, offsetTop: js.UndefOr[Double] = js.undefined, offsetBottom: js.UndefOr[Double] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
offset.foreach(v => p.updateDynamic("offset")(v))
offsetTop.foreach(v => p.updateDynamic("offsetTop")(v))
offsetBottom.foreach(v => p.updateDynamic("offsetBottom")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Affix)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
//TODO onDismiss: React.PropTypes.func,
case class Alert(bsClass: js.UndefOr[String] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, bsSize: js.UndefOr[String] = js.undefined, dismissAfter: js.UndefOr[Double] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
dismissAfter.foreach(v => p.updateDynamic("dismissAfter")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Alert)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Badge(pullRight: js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
pullRight.foreach(v => p.updateDynamic("pullRight")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Badge)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class ButtonGroup(justified: js.UndefOr[Boolean] = js.undefined, bsSize: js.UndefOr[String] = js.undefined, vertical: js.UndefOr[Boolean] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, bsClass: js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
justified.foreach(v => p.updateDynamic("justified")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
vertical.foreach(v => p.updateDynamic("vertical")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ButtonGroup)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class ButtonToolbar(bsClass: js.UndefOr[String] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, bsSize: js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ButtonToolbar)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
//TODO onSelect: React.PropTypes.func,
// onSlideEnd: React.PropTypes.func,
case class Carousel(bsSize: js.UndefOr[String] = js.undefined, defaultActiveIndex: js.UndefOr[Double] = js.undefined, onSlideEnd: js.UndefOr[js.Any] = js.undefined, direction: js.UndefOr[String] = js.undefined, wrap: js.UndefOr[Boolean] = js.undefined, slide: js.UndefOr[Boolean] = js.undefined, indicators: js.UndefOr[Boolean] = js.undefined, activeIndex: js.UndefOr[Double] = js.undefined, pauseOnHover: js.UndefOr[Boolean] = js.undefined, onSelect: js.UndefOr[js.Any] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, bsClass: js.UndefOr[String] = js.undefined, controls: js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
controls.foreach(v => p.updateDynamic("controls")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
indicators.foreach(v => p.updateDynamic("indicators")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
defaultActiveIndex.foreach(v => p.updateDynamic("defaultActiveIndex")(v))
activeIndex.foreach(v => p.updateDynamic("activeIndex")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
slide.foreach(v => p.updateDynamic("slide")(v))
pauseOnHover.foreach(v => p.updateDynamic("pauseOnHover")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
direction.foreach(v => p.updateDynamic("direction")(v))
wrap.foreach(v => p.updateDynamic("wrap")(v))
onSlideEnd.foreach(v => p.updateDynamic("onSlideEnd")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Carousel)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Col(xs: js.UndefOr[Double] = js.undefined, lgPush: js.UndefOr[Double] = js.undefined, xsOffset: js.UndefOr[Double] = js.undefined, smPush: js.UndefOr[Double] = js.undefined, xsPull: js.UndefOr[Double] = js.undefined, xsPush: js.UndefOr[Double] = js.undefined, smOffset: js.UndefOr[Double] = js.undefined, lg: js.UndefOr[Double] = js.undefined, sm: js.UndefOr[Double] = js.undefined, lgPull: js.UndefOr[Double] = js.undefined, mdOffset: js.UndefOr[Double] = js.undefined, componentClass: js.UndefOr[String], mdPush: js.UndefOr[Double] = js.undefined, md: js.UndefOr[Double] = js.undefined, lgOffset: js.UndefOr[Double] = js.undefined, smPull: js.UndefOr[Double] = js.undefined, mdPull: js.UndefOr[Double] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
lgPush.foreach(v => p.updateDynamic("lgPush")(v))
lg.foreach(v => p.updateDynamic("lg")(v))
mdOffset.foreach(v => p.updateDynamic("mdOffset")(v))
mdPush.foreach(v => p.updateDynamic("mdPush")(v))
sm.foreach(v => p.updateDynamic("sm")(v))
lgPull.foreach(v => p.updateDynamic("lgPull")(v))
xsOffset.foreach(v => p.updateDynamic("xsOffset")(v))
smPull.foreach(v => p.updateDynamic("smPull")(v))
lgOffset.foreach(v => p.updateDynamic("lgOffset")(v))
xsPush.foreach(v => p.updateDynamic("xsPush")(v))
md.foreach(v => p.updateDynamic("md")(v))
smOffset.foreach(v => p.updateDynamic("smOffset")(v))
mdPull.foreach(v => p.updateDynamic("mdPull")(v))
smPush.foreach(v => p.updateDynamic("smPush")(v))
componentClass.foreach(v => p.updateDynamic("componentClass")(v))
xs.foreach(v => p.updateDynamic("xs")(v))
xsPull.foreach(v => p.updateDynamic("xsPull")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Col)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
// TODO onClick: React.PropTypes.func,
// onSelect: React.PropTypes.func,
case class DropdownButton(bsSize: js.UndefOr[String] = js.undefined, onClick: js.UndefOr[js.Any] = js.undefined, dropup: js.UndefOr[Boolean] = js.undefined, onSelect: js.UndefOr[js.Any] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, title: js.UndefOr[String] = js.undefined, href: js.UndefOr[String] = js.undefined, navItem: js.UndefOr[Boolean] = js.undefined, pullRight: js.UndefOr[Boolean] = js.undefined, bsClass: js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
navItem.foreach(v => p.updateDynamic("navItem")(v))
pullRight.foreach(v => p.updateDynamic("pullRight")(v))
onClick.foreach(v => p.updateDynamic("onClick")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
dropup.foreach(v => p.updateDynamic("dropup")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.DropdownButton)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
//TODO onSelect: React.PropTypes.func
case class DropdownMenu(pullRight: js.UndefOr[Boolean] = js.undefined, onSelect: js.UndefOr[js.Any] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
pullRight.foreach(v => p.updateDynamic("pullRight")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.DropdownMenu)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Glyphicon(glyph: js.UndefOr[String]) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
glyph.foreach(v => p.updateDynamic("glyph")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Glyphicon)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
//TODO componentClass: React.PropTypes.node.isRequired
case class Grid(fluid: js.UndefOr[Boolean] = js.undefined, componentClass: js.UndefOr[String]) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
fluid.foreach(v => p.updateDynamic("fluid")(v))
componentClass.foreach(v => p.updateDynamic("componentClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Grid)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class input(groupClassName: js.UndefOr[String] = js.undefined, wrapperClassName: js.UndefOr[String] = js.undefined, labelB: js.UndefOr[String] = js.undefined, labelClassName: js.UndefOr[String] = js.undefined, addonBefore: js.UndefOr[String] = js.undefined, buttonAfter: js.UndefOr[String] = js.undefined, help: js.UndefOr[String] = js.undefined, buttonBefore: js.UndefOr[String] = js.undefined, hasFeedback: js.UndefOr[Boolean] = js.undefined, addonAfter: js.UndefOr[String] = js.undefined, disabledB: js.UndefOr[Boolean] = js.undefined, bsStyle: js.UndefOr[String] = js.undefined, typeB: js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
labelB.foreach(v => p.updateDynamic("label")(v))
buttonBefore.foreach(v => p.updateDynamic("buttonBefore")(v))
groupClassName.foreach(v => p.updateDynamic("groupClassName")(v))
help.foreach(v => p.updateDynamic("help")(v))
wrapperClassName.foreach(v => p.updateDynamic("wrapperClassName")(v))
hasFeedback.foreach(v => p.updateDynamic("hasFeedback")(v))
buttonAfter.foreach(v => p.updateDynamic("buttonAfter")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
addonAfter.foreach(v => p.updateDynamic("addonAfter")(v))
disabledB.foreach(v => p.updateDynamic("disabled")(v))
typeB.foreach(v => p.updateDynamic("type")(v))
addonBefore.foreach(v => p.updateDynamic("addonBefore")(v))
labelClassName.foreach(v => p.updateDynamic("labelClassName")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Input)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Interpolate(format : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
format.foreach(v => p.updateDynamic("format")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Interpolate)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Jumbotron() {
def toJs: js.Object = {
val p = js.Dynamic.literal()
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Jumbotron)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class label(bsClass : js.UndefOr[String] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,bsSize : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Label)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
//TODO onClick: React.PropTypes.func
case class ListGroup(onClick : js.UndefOr[js.Any] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onClick.foreach(v => p.updateDynamic("onClick")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ListGroup)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class ListGroupItem(bsSize : js.UndefOr[String] = js.undefined,onClick : js.UndefOr[js.Any] = js.undefined,header : js.UndefOr[String] = js.undefined,disabled : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,eventKey : js.UndefOr[js.Any] = js.undefined,bsClass : js.UndefOr[String] = js.undefined,active : js.UndefOr[js.Any] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
active.foreach(v => p.updateDynamic("active")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
onClick.foreach(v => p.updateDynamic("onClick")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
header.foreach(v => p.updateDynamic("header")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ListGroupItem)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class MenuItem(divider : js.UndefOr[Boolean] = js.undefined,
header : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,title : js.UndefOr[String] = js.undefined,href : js.UndefOr[String] = js.undefined,eventKey : js.UndefOr[String]=js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
divider.foreach(v => p.updateDynamic("divider")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
header.foreach(v => p.updateDynamic("header")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.MenuItem)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Modal(backdrop : js.UndefOr[String] = js.undefined,onRequestHide : js.UndefOr[js.Any],keyboard : js.UndefOr[Boolean] = js.undefined,bsSize : js.UndefOr[String] = js.undefined,closeButton : js.UndefOr[Boolean] = js.undefined,animation : js.UndefOr[Boolean] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
keyboard.foreach(v => p.updateDynamic("keyboard")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
backdrop.foreach(v => p.updateDynamic("backdrop")(v))
animation.foreach(v => p.updateDynamic("animation")(v))
onRequestHide.foreach(v => p.updateDynamic("onRequestHide")(v))
closeButton.foreach(v => p.updateDynamic("closeButton")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Modal)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class ModalTrigger(modal : js.UndefOr[String]) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
modal.foreach(v => p.updateDynamic("modal")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ModalTrigger)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Nav(stacked : js.UndefOr[Boolean] = js.undefined,navbar : js.UndefOr[Boolean] = js.undefined,justified : js.UndefOr[Boolean] = js.undefined,defaultExpanded : js.UndefOr[Boolean] = js.undefined,expanded : js.UndefOr[Boolean] = js.undefined,collapsable : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,eventKey : js.UndefOr[js.Any]=js.undefined,right : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
navbar.foreach(v => p.updateDynamic("navbar")(v))
collapsable.foreach(v => p.updateDynamic("collapsable")(v))
right.foreach(v => p.updateDynamic("right")(v))
stacked.foreach(v => p.updateDynamic("stacked")(v))
expanded.foreach(v => p.updateDynamic("expanded")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
justified.foreach(v => p.updateDynamic("justified")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
defaultExpanded.foreach(v => p.updateDynamic("defaultExpanded")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Nav)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class NavItem(bsSize : js.UndefOr[String] = js.undefined,disabled : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,href : js.UndefOr[String] = js.undefined,eventKey : js.UndefOr[js.Any]=js.undefined,bsClass : js.UndefOr[String] = js.undefined,active : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
active.foreach(v => p.updateDynamic("active")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.NavItem)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Navbar(onToggle : js.UndefOr[js.Any] = js.undefined,toggleButton : js.UndefOr[String] = js.undefined,bsSize : js.UndefOr[String] = js.undefined,fluid : js.UndefOr[Boolean] = js.undefined,role : js.UndefOr[String] = js.undefined,fixedTop : js.UndefOr[Boolean] = js.undefined,staticTop : js.UndefOr[Boolean] = js.undefined,navExpanded : js.UndefOr[Boolean] = js.undefined,brand : js.UndefOr[String] = js.undefined,inverse : js.UndefOr[Boolean] = js.undefined,fixedBottom : js.UndefOr[Boolean] = js.undefined,defaultNavExpanded : js.UndefOr[Boolean] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,componentClass : js.UndefOr[String],bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
defaultNavExpanded.foreach(v => p.updateDynamic("defaultNavExpanded")(v))
navExpanded.foreach(v => p.updateDynamic("navExpanded")(v))
inverse.foreach(v => p.updateDynamic("inverse")(v))
onToggle.foreach(v => p.updateDynamic("onToggle")(v))
staticTop.foreach(v => p.updateDynamic("staticTop")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
fixedBottom.foreach(v => p.updateDynamic("fixedBottom")(v))
fixedTop.foreach(v => p.updateDynamic("fixedTop")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
componentClass.foreach(v => p.updateDynamic("componentClass")(v))
role.foreach(v => p.updateDynamic("role")(v))
toggleButton.foreach(v => p.updateDynamic("toggleButton")(v))
fluid.foreach(v => p.updateDynamic("fluid")(v))
brand.foreach(v => p.updateDynamic("brand")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Navbar)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class OverlayTrigger(delay : js.UndefOr[Double] = js.undefined,overlay : js.UndefOr[ReactComponentU_],delayShow : js.UndefOr[Double] = js.undefined,delayHide : js.UndefOr[Double] = js.undefined,placement : js.UndefOr[String] = js.undefined,defaultOverlayShown : js.UndefOr[Boolean] = js.undefined,trigger : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
delayShow.foreach(v => p.updateDynamic("delayShow")(v))
placement.foreach(v => p.updateDynamic("placement")(v))
delayHide.foreach(v => p.updateDynamic("delayHide")(v))
trigger.foreach(v => p.updateDynamic("trigger")(v))
overlay.foreach(v => p.updateDynamic("overlay")(v))
defaultOverlayShown.foreach(v => p.updateDynamic("defaultOverlayShown")(v))
delay.foreach(v => p.updateDynamic("delay")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.OverlayTrigger)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class PageHeader() {
def toJs: js.Object = {
val p = js.Dynamic.literal()
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.PageHeader)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class PageItem(previous : js.UndefOr[Boolean] = js.undefined,next : js.UndefOr[Boolean] = js.undefined,disabled : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,eventKey : js.UndefOr[js.Any]=js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
next.foreach(v => p.updateDynamic("next")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
previous.foreach(v => p.updateDynamic("previous")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.PageItem)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Pager(onSelect : js.UndefOr[js.Any] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Pager)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Panel(bsSize : js.UndefOr[String] = js.undefined,defaultExpanded : js.UndefOr[Boolean] = js.undefined,
expanded : js.UndefOr[Boolean] = js.undefined,collapsable : js.UndefOr[Boolean] = js.undefined,
header : js.UndefOr[String] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,eventKey : js.UndefOr[js.Any]=js.undefined,footer : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
collapsable.foreach(v => p.updateDynamic("collapsable")(v))
expanded.foreach(v => p.updateDynamic("expanded")(v))
eventKey.foreach(v => p.updateDynamic("eventKey")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
footer.foreach(v => p.updateDynamic("footer")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
header.foreach(v => p.updateDynamic("header")(v))
defaultExpanded.foreach(v => p.updateDynamic("defaultExpanded")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Panel)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class PanelGroup(activeKey : js.UndefOr[js.Any]=js.undefined,bsSize : js.UndefOr[String] = js.undefined,collapsable : js.UndefOr[Boolean] = js.undefined,defaultActiveKey : js.UndefOr[js.Any]=js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
activeKey.foreach(v => p.updateDynamic("activeKey")(v))
defaultActiveKey.foreach(v => p.updateDynamic("defaultActiveKey")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
collapsable.foreach(v => p.updateDynamic("collapsable")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.PanelGroup)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Popover(bsSize : js.UndefOr[String] = js.undefined,positionLeft : js.UndefOr[Double] = js.undefined,positionTop : js.UndefOr[Double] = js.undefined,arrowOffsetLeft : js.UndefOr[Double] = js.undefined,placement : js.UndefOr[String] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined,arrowOffsetTop : js.UndefOr[Double] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
arrowOffsetLeft.foreach(v => p.updateDynamic("arrowOffsetLeft")(v))
placement.foreach(v => p.updateDynamic("placement")(v))
positionTop.foreach(v => p.updateDynamic("positionTop")(v))
positionLeft.foreach(v => p.updateDynamic("positionLeft")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
arrowOffsetTop.foreach(v => p.updateDynamic("arrowOffsetTop")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Popover)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class ProgressBar(label : js.UndefOr[String] = js.undefined,
bsStyle : js.UndefOr[String] = js.undefined,
key : js.UndefOr[Int] = js.undefined,
striped : js.UndefOr[Boolean] = js.undefined,min : js.UndefOr[Double] = js.undefined,now : js.UndefOr[Double] = js.undefined,max : js.UndefOr[Double] = js.undefined,srOnly : js.UndefOr[Boolean] = js.undefined,active : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
label.foreach(v => p.updateDynamic("label")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
key.foreach(v => p.updateDynamic("key")(v))
active.foreach(v => p.updateDynamic("active")(v))
max.foreach(v => p.updateDynamic("max")(v))
striped.foreach(v => p.updateDynamic("striped")(v))
now.foreach(v => p.updateDynamic("now")(v))
srOnly.foreach(v => p.updateDynamic("srOnly")(v))
min.foreach(v => p.updateDynamic("min")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.ProgressBar)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Row(componentClass : js.UndefOr[String]) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
componentClass.foreach(v => p.updateDynamic("componentClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Row)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class SplitButton(bsSize : js.UndefOr[String] = js.undefined,onClick : js.UndefOr[js.Any] = js.undefined,dropdownTitle : js.UndefOr[String] = js.undefined,disabled : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,href : js.UndefOr[String] = js.undefined,pullRight : js.UndefOr[Boolean] = js.undefined,bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
dropdownTitle.foreach(v => p.updateDynamic("dropdownTitle")(v))
pullRight.foreach(v => p.updateDynamic("pullRight")(v))
onClick.foreach(v => p.updateDynamic("onClick")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.SplitButton)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class propTypes(bsSize : js.UndefOr[String] = js.undefined,text : js.UndefOr[String] = js.undefined,disabled : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,href : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined,active : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
active.foreach(v => p.updateDynamic("active")(v))
text.foreach(v => p.updateDynamic("text")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.propTypes)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class SubNav(bsSize : js.UndefOr[String] = js.undefined,text : js.UndefOr[String] = js.undefined,disabled : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,title : js.UndefOr[String] = js.undefined,href : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined,active : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
active.foreach(v => p.updateDynamic("active")(v))
text.foreach(v => p.updateDynamic("text")(v))
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
disabled.foreach(v => p.updateDynamic("disabled")(v))
href.foreach(v => p.updateDynamic("href")(v))
title.foreach(v => p.updateDynamic("title")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.SubNav)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class TabPane() {
def toJs: js.Object = {
val p = js.Dynamic.literal()
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.TabPane)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class TabbedArea(bsSize : js.UndefOr[String] = js.undefined,animation : js.UndefOr[Boolean] = js.undefined,onSelect : js.UndefOr[js.Any] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
onSelect.foreach(v => p.updateDynamic("onSelect")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
animation.foreach(v => p.updateDynamic("animation")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.TabbedArea)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class table(bordered : js.UndefOr[Boolean] = js.undefined,hover : js.UndefOr[Boolean] = js.undefined,striped : js.UndefOr[Boolean] = js.undefined,condensed : js.UndefOr[Boolean] = js.undefined,responsive : js.UndefOr[Boolean] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bordered.foreach(v => p.updateDynamic("bordered")(v))
striped.foreach(v => p.updateDynamic("striped")(v))
condensed.foreach(v => p.updateDynamic("condensed")(v))
responsive.foreach(v => p.updateDynamic("responsive")(v))
hover.foreach(v => p.updateDynamic("hover")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Table)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Tooltip(bsSize : js.UndefOr[String] = js.undefined,positionLeft : js.UndefOr[Double] = js.undefined,positionTop : js.UndefOr[Double] = js.undefined,arrowOffsetLeft : js.UndefOr[Double] = js.undefined,placement : js.UndefOr[String] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,bsClass : js.UndefOr[String] = js.undefined,arrowOffsetTop : js.UndefOr[Double] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
arrowOffsetLeft.foreach(v => p.updateDynamic("arrowOffsetLeft")(v))
placement.foreach(v => p.updateDynamic("placement")(v))
positionTop.foreach(v => p.updateDynamic("positionTop")(v))
positionLeft.foreach(v => p.updateDynamic("positionLeft")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
arrowOffsetTop.foreach(v => p.updateDynamic("arrowOffsetTop")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Tooltip)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
case class Well(bsClass : js.UndefOr[String] = js.undefined,bsStyle : js.UndefOr[String] = js.undefined,bsSize : js.UndefOr[String] = js.undefined) {
def toJs: js.Object = {
val p = js.Dynamic.literal()
bsClass.foreach(v => p.updateDynamic("bsClass")(v))
bsStyle.foreach(v => p.updateDynamic("bsStyle")(v))
bsSize.foreach(v => p.updateDynamic("bsSize")(v))
p
}
def apply(children: TagMod*): ReactComponentU_ = {
val f = React.asInstanceOf[js.Dynamic].createFactory(ReactBootstrap.Well)
f(toJs, js.Array(children: _*)).asInstanceOf[ReactComponentU_]
}
}
}
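// A minimal usage sketch (not part of the original wrapper): it assumes the react-bootstrap
// JS bundle is loaded on the page so that ReactBootstrap.Button resolves at runtime.
// The object and handler names below are illustrative only.
object BootstrapUsageExample {
  import japgolly.scalajs.react.vdom.prefix_<^._
  private val onSave: js.Function0[Unit] = () => println("saved")
  // Renders a small primary button with a click handler and a single child node.
  val saveButton: ReactComponentU_ =
    bootStrap.Button(bsStyle = "primary", bsSize = "small", onClick = onSave)(<.span("Save"))
}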
| coreyauger/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/reactbootstrap/Bootstrap.scala | Scala | apache-2.0 | 41,721 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.control.controls
import org.orbeon.dom
import org.orbeon.oxf.common.Version
import org.orbeon.oxf.test.{DocumentTestBase, ResourceManagerSupport}
import org.orbeon.oxf.xml.Dom4j.elemToDocument
import org.scalatest.FunSpecLike
class SubmissionHeadersTest
extends DocumentTestBase
with ResourceManagerSupport
with FunSpecLike {
describe("Submission headers") {
val doc: dom.Document =
<xh:html
xmlns:xf="http://www.w3.org/2002/xforms"
xmlns:xh="http://www.w3.org/1999/xhtml"
xmlns:xxf="http://orbeon.org/oxf/xml/xforms">
<xh:head>
<xf:model xxf:xpath-analysis="true">
<xf:instance id="instance">
<value/>
</xf:instance>
<xf:instance id="headers">
<headers>
<header name="Header1" value="value1"/>
<header name="Header2" value="value2"/>
<header name="Header3" value="value3"/>
</headers>
</xf:instance>
</xf:model>
</xh:head>
<xh:body>
<xf:output id="output1" ref="instance()" mediatype="image/*">
<xf:header ref="instance('headers')/header">
<xf:name value="@name"/>
<xf:value value="@value"/>
</xf:header>
</xf:output>
<xf:output id="output2" ref="instance()" mediatype="image/*">
<!-- Original headers -->
<xf:header ref="instance('headers')/header">
<xf:name value="@name"/>
<xf:value value="@value"/>
</xf:header>
<!-- Prepend to 1 header -->
<xf:header combine="prepend">
<xf:name>Header1</xf:name>
<xf:value>prepend1</xf:value>
</xf:header>
<!-- Append to 1 header -->
<xf:header combine="append">
<xf:name>Header2</xf:name>
<xf:value>append2</xf:value>
</xf:header>
<!-- Replace 1 header -->
<xf:header combine="replace">
<xf:name>Header3</xf:name>
<xf:value>replace3</xf:value>
</xf:header>
<!-- Prepend to 3 headers -->
<xf:header combine="prepend" ref="instance('headers')/header">
<xf:name value="@name"/>
<xf:value>prepend2</xf:value>
</xf:header>
<!-- Append to 3 headers -->
<xf:header combine="append" ref="instance('headers')/header">
<xf:name value="@name"/>
<xf:value>append2</xf:value>
</xf:header>
<!-- Additional header -->
<xf:header>
<xf:name>Header4</xf:name>
<xf:value>value4</xf:value>
</xf:header>
</xf:output>
</xh:body>
</xh:html>
// Expected results per control
val expected = List(
"output1" → List(
"Header1" → List("value1"),
"Header2" → List("value2"),
"Header3" → List("value3")
),
"output2" → List(
"Header1" → List("prepend2", "prepend1", "value1", "append2"),
"Header2" → List("prepend2", "value2", "append2", "append2"),
"Header3" → List("prepend2", "replace3", "append2"),
"Header4" → List("value4")
)
)
it("must be evaluated following prepend/append/replace rules") {
assume(Version.isPE) // because of `xxf:xpath-analysis="true"`
withXFormsDocument(doc) { xfcd ⇒
for {
(controlId, expectedHeaders) ← expected
control = xfcd.getObjectByEffectiveId(controlId).asInstanceOf[XFormsOutputControl]
actualHeaders = control.evaluatedHeaders
(expectedHeaderName, expectedHeaderValues) ← expectedHeaders
} locally {
assert(expectedHeaderValues === actualHeaders(expectedHeaderName))
}
}
}
}
}
| brunobuzzi/orbeon-forms | xforms/jvm/src/test/scala/org/orbeon/oxf/xforms/control/controls/SubmissionHeadersTest.scala | Scala | lgpl-2.1 | 4,643 |
/**
* Copyright (C) 2014 TU Berlin (peel@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.peelframework.core.beans.data
import java.io.FileNotFoundException
import java.nio.file.{Files, Paths}
import org.peelframework.core.beans.system.{FileSystem, System}
/** Dataset that is copied from a local filesystem to a specified target location.
*
* If the data already exists at the specified location, it is '''not''' copied again!
*
* @param src Local path where the data is stored.
* @param dst Path in the distributed filesystem where the data is stored.
* @param fs The filesystem that is used.
*/
class CopiedDataSet(val src: String, val dst: String, val fs: System with FileSystem) extends DataSet(dst, Set[System](fs)) {
import scala.language.implicitConversions
override def materialize() = {
// resolve parameters from the current config in src and dst
val dst = resolve(this.dst)
val src = resolve(this.src)
logger.info(s"Copying data set '$src' to '$dst'")
if (fs.copyFromLocal(src, dst) != 0) throw new RuntimeException(s"Could not copy '$src' to '$dst'")
}
}
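// A minimal usage sketch (not part of the original sources): `hdfs` stands for a configured
// `System with FileSystem` bean (e.g. an HDFS system), and the paths are illustrative
// config placeholders that `resolve` would substitute at runtime.
object CopiedDataSetUsageExample {
  def wordsDataSet(hdfs: System with FileSystem): CopiedDataSet =
    new CopiedDataSet(
      src = "${app.path.datasets}/words.txt", // local source file
      dst = "${system.hadoop-2.path.input}/words.txt", // target path in the distributed FS
      fs = hdfs)
}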
| peelframework/peel | peel-core/src/main/scala/org/peelframework/core/beans/data/CopiedDataSet.scala | Scala | apache-2.0 | 1,663 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.filter.visitor
import java.util.regex.Pattern
import java.util.{Collections, Date}
import org.geotools.filter.LikeToRegexConverter
import org.geotools.filter.text.ecql.ECQL
import org.geotools.filter.visitor.{DuplicatingFilterVisitor, ExpressionTypeVisitor, IsStaticExpressionVisitor}
import org.locationtech.geomesa.filter.{FilterHelper, GeometryProcessing}
import org.locationtech.geomesa.utils.geotools.converters.FastConverter
import org.opengis.feature.`type`.AttributeDescriptor
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter._
import org.opengis.filter.expression._
import org.opengis.filter.spatial._
import org.opengis.filter.temporal._
import org.opengis.temporal.Period
import scala.util.{Success, Try}
/**
* Updates filters to handle namespaces, default property names, IDL, dwithin units,
* type binding, and to remove filters that aren't meaningful
*/
class QueryPlanFilterVisitor(sft: SimpleFeatureType) extends DuplicatingFilterVisitor {
import FilterHelper.isFilterWholeWorld
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
import scala.collection.JavaConverters._
private val typeVisitor = new ExpressionTypeVisitor(sft) {
override def visit(expression: PropertyName, extraData: AnyRef): AnyRef = {
val descriptor = expression.evaluate(sft, classOf[AttributeDescriptor])
// json attributes can return anything due to json path evaluation, so to avoid binding incorrectly
// we return classOf[Object] here
// we have to re-fetch the original descriptor as the json property accessor strips out the json flag
// to prevent transform serialization issues
if (descriptor == null || sft.getDescriptor(descriptor.getLocalName).isJson) {
classOf[Object]
} else {
descriptor.getType.getBinding
}
}
}
override def visit(f: Or, data: AnyRef): AnyRef = {
val children = new java.util.ArrayList[Filter](f.getChildren.size)
var i = 0
while (i < f.getChildren.size) {
val child = f.getChildren.get(i).accept(this, data).asInstanceOf[Filter]
if (child == Filter.INCLUDE) {
// INCLUDE OR foo == INCLUDE
return Filter.INCLUDE
} else if (child != Filter.EXCLUDE) {
// EXCLUDE OR foo == foo
children.add(child)
}
i += 1
}
children.size() match {
case 0 => Filter.EXCLUDE
case 1 => children.get(0)
case _ => getFactory(data).or(children)
}
}
override def visit(f: And, data: AnyRef): AnyRef = {
val children = new java.util.ArrayList[Filter](f.getChildren.size)
var i = 0
while (i < f.getChildren.size) {
val child = f.getChildren.get(i).accept(this, data).asInstanceOf[Filter]
if (child == Filter.EXCLUDE) {
// EXCLUDE AND foo == EXCLUDE
return Filter.EXCLUDE
} else if (child != Filter.INCLUDE) {
// INCLUDE AND foo == foo
children.add(child)
}
i += 1
}
children.size() match {
case 0 => Filter.INCLUDE
case 1 => children.get(0)
case _ => getFactory(data).and(children)
}
}
// note: for the following filters, we call super.visit first to handle any property names
override def visit(f: DWithin, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(f: BBOX, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(f: Within, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(f: Intersects, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(f: Overlaps, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(f: Contains, data: AnyRef): AnyRef =
if (isFilterWholeWorld(f)) { Filter.INCLUDE } else {
GeometryProcessing.process(super.visit(f, data).asInstanceOf[BinarySpatialOperator], sft, getFactory(data))
}
override def visit(expression: PropertyName, extraData: AnyRef): AnyRef = {
val name = expression.getPropertyName
if (name == null || name.isEmpty) {
// use the default geometry name
val geomName = sft.getGeometryDescriptor.getLocalName
getFactory(extraData).property(geomName, expression.getNamespaceContext)
} else {
val index = name.indexOf(':')
if (index == -1) {
getFactory(extraData).property(name)
} else {
// strip off the namespace
getFactory(extraData).property(name.substring(index + 1), expression.getNamespaceContext)
}
}
}
override def visit(filter: PropertyIsEqualTo, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).equal(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsNotEqualTo, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).notEqual(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsBetween, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression, filter.getLowerBoundary, filter.getUpperBoundary))
if (target == null) { super.visit(filter, extraData) } else {
val e = bind(filter.getExpression, extraData, target)
val lb = bind(filter.getLowerBoundary, extraData, target)
val ub = bind(filter.getUpperBoundary, extraData, target)
getFactory(extraData).between(e, lb, ub, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsGreaterThan, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).greater(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsGreaterThanOrEqualTo, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).greaterOrEqual(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsLessThan, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).less(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: PropertyIsLessThanOrEqualTo, extraData: AnyRef): AnyRef = {
val target = binding(Seq(filter.getExpression1, filter.getExpression2))
if (target == null) { super.visit(filter, extraData) } else {
val e1 = bind(filter.getExpression1, extraData, target)
val e2 = bind(filter.getExpression2, extraData, target)
getFactory(extraData).lessOrEqual(e1, e2, filter.isMatchingCase, filter.getMatchAction)
}
}
override def visit(filter: After, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).after(e1, e2, filter.getMatchAction)
}
override def visit(filter: AnyInteracts, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).anyInteracts(e1, e2, filter.getMatchAction)
}
override def visit(filter: Before, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).before(e1, e2, filter.getMatchAction)
}
override def visit(filter: Begins, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).begins(e1, e2, filter.getMatchAction)
}
override def visit(filter: BegunBy, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).begins(e1, e2, filter.getMatchAction)
}
override def visit(filter: During, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).during(e1, e2, filter.getMatchAction)
}
override def visit(filter: EndedBy, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).endedBy(e1, e2, filter.getMatchAction)
}
override def visit(filter: Ends, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).ends(e1, e2, filter.getMatchAction)
}
override def visit(filter: Meets, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).meets(e1, e2, filter.getMatchAction)
}
override def visit(filter: MetBy, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).metBy(e1, e2, filter.getMatchAction)
}
override def visit(filter: OverlappedBy, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).overlappedBy(e1, e2, filter.getMatchAction)
}
override def visit(filter: TContains, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).tcontains(e1, e2, filter.getMatchAction)
}
override def visit(filter: TEquals, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).tequals(e1, e2, filter.getMatchAction)
}
override def visit(filter: TOverlaps, extraData: AnyRef): AnyRef = {
val e1 = bindTemporal(filter.getExpression1, extraData)
val e2 = bindTemporal(filter.getExpression2, extraData)
getFactory(extraData).toverlaps(e1, e2, filter.getMatchAction)
}
override def visit(function: Function, extraData: AnyRef): AnyRef = {
val types = Option(function.getFunctionName).map(_.getArguments.iterator).getOrElse(Collections.emptyIterator())
val params = function.getParameters.asScala.map { parameter =>
if (types.hasNext) {
bind(parameter, extraData, types.next.getType)
} else {
visit(parameter, extraData)
}
}
function match {
case f: InternalFunction => f.duplicate(params: _*)
case f => getFactory(extraData).function(f.getName, params: _*)
}
}
override protected def visit(expression: Expression, extraData: AnyRef): Expression = {
if (expression.accept(IsStaticExpressionVisitor.VISITOR, null).asInstanceOf[Boolean]) {
Try(expression.evaluate(null)) match {
case Success(lit) if lit != null => getFactory(extraData).literal(lit)
case _ => super.visit(expression, extraData)
}
} else {
super.visit(expression, extraData)
}
}
override def visit(filter: PropertyIsLike, extraData: AnyRef): AnyRef = {
try {
val pattern = new LikeToRegexConverter(filter).getPattern
Pattern.compile(pattern)
} catch {
case e: Exception =>
throw new IllegalArgumentException(s"The regex filter (${filter.getLiteral}) for the (i)like filter is invalid.", e)
}
super.visit(filter, extraData)
}
private def binding(expressions: Seq[Expression]): Class[_] = {
val bindings = expressions.flatMap {
case _: Literal => Seq.empty // don't consider literals, as we're trying to bind them to the right type
case e => Seq(e.accept(typeVisitor, null)).filter(_ != null)
}
bindings.distinct match {
case Seq(b) => b.asInstanceOf[Class[_]]
case _ => null // if not exactly one type, we can't bind it
}
}
private def bind(e: Expression, extraData: AnyRef, target: Class[_]): Expression = {
if (e.isInstanceOf[Literal]) {
val bound = FastConverter.convert(e.evaluate(null), target)
if (bound != null) {
return getFactory(extraData).literal(bound)
}
}
visit(e, extraData)
}
private def bindTemporal(e: Expression, extraData: AnyRef): Expression = {
if (e.isInstanceOf[Literal]) {
val lit = e.evaluate(null)
val bound = FastConverter.convertFirst[AnyRef](lit, Iterator(classOf[Period], classOf[Date]))
if (bound != null) {
return getFactory(extraData).literal(bound)
}
}
visit(e, extraData)
}
}
object QueryPlanFilterVisitor {
def apply(filter: Filter): Filter = filter.accept(new QueryPlanFilterVisitor(null), null).asInstanceOf[Filter]
def apply(sft: SimpleFeatureType, filter: Filter): Filter =
filter.accept(new QueryPlanFilterVisitor(sft), null).asInstanceOf[Filter]
}
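// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// Shows how the companion object's apply methods above could be exercised. It assumes
// GeoTools' ECQL parser and GeoMesa's SimpleFeatureTypes helper are on the classpath;
// the feature type name, attribute spec and filter below are hypothetical.
object QueryPlanFilterVisitorExample {
  import org.geotools.filter.text.ecql.ECQL
  import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes

  def main(args: Array[String]): Unit = {
    val sft = SimpleFeatureTypes.createType("example", "age:Integer,dtg:Date,*geom:Point:srid=4326")
    // 'age' is typed as Integer in the feature type, so the visitor should rewrite the
    // string literal '30' into a numeric literal while duplicating the filter.
    val bound = QueryPlanFilterVisitor(sft, ECQL.toFilter("age > '30'"))
    println(bound)
  }
}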
|
aheyne/geomesa
|
geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/visitor/QueryPlanFilterVisitor.scala
|
Scala
|
apache-2.0
| 15,450
|
/*                     __                                                   *\
**     ________ ___   / /  ___      __ ____    Scala.js Benchmarks          **
**    / __/ __// _ | / /  / _ | __ / // __/    (c) 2013, Jonas Fonseca      **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \                                   **
** /____/\___/_/ |_/____/_/ | |__/ /____/                                   **
**                          |/____/                                         **
\*                                                                          */
/* Based on code from: http://norvig.com/sudopy.shtml */
package org.scalajs.benchmark.sudoku
import scala.language.implicitConversions
object Sudoku extends org.scalajs.benchmark.Benchmark {
override def prefix = "Sudoku"
def run {
solve(grid1) match {
case Some(values) =>
if (!grid1Solutions.contains(asString(values)))
println("Invalid solution found: " + asString(values))
case _ => println("No solution found")
}
}
def cross(as: String, bs: String) =
for (a <- as.map(_.toString); b <- bs.map(_.toString)) yield a + b
val digits = "123456789"
val rows = "ABCDEFGHI"
val cols = digits
val squares = cross(rows, cols)
val unitlist =
cols.map(_.toString).map(cross(rows, _)) ++
rows.map(_.toString).map(cross(_, cols)) ++
(for (rs <- List("ABC", "DEF", "GHI"); cs <- List("123", "456", "789")) yield cross(rs, cs))
val units = squares.map(s => (s, unitlist.filter(_.contains(s)))).toMap
val peers = squares.map(s => (s, units(s).flatten.toSet.filterNot(_ == s))).toMap
type Grid = scala.collection.mutable.Map[String, String]
val False = scala.collection.mutable.Map[String, String]()
implicit def gridToBoolean(grid: Grid): Boolean = grid.nonEmpty
// ################ Parse a Grid ################
def parseGrid(grid: String): Grid = {
val values = scala.collection.mutable.Map[String, String]()
values ++= squares.map(s => (s, digits)).toMap
val iter = gridValues(grid).iterator
while (iter.hasNext) {
val (s, d) = iter.next
if (digits.contains(d) && !assign(values, s, d))
return False
}
values
}
def gridValues(grid: String) = {
val chars = grid.map(_.toString).filter(c => digits.contains(c) || "0.".contains(c))
squares.zip(chars).toMap
}
// ################ Constraint Propagation ################
/* Eliminate all the other values (except d) from values[s] and propagate.
* Return values, except return False if a contradiction is detected. */
def assign(values: Grid, s: String, d: String): Grid = {
val otherValues = values(s).replace(d, "")
if (otherValues.forall(d2 => eliminate(values, s, d2.toString)))
values
else
False
}
/* Eliminate d from values[s]; propagate when values or places <= 2.
* Return values, except return False if a contradiction is detected. */
def eliminate(values: Grid, s: String, d: String): Grid = {
if (!values(s).contains(d))
return values // Already eliminated
values(s) = values(s).replace(d, "")
// (1) If a square s is reduced to one value d2, then eliminate d2 from the peers.
if (values(s).isEmpty) {
return False // Contradiction: removed last value
} else if (values(s).length == 1) {
val d2 = values(s)
if (!peers(s).forall(s2 => eliminate(values, s2, d2)))
return False
}
// (2) If a unit u is reduced to only one place for a value d, then put it there.
val iter = units(s).iterator
while (iter.hasNext) {
val u = iter.next
val dplaces = for (s <- u; if (values(s).contains(d))) yield s
if (dplaces.isEmpty)
return False // Contradiction: no place for d
if (dplaces.size == 1) {
if (!assign(values, dplaces(0), d))
return False
}
}
values
}
// ################ Unit Tests ################
val grid1 = "003020600900305001001806400008102900700000008006708200002609500800203009005010300"
val grid2 = "4.....8.5.3..........7......2.....6.....8.4......1.......6.3.7.5..2.....1.4......"
val hard1 = ".....6....59.....82....8....45........3........6..3.54...325..6.................."
val grid1Solutions = List(
"483921657967345821251876493548132976729564138136798245372689514814253769695417382")
val grid2Solutions = List(
"417369825632158947958724316825437169791586432346912758289643571573291684164875293")
val hard1Solutions = List(
"874196325359742618261538497145679832783254169926813754417325986598461273632987541",
"834596217659712438271438569745169382923854671186273954417325896562987143398641725")
def test() {
require(squares.length == 81)
require(unitlist.length == 27)
require(squares.forall(s => units(s).size == 3))
require(squares.forall(s => peers(s).size == 20))
require(units("C2") == Vector(Vector("A2", "B2", "C2", "D2", "E2", "F2", "G2", "H2", "I2"),
Vector("C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "C9"),
Vector("A1", "A2", "A3", "B1", "B2", "B3", "C1", "C2", "C3")))
require(peers("C2") == Set("A2", "B2", "D2", "E2", "F2", "G2", "H2", "I2",
"C1", "C3", "C4", "C5", "C6", "C7", "C8", "C9",
"A1", "A3", "B1", "B3"))
println("All tests pass")
}
// ################ Display as 2-D grid ################
// Display these values as a 2-D grid.
def display(values: Grid) = {
val width = squares.map(values(_).length).max + 1
val line = (for (i <- 0 to 2) yield ("-" * width * 3)).mkString("+")
for (r <- rows.map(_.toString)) {
val cells = (for (c <- cols) yield center(values(r + c), width))
println(cells.sliding(3, 3).map(_.mkString).mkString("|"))
if ("CF".contains(r))
println(line)
}
println
}
def asString(values: Grid): String =
(for (r <- rows; c <- cols) yield values(r.toString + c.toString)).mkString
// ################ Search ################
def solve(grid: String) = search(parseGrid(grid))
// Using depth-first search and propagation, try all possible values.
def search(values: Grid): Option[Grid] = {
if (values.isEmpty)
return None // Failed earlier
if (squares.forall(s => values(s).length == 1))
return Some(values) // Solved!
// Choose the unfilled square s with the fewest possibilities
val (s, n) = values.filter(_._2.length > 1).minBy(_._2.length)
values(s).toStream.map { d =>
val solution = values.clone
if (assign(solution, s, d.toString))
search(solution)
else
None
}.find(_.isDefined).flatten
}
// ################ Utilities ################
def center(s: String, max: Int, pad: String = " ") = {
def repeat(s: String, n: Int) =
s * n
val padLen = max - s.length
if (padLen <= 0)
s
else
repeat(pad, padLen / 2) + s + repeat(pad, (padLen + 1) / 2)
}
}
|
sjrd/scalajs-benchmarks
|
sudoku/src/main/scala/org/scalajs/benchmark/sudoku/Sudoku.scala
|
Scala
|
bsd-3-clause
| 6,904
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.contrib.scalaz.{MonadListen_, MonadTell_}
import scalaz._
package object common {
type PhaseResults = Vector[PhaseResult]
type PhaseResultW[A] = Writer[PhaseResults, A]
type PhaseResultT[F[_], A] = WriterT[F, PhaseResults, A]
type PhaseResultTell[F[_]] = MonadTell_[F, PhaseResults]
object PhaseResultTell {
def apply[F[_]](implicit F: PhaseResultTell[F]) = F
}
type PhaseResultListen[F[_]] = MonadListen_[F, PhaseResults]
object PhaseResultListen {
def apply[F[_]](implicit F: PhaseResultListen[F]) = F
}
}
|
drostron/quasar
|
common/src/main/scala/quasar/common/package.scala
|
Scala
|
apache-2.0
| 1,200
|
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.rst
import cats.data.NonEmptySet
import laika.ast._
import laika.parse.Parser
import laika.parse.implicits._
import laika.parse.builders._
import laika.parse.text.{CharGroup, Characters}
/**
* @author Jens Halm
*/
object BaseParsers {
/** Set of punctuation characters as supported by transitions (rules) and
* overlines and underlines for header sections.
*/
private[laika] val punctuationChars: NonEmptySet[Char] =
NonEmptySet.of('!','"','#','$','%','&','\'','(',')','[',']','{','}','*','+',',','-','.',':',';','/','<','>','=','?','@','\\','^','_','`','|','~')
/** Parses punctuation characters as supported by transitions (rules) and
* overlines and underlines for header sections.
*/
val punctuationChar: Characters[String] = anyOf(punctuationChars)
/** Parses a simple reference name that only allows alphanumerical characters
* and the punctuation characters `-`, `_`, `.`, `:`, `+`.
*
* See [[http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#reference-names]].
*/
val simpleRefName: Parser[String] = {
val alphaNum = someWhile(c => Character.isDigit(c) || Character.isLetter(c))
val symbol = oneOf('-', '_', '.', ':', '+').void
alphaNum.rep(symbol).min(1).source
}
/** Parses a size and its unit, e.g. 12px.
* The unit is mandatory and must be CSS-compatible.
*/
val sizeAndUnit: Parser[Length] = {
def evalLength (value: String): Either[String, LengthUnit] =
LengthUnit.fromString(value).toRight(s"Invalid length unit: $value")
val digit = someOf(CharGroup.digit)
val amount = (digit ~ opt("." ~ digit)).source.map(_.toDouble)
val unit = (simpleRefName | "%").evalMap(evalLength)
(amount ~ (ws ~> unit)).mapN(Length.apply)
}
/** Parses any of the four supported types of footnote labels.
*
* See [[http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#footnote-references]].
*/
val footnoteLabel: Parser[FootnoteLabel] = {
val decimal = someOf(CharGroup.digit).map(n => NumericLabel(n.toInt))
val autonumber = literal("#").as(Autonumber)
val autosymbol = literal("*").as(Autosymbol)
val autonumberLabel = "#" ~> simpleRefName.map(AutonumberLabel.apply)
decimal | autonumberLabel | autonumber | autosymbol
}
}
|
planet42/Laika
|
core/shared/src/main/scala/laika/rst/BaseParsers.scala
|
Scala
|
apache-2.0
| 2,941
|
package skinny.controller.feature
import skinny.engine.SkinnyEngineBase
import skinny.engine.json.EngineJSONStringOps
import skinny.json.AngularJSONStringOps
/**
* Angular application's server side API support.
*/
trait AngularJSONFeature
extends EngineJSONStringOps
with AngularJSONStringOps { self: SkinnyEngineBase =>
}
|
holycattle/skinny-framework
|
framework/src/main/scala/skinny/controller/feature/AngularJSONFeature.scala
|
Scala
|
mit
| 336
|
package almhirt.context
import akka.actor.ActorRef
import almhirt.common._
import almhirt.akkax.{ CircuitControl, ComponentId }
import almhirt.herder.HerderMessages
trait HasAlmhirtContext {
implicit def almhirtContext: AlmhirtContext
}
|
chridou/almhirt
|
almhirt-core/src/main/scala/almhirt/context/HasAlmhirtContext.scala
|
Scala
|
apache-2.0
| 240
|
package truerss.db.driver
import java.nio.file.Paths
import java.util.Properties
import slick.jdbc._
import truerss.util.DbConfig
trait DBProfile {
val profile: JdbcProfile
val driver: String
val sourceClassName: String
val defaultConnectionSize: Int = 10
def props(dbConf: DbConfig, isUserConf: Boolean): Properties = {
val props = new Properties()
props.setProperty("dataSourceClassName", sourceClassName)
props.setProperty("dataSource.user", dbConf.dbUsername)
props.setProperty("dataSource.password", dbConf.dbPassword)
props.setProperty("dataSource.databaseName", dbConf.dbName)
props.setProperty("dataSource.serverName", dbConf.dbHost)
props.setProperty("dataSource.portNumber", dbConf.dbPort)
props
}
}
object DBProfile {
private val dbMap = Map(
"postgresql" -> Postgresql,
"sqlite" -> Sqlite,
"mysql" -> Mysql
)
def get(x: String): Option[SupportedDb] = {
dbMap.get(x.toLowerCase)
}
def create(db: SupportedDb) = {
db match {
case Postgresql => new DBProfile {
override val driver: String = "org.postgresql.Driver"
override val profile: JdbcProfile = PostgresProfile
override val sourceClassName: String = "org.postgresql.ds.PGSimpleDataSource"
}
case Sqlite => new DBProfile {
override val profile: JdbcProfile = SQLiteProfile
override val driver = "org.sqlite.JDBC"
override val sourceClassName: String = "org.sqlite.SQLiteDataSource"
override val defaultConnectionSize: Int = 1 // for sqlite: need to avoid locks
private val sqliteUrl = "jdbc:sqlite:"
override def props(dbConf: DbConfig, isUserConf: Boolean): Properties = {
val dbName = if (isUserConf) {
dbConf.dbName
} else {
s"${Paths.get("").toAbsolutePath}/${dbConf.dbName}"
}
val props = new Properties()
props.setProperty("dataSource.databaseName", dbName)
props.setProperty("driverClassName", driver)
val jdbcUrl = if (dbName.startsWith(sqliteUrl)) {
dbName
} else {
s"$sqliteUrl$dbName"
}
props.setProperty("jdbcUrl", jdbcUrl)
props
}
}
case Mysql => new DBProfile {
override val driver: String = "com.mysql.jdbc.Driver"
override val profile: JdbcProfile = MySQLProfile
override val sourceClassName: String = "com.mysql.cj.jdbc.MysqlDataSource"
}
}
}
}
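// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// Resolves a profile by name and builds connection-pool properties from a DbConfig,
// using only the members defined above; the lookup key is hypothetical.
object DBProfileExample {
  def postgresProps(dbConf: DbConfig): Option[Properties] =
    DBProfile.get("postgresql")            // case-insensitive lookup in dbMap
      .map(DBProfile.create)               // materialise the matching profile
      .map(_.props(dbConf, isUserConf = true))
}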
|
truerss/truerss
|
src/main/scala/truerss/db/driver/DBProfile.scala
|
Scala
|
mit
| 2,521
|
package com.bwsw.tstreamstransactionserver.netty.server.db.zk
import java.util.concurrent.ConcurrentHashMap
import com.bwsw.tstreamstransactionserver.netty.server.streamService.{StreamCRUD}
import com.bwsw.tstreamstransactionserver.netty.server.streamService
import org.apache.curator.framework.CuratorFramework
final class StreamDatabaseZK(client: CuratorFramework, path: String)
extends StreamCRUD
{
private val streamCache =
new ConcurrentHashMap[streamService.StreamKey, streamService.StreamValue]()
private val streamNamePath = new StreamNamePath(client, s"$path/names")
private val streamIDPath = new StreamIDPath(client, s"$path/ids")
override def putStream(streamValue: streamService.StreamValue): streamService.StreamKey = {
if (!streamNamePath.checkExists(streamValue.name)) {
val streamRecord = streamIDPath.put(streamValue)
streamNamePath.put(streamRecord)
streamCache.put(streamRecord.key, streamRecord.stream)
streamRecord.key
} else streamService.StreamKey(-1)
}
override def checkStreamExists(name: String): Boolean =
streamNamePath.checkExists(name)
override def delStream(name: String): Boolean =
streamNamePath.delete(name)
override def getStream(name: String): Option[streamService.StreamRecord] =
streamNamePath.get(name)
override def getStream(streamKey: streamService.StreamKey): Option[streamService.StreamRecord] = {
Option(streamCache.get(streamKey))
.map(streamValue => streamService.StreamRecord(streamKey, streamValue))
.orElse{
val streamRecordOpt = streamIDPath.get(streamKey)
streamRecordOpt.foreach(streamRecord =>
streamCache.put(streamRecord.key, streamRecord.stream)
)
streamRecordOpt
}
}
}
|
bwsw/tstreams-transaction-server
|
src/main/scala/com/bwsw/tstreamstransactionserver/netty/server/db/zk/StreamDatabaseZK.scala
|
Scala
|
apache-2.0
| 1,775
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import scala.collection.mutable.HashMap
import scala.concurrent.duration._
import akka.actor._
import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.deploy.{ExecutorDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.{DriverState, Master}
import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.worker.ui.WorkerWebUI
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.util.{AkkaUtils, Utils}
/**
* @param masterUrls Each url should look like spark://host:port.
*/
private[spark] class Worker(
host: String,
port: Int,
webUiPort: Int,
cores: Int,
memory: Int,
masterUrls: Array[String],
actorSystemName: String,
actorName: String,
workDirPath: String = null,
val conf: SparkConf)
extends Actor with Logging {
import context.dispatcher
Utils.checkHost(host, "Expected hostname")
assert (port > 0)
val DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss") // For worker and executor IDs
// Send a heartbeat every (heartbeat timeout) / 4 milliseconds
val HEARTBEAT_MILLIS = conf.getLong("spark.worker.timeout", 60) * 1000 / 4
val REGISTRATION_TIMEOUT = 20.seconds
val REGISTRATION_RETRIES = 3
// Index into masterUrls that we're currently trying to register with.
var masterIndex = 0
val masterLock: Object = new Object()
var master: ActorSelection = null
var masterAddress: Address = null
var activeMasterUrl: String = ""
var activeMasterWebUiUrl : String = ""
val akkaUrl = "akka.tcp://%s@%s:%s/user/%s".format(actorSystemName, host, port, actorName)
@volatile var registered = false
@volatile var connected = false
val workerId = generateWorkerId()
val sparkHome = new File(Option(System.getenv("SPARK_HOME")).getOrElse("."))
var workDir: File = null
val executors = new HashMap[String, ExecutorRunner]
val finishedExecutors = new HashMap[String, ExecutorRunner]
val drivers = new HashMap[String, DriverRunner]
val finishedDrivers = new HashMap[String, DriverRunner]
val publicAddress = {
val envVar = System.getenv("SPARK_PUBLIC_DNS")
if (envVar != null) envVar else host
}
var webUi: WorkerWebUI = null
var coresUsed = 0
var memoryUsed = 0
val metricsSystem = MetricsSystem.createMetricsSystem("worker", conf)
val workerSource = new WorkerSource(this)
def coresFree: Int = cores - coresUsed
def memoryFree: Int = memory - memoryUsed
def createWorkDir() {
workDir = Option(workDirPath).map(new File(_)).getOrElse(new File(sparkHome, "work"))
try {
// This sporadically fails - not sure why ... !workDir.exists() && !workDir.mkdirs()
// So attempting to create and then check if directory was created or not.
workDir.mkdirs()
if ( !workDir.exists() || !workDir.isDirectory) {
logError("Failed to create work directory " + workDir)
System.exit(1)
}
assert (workDir.isDirectory)
} catch {
case e: Exception =>
logError("Failed to create work directory " + workDir, e)
System.exit(1)
}
}
override def preStart() {
assert(!registered)
logInfo("Starting Spark worker %s:%d with %d cores, %s RAM".format(
host, port, cores, Utils.megabytesToString(memory)))
logInfo("Spark home: " + sparkHome)
createWorkDir()
webUi = new WorkerWebUI(this, workDir, Some(webUiPort))
context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
webUi.start()
registerWithMaster()
metricsSystem.registerSource(workerSource)
metricsSystem.start()
}
def changeMaster(url: String, uiUrl: String) {
masterLock.synchronized {
activeMasterUrl = url
activeMasterWebUiUrl = uiUrl
master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl))
masterAddress = activeMasterUrl match {
case Master.sparkUrlRegex(_host, _port) =>
Address("akka.tcp", Master.systemName, _host, _port.toInt)
case x =>
throw new SparkException("Invalid spark URL: " + x)
}
connected = true
}
}
def tryRegisterAllMasters() {
for (masterUrl <- masterUrls) {
logInfo("Connecting to master " + masterUrl + "...")
val actor = context.actorSelection(Master.toAkkaUrl(masterUrl))
actor ! RegisterWorker(workerId, host, port, cores, memory, webUi.boundPort.get,
publicAddress)
}
}
def registerWithMaster() {
tryRegisterAllMasters()
var retries = 0
lazy val retryTimer: Cancellable =
context.system.scheduler.schedule(REGISTRATION_TIMEOUT, REGISTRATION_TIMEOUT) {
retries += 1
if (registered) {
retryTimer.cancel()
} else if (retries >= REGISTRATION_RETRIES) {
logError("All masters are unresponsive! Giving up.")
System.exit(1)
} else {
tryRegisterAllMasters()
}
}
retryTimer // start timer
}
override def receive = {
case RegisteredWorker(masterUrl, masterWebUiUrl) =>
logInfo("Successfully registered with master " + masterUrl)
registered = true
changeMaster(masterUrl, masterWebUiUrl)
context.system.scheduler.schedule(0 millis, HEARTBEAT_MILLIS millis, self, SendHeartbeat)
case SendHeartbeat =>
masterLock.synchronized {
if (connected) { master ! Heartbeat(workerId) }
}
case MasterChanged(masterUrl, masterWebUiUrl) =>
logInfo("Master has changed, new master is at " + masterUrl)
changeMaster(masterUrl, masterWebUiUrl)
val execs = executors.values.
map(e => new ExecutorDescription(e.appId, e.execId, e.cores, e.state))
sender ! WorkerSchedulerStateResponse(workerId, execs.toList, drivers.keys.toSeq)
case Heartbeat =>
logInfo(s"Received heartbeat from driver ${sender.path}")
case RegisterWorkerFailed(message) =>
if (!registered) {
logError("Worker registration failed: " + message)
System.exit(1)
}
case LaunchExecutor(masterUrl, appId, execId, appDesc, cores_, memory_) =>
if (masterUrl != activeMasterUrl) {
logWarning("Invalid Master (" + masterUrl + ") attempted to launch executor.")
} else {
try {
logInfo("Asked to launch executor %s/%d for %s".format(appId, execId, appDesc.name))
val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
self, workerId, host,
appDesc.sparkHome.map(userSparkHome => new File(userSparkHome)).getOrElse(sparkHome),
workDir, akkaUrl, ExecutorState.RUNNING)
executors(appId + "/" + execId) = manager
manager.start()
coresUsed += cores_
memoryUsed += memory_
masterLock.synchronized {
master ! ExecutorStateChanged(appId, execId, manager.state, None, None)
}
} catch {
case e: Exception => {
logError("Failed to launch exector %s/%d for %s".format(appId, execId, appDesc.name))
if (executors.contains(appId + "/" + execId)) {
executors(appId + "/" + execId).kill()
executors -= appId + "/" + execId
}
masterLock.synchronized {
master ! ExecutorStateChanged(appId, execId, ExecutorState.FAILED, None, None)
}
}
}
}
case ExecutorStateChanged(appId, execId, state, message, exitStatus) =>
masterLock.synchronized {
master ! ExecutorStateChanged(appId, execId, state, message, exitStatus)
}
val fullId = appId + "/" + execId
if (ExecutorState.isFinished(state)) {
val executor = executors(fullId)
logInfo("Executor " + fullId + " finished with state " + state +
message.map(" message " + _).getOrElse("") +
exitStatus.map(" exitStatus " + _).getOrElse(""))
executors -= fullId
finishedExecutors(fullId) = executor
coresUsed -= executor.cores
memoryUsed -= executor.memory
}
case KillExecutor(masterUrl, appId, execId) =>
if (masterUrl != activeMasterUrl) {
logWarning("Invalid Master (" + masterUrl + ") attempted to launch executor " + execId)
} else {
val fullId = appId + "/" + execId
executors.get(fullId) match {
case Some(executor) =>
logInfo("Asked to kill executor " + fullId)
executor.kill()
case None =>
logInfo("Asked to kill unknown executor " + fullId)
}
}
case LaunchDriver(driverId, driverDesc) => {
logInfo(s"Asked to launch driver $driverId")
val driver = new DriverRunner(driverId, workDir, sparkHome, driverDesc, self, akkaUrl)
drivers(driverId) = driver
driver.start()
coresUsed += driverDesc.cores
memoryUsed += driverDesc.mem
}
case KillDriver(driverId) => {
logInfo(s"Asked to kill driver $driverId")
drivers.get(driverId) match {
case Some(runner) =>
runner.kill()
case None =>
logError(s"Asked to kill unknown driver $driverId")
}
}
case DriverStateChanged(driverId, state, exception) => {
state match {
case DriverState.ERROR =>
logWarning(s"Driver $driverId failed with unrecoverable exception: ${exception.get}")
case DriverState.FINISHED =>
logInfo(s"Driver $driverId exited successfully")
case DriverState.KILLED =>
logInfo(s"Driver $driverId was killed by user")
}
masterLock.synchronized {
master ! DriverStateChanged(driverId, state, exception)
}
val driver = drivers.remove(driverId).get
finishedDrivers(driverId) = driver
memoryUsed -= driver.driverDesc.mem
coresUsed -= driver.driverDesc.cores
}
case x: DisassociatedEvent if x.remoteAddress == masterAddress =>
logInfo(s"$x Disassociated !")
masterDisconnected()
case RequestWorkerState => {
sender ! WorkerStateResponse(host, port, workerId, executors.values.toList,
finishedExecutors.values.toList, drivers.values.toList,
finishedDrivers.values.toList, activeMasterUrl, cores, memory,
coresUsed, memoryUsed, activeMasterWebUiUrl)
}
}
def masterDisconnected() {
logError("Connection to master failed! Waiting for master to reconnect...")
connected = false
}
def generateWorkerId(): String = {
"worker-%s-%s-%d".format(DATE_FORMAT.format(new Date), host, port)
}
override def postStop() {
executors.values.foreach(_.kill())
drivers.values.foreach(_.kill())
webUi.stop()
metricsSystem.stop()
}
}
private[spark] object Worker {
def main(argStrings: Array[String]) {
val args = new WorkerArguments(argStrings)
val (actorSystem, _) = startSystemAndActor(args.host, args.port, args.webUiPort, args.cores,
args.memory, args.masters, args.workDir)
actorSystem.awaitTermination()
}
def startSystemAndActor(host: String, port: Int, webUiPort: Int, cores: Int, memory: Int,
masterUrls: Array[String], workDir: String, workerNumber: Option[Int] = None)
: (ActorSystem, Int) =
{
// The LocalSparkCluster runs multiple local sparkWorkerX actor systems
val conf = new SparkConf
val systemName = "sparkWorker" + workerNumber.map(_.toString).getOrElse("")
val actorName = "Worker"
val (actorSystem, boundPort) = AkkaUtils.createActorSystem(systemName, host, port,
conf = conf)
actorSystem.actorOf(Props(classOf[Worker], host, boundPort, webUiPort, cores, memory,
masterUrls, systemName, actorName, workDir, conf), name = actorName)
(actorSystem, boundPort)
}
}
|
cloudera/spark
|
core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
|
Scala
|
apache-2.0
| 12,776
|
// Copyright (c) 2011-2015 ScalaMock Contributors (https://github.com/paulbutcher/ScalaMock/graphs/contributors)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.example.proxy.mockitostyle
import com.example.{Order, Warehouse}
import org.scalatest.WordSpec
import org.scalamock.scalatest.proxy.MockFactory
// This is a reworked version of the example from Martin Fowler's article
// Mocks Aren't Stubs http://martinfowler.com/articles/mocksArentStubs.html
class OrderTest extends WordSpec with MockFactory {
"An order" when {
"in stock" should {
"remove inventory" in {
val mockWarehouse = stub[Warehouse]
mockWarehouse.when('hasInventory)("Talisker", 50).returns(true)
val order = new Order("Talisker", 50)
order.fill(mockWarehouse)
assert(order.isFilled)
mockWarehouse.verify('remove)("Talisker", 50).once
}
}
"out of stock" should {
"remove nothing" in {
val mockWarehouse = stub[Warehouse]
mockWarehouse.when('hasInventory)(*, *).returns(false)
val order = new Order("Talisker", 50)
order.fill(mockWarehouse)
assert(!order.isFilled)
}
}
}
}
|
hypertino/ScalaMock
|
examples/jvm/com/example/proxy/mockitostyle/OrderTest.scala
|
Scala
|
mit
| 2,275
|
package medium
import medium.MediumApiProtocol._
import medium.domainObjects._
import okhttp3._
import spray.json._
class MediumClient(clientId: String, clientSecret: String, var accessToken: Option[String] = None) {
val client = new OkHttpClient()
val baseApiUrl: HttpUrl = new HttpUrl.Builder()
.scheme("https")
.host("api.medium.com")
.build()
def getUser: User = accessToken match {
case Some(at) =>
val request = new Request.Builder()
.header("Content-Type", "application/json")
.header("Accept", "application/json")
.header("Accept-Charset", "utf-8")
.header("Authorization", s"Bearer $at")
.url(baseApiUrl.resolve("/v1/me"))
.get()
.build()
makeRequest[User](request)
case _ => throw new MediumException("Please set access token")
}
def createPost(authorId: String, postRequest: PostRequest): Post = accessToken match {
case Some(at) =>
val httpUrl = baseApiUrl.resolve(s"/v1/users/$authorId/posts")
val request = new Request.Builder()
.header("Content-Type", "application/json")
.header("Accept", "application/json")
.header("Accept-Charset", "utf-8")
.header("Authorization", s"Bearer $at")
.url(httpUrl)
.post(RequestBody.create(MediaType.parse("application/json"), postRequest.toJson.prettyPrint))
.build()
makeRequest[Post](request)
case _ => throw new MediumException("Please set access token")
}
private def makeRequest[T](request: Request)(implicit p: JsonReader[T]): T = {
val response = client.newCall(request).execute()
val responseJson = response.body().string()
println(s"Received response $responseJson")
response match {
case r if r.isSuccessful =>
val jsValue: JsValue = responseJson.parseJson
jsValue.asJsObject.getFields("data").headOption match {
case Some(data) => data.convertTo[T]
case _ => throw new MediumException(s"Received unexpected JSON response $responseJson")
}
case _ => throw new MediumException(s"Received HTTP error response code ${response.code()}")
}
}
}
object MediumClient {
def apply(clientId: String, clientSecret: String): MediumClient = new MediumClient(clientId, clientSecret)
def apply(clientId: String, clientSecret: String, accessToken: String): MediumClient = new MediumClient(clientId, clientSecret, Some(accessToken))
}
case class MediumException(message: String, cause: Throwable = null) extends RuntimeException(message, cause)
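// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// Shows the intended call sequence against the client defined above. The credentials
// are placeholders; a real access token comes from Medium's OAuth flow, and createPost
// additionally needs a PostRequest built from the (not shown) domainObjects definitions.
object MediumClientExample {
  def main(args: Array[String]): Unit = {
    val client = MediumClient("client-id", "client-secret", "access-token")
    // Calls GET /v1/me via getUser above; throws MediumException without a valid token.
    val user = client.getUser
    println(user)
  }
}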
|
shekhargulati/52-technologies-in-2016
|
06-okhttp/medium-scala-client/src/main/scala/medium/MediumClient.scala
|
Scala
|
mit
| 2,566
|
/*
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ryan C. Brozo
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.ryanbrozo.spray.hawk
import spray.http.HttpHeaders.Authorization
import spray.http.{GenericHttpCredentials, HttpRequest}
import spray.util._
/**
* Abstracts access to attributes specific to a Hawk Authorization header. Given an HttpRequest, this class extracts the
* `Authorization` header attributes
*
* @param request Spray HttpRequest to extract attributes from.
*/
private[hawk] case class AuthHeaderAttributes(request: HttpRequest) {
import AuthHeaderKeys._
private lazy val _authHeader: Option[Authorization] = request.headers
.findByType[`Authorization`]
.filter {
case Authorization(creds: GenericHttpCredentials) => creds.scheme == HEADER_NAME
case _ => false
}
lazy val isPresent: Boolean = _authHeader.isDefined
private lazy val _credentials = _authHeader.map {
case Authorization(creds: GenericHttpCredentials) => creds
}
private lazy val _extractor = _credentials map extractAuthKey
/**
* Extracts a key from the Authorization header
*
* @param credentials Authorization header represented as [[spray.http.GenericHttpCredentials]]
* @param key Key of value to obtain
* @return Extracted value wrapped as a [[scala.Option]]
*/
private def extractAuthKey(credentials: GenericHttpCredentials)(key: AuthHeaderKeys.Value): Option[String] =
credentials.params.get(key.toString)
lazy val id: String = _extractor flatMap {_(Id)} getOrElse ""
lazy val tsString: Option[String] = _extractor flatMap {_(Ts)}
lazy val ts: TimeStamp = tsString map {_.toDouble.toLong} getOrElse 0
lazy val nonce: Option[Nonce] = _extractor flatMap {_(Nonce)}
lazy val hash: Option[String] = _extractor flatMap {_(Hash)}
lazy val ext: Option[ExtData] = _extractor flatMap {_(Ext)}
lazy val mac: Option[String] = _extractor flatMap {_(Mac)}
}
|
ryanbrozo/spray-hawk
|
lib/src/main/scala/com/ryanbrozo/spray/hawk/AuthHeaderAttributes.scala
|
Scala
|
mit
| 2,984
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.avro
import scala.collection.JavaConverters._
import scala.util.Random
import com.fasterxml.jackson.annotation.ObjectIdGenerators.UUIDGenerator
import org.apache.avro.{LogicalType, LogicalTypes, Schema, SchemaBuilder}
import org.apache.avro.LogicalTypes.{Date, Decimal, TimestampMicros, TimestampMillis}
import org.apache.avro.Schema.Type._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.util.RandomUUIDGenerator
import org.apache.spark.sql.internal.SQLConf.AvroOutputTimestampType
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.Decimal.{maxPrecisionForBytes, minBytesForPrecision}
/**
* This object contains methods that are used to convert Spark SQL schemas to Avro schemas and
* vice versa.
*/
object SchemaConverters {
private lazy val uuidGenerator = RandomUUIDGenerator(new Random().nextLong())
private lazy val nullSchema = Schema.create(Schema.Type.NULL)
case class SchemaType(dataType: DataType, nullable: Boolean)
/**
* This function takes an avro schema and returns a sql schema.
*/
def toSqlType(avroSchema: Schema): SchemaType = {
avroSchema.getType match {
case INT => avroSchema.getLogicalType match {
case _: Date => SchemaType(DateType, nullable = false)
case _ => SchemaType(IntegerType, nullable = false)
}
case STRING => SchemaType(StringType, nullable = false)
case BOOLEAN => SchemaType(BooleanType, nullable = false)
case BYTES | FIXED => avroSchema.getLogicalType match {
// For FIXED type, if the precision requires more bytes than fixed size, the logical
// type will be null, which is handled by Avro library.
case d: Decimal => SchemaType(DecimalType(d.getPrecision, d.getScale), nullable = false)
case _ => SchemaType(BinaryType, nullable = false)
}
case DOUBLE => SchemaType(DoubleType, nullable = false)
case FLOAT => SchemaType(FloatType, nullable = false)
case LONG => avroSchema.getLogicalType match {
case _: TimestampMillis | _: TimestampMicros => SchemaType(TimestampType, nullable = false)
case _ => SchemaType(LongType, nullable = false)
}
case ENUM => SchemaType(StringType, nullable = false)
case RECORD =>
val fields = avroSchema.getFields.asScala.map { f =>
val schemaType = toSqlType(f.schema())
StructField(f.name, schemaType.dataType, schemaType.nullable)
}
SchemaType(StructType(fields), nullable = false)
case ARRAY =>
val schemaType = toSqlType(avroSchema.getElementType)
SchemaType(
ArrayType(schemaType.dataType, containsNull = schemaType.nullable),
nullable = false)
case MAP =>
val schemaType = toSqlType(avroSchema.getValueType)
SchemaType(
MapType(StringType, schemaType.dataType, valueContainsNull = schemaType.nullable),
nullable = false)
case UNION =>
if (avroSchema.getTypes.asScala.exists(_.getType == NULL)) {
// In case of a union with null, eliminate it and make a recursive call
val remainingUnionTypes = avroSchema.getTypes.asScala.filterNot(_.getType == NULL)
if (remainingUnionTypes.size == 1) {
toSqlType(remainingUnionTypes.head).copy(nullable = true)
} else {
toSqlType(Schema.createUnion(remainingUnionTypes.asJava)).copy(nullable = true)
}
} else avroSchema.getTypes.asScala.map(_.getType) match {
case Seq(t1) =>
toSqlType(avroSchema.getTypes.get(0))
case Seq(t1, t2) if Set(t1, t2) == Set(INT, LONG) =>
SchemaType(LongType, nullable = false)
case Seq(t1, t2) if Set(t1, t2) == Set(FLOAT, DOUBLE) =>
SchemaType(DoubleType, nullable = false)
case _ =>
// Convert complex unions to struct types where field names are member0, member1, etc.
// This is consistent with the behavior when converting between Avro and Parquet.
val fields = avroSchema.getTypes.asScala.zipWithIndex.map {
case (s, i) =>
val schemaType = toSqlType(s)
// All fields are nullable because only one of them is set at a time
StructField(s"member$i", schemaType.dataType, nullable = true)
}
SchemaType(StructType(fields), nullable = false)
}
case other => throw new IncompatibleSchemaException(s"Unsupported type $other")
}
}
def toAvroType(
catalystType: DataType,
nullable: Boolean = false,
recordName: String = "topLevelRecord",
prevNameSpace: String = "",
outputTimestampType: AvroOutputTimestampType.Value = AvroOutputTimestampType.TIMESTAMP_MICROS)
: Schema = {
val builder = SchemaBuilder.builder()
val schema = catalystType match {
case BooleanType => builder.booleanType()
case ByteType | ShortType | IntegerType => builder.intType()
case LongType => builder.longType()
case DateType =>
LogicalTypes.date().addToSchema(builder.intType())
case TimestampType =>
val timestampType = outputTimestampType match {
case AvroOutputTimestampType.TIMESTAMP_MILLIS => LogicalTypes.timestampMillis()
case AvroOutputTimestampType.TIMESTAMP_MICROS => LogicalTypes.timestampMicros()
case other =>
throw new IncompatibleSchemaException(s"Unexpected output timestamp type $other.")
}
timestampType.addToSchema(builder.longType())
case FloatType => builder.floatType()
case DoubleType => builder.doubleType()
case StringType => builder.stringType()
case d: DecimalType =>
val avroType = LogicalTypes.decimal(d.precision, d.scale)
val fixedSize = minBytesForPrecision(d.precision)
// Need to avoid naming conflict for the fixed fields
val name = prevNameSpace match {
case "" => s"$recordName.fixed"
case _ => s"$prevNameSpace.$recordName.fixed"
}
avroType.addToSchema(SchemaBuilder.fixed(name).size(fixedSize))
case BinaryType => builder.bytesType()
case ArrayType(et, containsNull) =>
builder.array()
.items(toAvroType(et, containsNull, recordName, prevNameSpace, outputTimestampType))
case MapType(StringType, vt, valueContainsNull) =>
builder.map()
.values(toAvroType(vt, valueContainsNull, recordName, prevNameSpace, outputTimestampType))
case st: StructType =>
val nameSpace = prevNameSpace match {
case "" => recordName
case _ => s"$prevNameSpace.$recordName"
}
val fieldsAssembler = builder.record(recordName).namespace(nameSpace).fields()
st.foreach { f =>
val fieldAvroType =
toAvroType(f.dataType, f.nullable, f.name, nameSpace, outputTimestampType)
fieldsAssembler.name(f.name).`type`(fieldAvroType).noDefault()
}
fieldsAssembler.endRecord()
// This should never happen.
case other => throw new IncompatibleSchemaException(s"Unexpected type $other.")
}
if (nullable) {
Schema.createUnion(schema, nullSchema)
} else {
schema
}
}
}
class IncompatibleSchemaException(msg: String, ex: Throwable = null) extends Exception(msg, ex)
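// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// Round-trips a small Catalyst schema through toAvroType/toSqlType using only the
// public members defined above; the field names are hypothetical.
object SchemaConvertersExample {
  def roundTrip(): SchemaConverters.SchemaType = {
    val catalyst = StructType(Seq(
      StructField("id", LongType, nullable = false),
      StructField("name", StringType, nullable = true)))
    // Catalyst -> Avro: nullable fields become a union of the field type with null.
    val avro = SchemaConverters.toAvroType(catalyst, nullable = false, recordName = "example")
    // Avro -> Catalyst: recovers a StructType with matching nullability.
    SchemaConverters.toSqlType(avro)
  }
}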
|
rikima/spark
|
external/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala
|
Scala
|
apache-2.0
| 8,223
|
package org.oedura.scavro
import org.apache.avro.Schema
/* Mock class for reading/writing */
case class Number(name: String, value: Int) extends AvroSerializeable {
type J = MockNumber
override def toAvro: MockNumber = new MockNumber(name, value)
}
object Number {
implicit def reader = new AvroReader[Number] { override type J = MockNumber }
implicit val metadata = new AvroMetadata[Number, MockNumber] {
override val avroClass = classOf[MockNumber]
override val schema: Schema = MockNumber.getClassSchema
override val fromAvro: (MockNumber) => Number = { mock =>
val name: String = mock.get(0).asInstanceOf[String]
val value: Int = mock.get(1).asInstanceOf[Int]
Number(name, value)
}
}
}
|
oedura/scavro
|
src/test/scala/org/oedura/scavro/Number.scala
|
Scala
|
apache-2.0
| 737
|
package com.gilt.gfc.aws.kinesis.client
/**
* Simplified view of AWS SDK's kinesis record, just a couple of things we care about.
*/
case class KinesisRecord (
partitionKey: String,
data: Array[Byte]
)
/** 'type class' of things that can be converted to KinesisRecord. */
trait KinesisRecordWriter[R] {
def toKinesisRecord(r: R): KinesisRecord
}
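// --- Hedged example instance (added for illustration; not part of the original file) ---
// A minimal KinesisRecordWriter for a hypothetical event type, showing how the
// 'type class' above is meant to be implemented: choose a partition key and serialise
// the payload to bytes.
case class ExampleEvent(userId: String, payload: String)

object ExampleEvent {
  implicit val exampleEventWriter: KinesisRecordWriter[ExampleEvent] =
    new KinesisRecordWriter[ExampleEvent] {
      override def toKinesisRecord(e: ExampleEvent): KinesisRecord =
        KinesisRecord(e.userId, e.payload.getBytes("UTF-8"))
    }
}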
|
gilt/gfc-aws-kinesis
|
client/src/main/scala/com/gilt/gfc/aws/kinesis/client/KinesisRecord.scala
|
Scala
|
apache-2.0
| 356
|
package mesosphere.marathon.state
import mesosphere.marathon.metrics.Metrics
import scala.concurrent.Future
/**
* Stores the last TaskFailure per app id.
*/
class TaskFailureRepository(
protected val store: EntityStore[TaskFailure],
protected val maxVersions: Option[Int] = Some(1),
protected val metrics: Metrics)
extends EntityRepository[TaskFailure] {
def store(id: PathId, value: TaskFailure): Future[TaskFailure] = super.storeByName(id.safePath, value)
def expunge(id: PathId): Future[Iterable[Boolean]] = super.expunge(id.safePath)
def current(id: PathId): Future[Option[TaskFailure]] = super.currentVersion(id.safePath)
}
|
Kosta-Github/marathon
|
src/main/scala/mesosphere/marathon/state/TaskFailureRepository.scala
|
Scala
|
apache-2.0
| 656
|
package com.pharmpress.scalaencoder
case class Employee(name: String, number: Int, manager: Boolean)
|
pharmpress/codingdojo
|
scala-serializer/src/test/scala/com/pharmpress/scalaencoder/Employee.scala
|
Scala
|
apache-2.0
| 102
|
package de.htwg.zeta.server.model.metaModel
import javax.inject.Inject
import javax.inject.Singleton
import akka.actor.ActorRef
import akka.actor.ActorSystem
@Singleton
class MetaModelWsMediatorContainer @Inject()(private val system: ActorSystem) {
val mediator: ActorRef = system.actorOf(MetaModelWsMediatorActor.props(), "metaModelWsMediator")
}
|
Zeta-Project/zeta
|
api/server/app/de/htwg/zeta/server/model/metaModel/MetaModelWsMediatorContainer.scala
|
Scala
|
bsd-2-clause
| 353
|
def tail[A]: Stream[A] => Stream[A] = {
case Stream(a, as) => as()
}
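// --- Hedged, self-contained sketch (added for illustration; the book defines its own
// Stream elsewhere, so the names below are assumptions, not the original definition) ---
object TailSnippetSketch {
  // A stream whose tail is deferred behind a thunk, matching the pattern used above
  // (and shadowing the standard library's Stream inside this object).
  case class Stream[A](head: A, rest: () => Stream[A])

  def tail[A]: Stream[A] => Stream[A] = {
    case Stream(_, as) => as()
  }

  // Example: the naturals starting at n; tail(from(0)).head == 1
  def from(n: Int): Stream[Int] = Stream(n, () => from(n + 1))
}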
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.7/code/scala/snippet21.scala
|
Scala
|
gpl-3.0
| 70
|
/*
* Copyright 2019 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.ember.client.internal
import cats.data.NonEmptyList
import cats.effect._
import org.http4s._
import org.http4s.ember.client.EmberClientBuilder
import org.http4s.headers.Connection
import org.http4s.headers.Date
import org.http4s.headers.`User-Agent`
import org.typelevel.ci._
import org.typelevel.keypool.Reusable
class ClientHelpersSuite extends Http4sSuite {
test("Request Preprocessing should add a date header if not present") {
ClientHelpers
.preprocessRequest(Request[IO](), None)
.map { req =>
req.headers.get[Date].isDefined
}
.assert
}
test("Request Preprocessing should not add a date header if already present") {
ClientHelpers
.preprocessRequest(
Request[IO](
headers = Headers(Date(HttpDate.Epoch))
),
None,
)
.map { req =>
req.headers.get[Date].map { case d: Date =>
d.date
}
}
.assertEquals(Some(HttpDate.Epoch))
}
test("Request Preprocessing should add a connection keep-alive header if not present") {
ClientHelpers
.preprocessRequest(Request[IO](), None)
.map { req =>
req.headers.get[Connection].map { case c: Connection =>
c.hasKeepAlive
}
}
.assertEquals(Some(true))
}
test("Request Preprocessing should not add a connection header if already present") {
ClientHelpers
.preprocessRequest(
Request[IO](headers = Headers(Connection(NonEmptyList.of(ci"close")))),
None,
)
.map { req =>
req.headers.get[Connection].map { case c: Connection =>
c.hasKeepAlive
}
}
.assertEquals(Some(false))
}
test("Request Preprocessing should add default user-agent") {
ClientHelpers
.preprocessRequest(Request[IO](), EmberClientBuilder.default[IO].userAgent)
.map { req =>
req.headers.get[`User-Agent`].isDefined
}
.assert
}
test("Request Preprocessing should not change a present user-agent") {
val name = "foo"
ClientHelpers
.preprocessRequest(
Request[IO](
headers = Headers(`User-Agent`(ProductId(name, None)))
),
EmberClientBuilder.default[IO].userAgent,
)
.map { req =>
req.headers.get[`User-Agent`].map { case e =>
e.product.value
}
}
.assertEquals(Some(name))
}
test("Postprocess response should reuse") {
for {
nextBytes <- Ref[IO].of(Array.emptyByteArray)
reuse <- Ref[IO].of(Reusable.DontReuse: Reusable)
_ <- ClientHelpers
.postProcessResponse[IO](
Request[IO](),
Response[IO](),
IO.pure(Some(Array.emptyByteArray)),
nextBytes,
reuse,
)
testResult <- reuse.get.map { case r =>
assertEquals(r, Reusable.Reuse)
}
} yield testResult
}
test("Postprocess response should save drained bytes when reused") {
for {
nextBytes <- Ref[IO].of(Array.emptyByteArray)
reuse <- Ref[IO].of(Reusable.DontReuse: Reusable)
_ <- ClientHelpers.postProcessResponse[IO](
Request[IO](),
Response[IO](),
IO.pure(Some(Array[Byte](1, 2, 3))),
nextBytes,
reuse,
)
drained <- nextBytes.get
} yield assertEquals(drained.toList, List[Byte](1, 2, 3))
}
test("Postprocess response should not reuse when connection close is set on request") {
for {
nextBytes <- Ref[IO].of(Array.emptyByteArray)
reuse <- Ref[IO].of(Reusable.DontReuse: Reusable)
_ <- ClientHelpers
.postProcessResponse[IO](
Request[IO](headers = Headers(Connection(NonEmptyList.of(ci"close")))),
Response[IO](),
IO.pure(Some(Array.emptyByteArray)),
nextBytes,
reuse,
)
testResult <- reuse.get.map { case r =>
assertEquals(r, Reusable.DontReuse)
}
} yield testResult
}
test("Postprocess response should do not reuse when connection close is set on response") {
for {
nextBytes <- Ref[IO].of(Array.emptyByteArray)
reuse <- Ref[IO].of(Reusable.DontReuse: Reusable)
_ <- ClientHelpers
.postProcessResponse[IO](
Request[IO](),
Response[IO](headers = Headers(Connection(NonEmptyList.of(ci"close")))),
IO.pure(Some(Array.emptyByteArray)),
nextBytes,
reuse,
)
testResult <- reuse.get.map { case r =>
assertEquals(r, Reusable.DontReuse)
}
} yield testResult
}
test("Postprocess response should do not reuse when drain is None") {
for {
nextBytes <- Ref[IO].of(Array.emptyByteArray)
reuse <- Ref[IO].of(Reusable.DontReuse: Reusable)
_ <- ClientHelpers
.postProcessResponse[IO](
Request[IO](),
Response[IO](),
IO.pure(None),
nextBytes,
reuse,
)
testResult <- reuse.get.map { case r =>
assertEquals(r, Reusable.DontReuse)
}
} yield testResult
}
}
|
http4s/http4s
|
ember-client/shared/src/test/scala/org/http4s/ember/client/internal/ClientHelpersSuite.scala
|
Scala
|
apache-2.0
| 5,671
|
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.precondition
import com.krux.hyperion.adt.HString
import com.krux.hyperion.aws.AdpDynamoDBDataExistsPrecondition
import com.krux.hyperion.common.{ BaseFields, PipelineObjectId }
import com.krux.hyperion.HyperionContext
/**
* A precondition to check that data exists in a DynamoDB table.
*
* @param tableName The DynamoDB table to check.
*/
case class DynamoDBDataExistsPrecondition private (
baseFields: BaseFields,
preconditionFields: PreconditionFields,
tableName: HString
) extends Precondition {
type Self = DynamoDBDataExistsPrecondition
def updateBaseFields(fields: BaseFields) = copy(baseFields = fields)
def updatePreconditionFields(fields: PreconditionFields) = copy(preconditionFields = fields)
lazy val serialize = AdpDynamoDBDataExistsPrecondition(
id = id,
name = name,
tableName = tableName.serialize,
role = role.serialize,
preconditionTimeout = preconditionTimeout.map(_.serialize),
maximumRetries = maximumRetries.map(_.serialize),
onFail = seqToOption(onFail)(_.ref),
onLateAction = seqToOption(onLateAction)(_.ref),
onSuccess = seqToOption(onSuccess)(_.ref)
)
}
object DynamoDBDataExistsPrecondition {
def apply(tableName: HString)(implicit hc: HyperionContext) = new DynamoDBDataExistsPrecondition(
baseFields = BaseFields(PipelineObjectId(DynamoDBDataExistsPrecondition.getClass)),
preconditionFields = Precondition.defaultPreconditionFields,
tableName = tableName
)
}
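// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// Constructing the precondition only requires the table name plus an implicit
// HyperionContext (normally derived from the pipeline configuration).
object DynamoDBDataExistsPreconditionExample {
  def forTable(tableName: HString)(implicit hc: HyperionContext): DynamoDBDataExistsPrecondition =
    DynamoDBDataExistsPrecondition(tableName)
}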
|
realstraw/hyperion
|
core/src/main/scala/com/krux/hyperion/precondition/DynamoDBDataExistsPrecondition.scala
|
Scala
|
bsd-3-clause
| 1,726
|
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.util
object Threads {
def thread(name: String)(func: => Unit) {
new Thread(name) {
override def run = {
func
}
}.start()
}
}
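// --- Hedged usage sketch (added for illustration; not part of the original file) ---
// thread(name)(body) builds a named Thread whose run() evaluates the by-name block,
// and starts it immediately; the thread name here is arbitrary.
object ThreadsExample {
  def main(args: Array[String]): Unit = {
    Threads.thread("example-worker") {
      println("running on " + Thread.currentThread.getName)
    }
  }
}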
|
dnatic09/scalate
|
scalate-util/src/main/scala/org/fusesource/scalate/util/Threads.scala
|
Scala
|
apache-2.0
| 927
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package backend.jvm
import scala.reflect.internal.util.Statistics
// Enable with `-Ystatistics:jvm`
trait BackendStats {
self: Statistics =>
val bcodeTimer = newTimer("time in backend", "jvm")
val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer)
val bcodeGenStat = newSubTimer("code generation", bcodeTimer)
val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer)
val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer)
}
|
scala/scala
|
src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
|
Scala
|
apache-2.0
| 807
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import java.util.Random
import kafka.utils.{Logging, ZkUtils}
import org.I0Itec.zkclient.ZkClient
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import scala.collection._
import scala.collection.mutable
import kafka.common._
object AdminUtils extends Logging {
val rand = new Random
val AdminEpoch = -1
/**
* There are 2 goals of replica assignment:
* 1. Spread the replicas evenly among brokers.
* 2. For partitions assigned to a particular broker, their other replicas are spread over the other brokers.
*
* To achieve this goal, we:
* 1. Assign the first replica of each partition by round-robin, starting from a random position in the broker list.
* 2. Assign the remaining replicas of each partition with an increasing shift.
*
* Here is an example of assigning 10 partitions with replication factor 3 across 5 brokers:
* broker-0 broker-1 broker-2 broker-3 broker-4
* p0 p1 p2 p3 p4 (1st replica)
* p5 p6 p7 p8 p9 (1st replica)
* p4 p0 p1 p2 p3 (2nd replica)
* p8 p9 p5 p6 p7 (2nd replica)
* p3 p4 p0 p1 p2 (3rd replica)
* p7 p8 p9 p5 p6 (3rd replica)
*/
def assignReplicasToBrokers(brokerList: Seq[Int], nPartitions: Int, replicationFactor: Int,
fixedStartIndex: Int = -1, startPartitionId: Int = -1)
: Map[Int, Seq[Int]] = {
if (nPartitions <= 0)
throw new AdministrationException("number of partitions must be larger than 0")
if (replicationFactor <= 0)
throw new AdministrationException("replication factor must be larger than 0")
if (replicationFactor > brokerList.size)
throw new AdministrationException("replication factor: " + replicationFactor +
" larger than available brokers: " + brokerList.size)
val ret = new mutable.HashMap[Int, List[Int]]()
val startIndex = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
var currentPartitionId = if (startPartitionId >= 0) startPartitionId else 0
var nextReplicaShift = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
for (i <- 0 until nPartitions) {
if (currentPartitionId > 0 && (currentPartitionId % brokerList.size == 0))
nextReplicaShift += 1
val firstReplicaIndex = (currentPartitionId + startIndex) % brokerList.size
var replicaList = List(brokerList(firstReplicaIndex))
for (j <- 0 until replicationFactor - 1)
replicaList ::= brokerList(getWrappedIndex(firstReplicaIndex, nextReplicaShift, j, brokerList.size))
ret.put(currentPartitionId, replicaList.reverse)
currentPartitionId = currentPartitionId + 1
}
ret.toMap
}
def createOrUpdateTopicPartitionAssignmentPathInZK(topic: String, replicaAssignment: Map[Int, Seq[Int]], zkClient: ZkClient, update: Boolean = false) {
try {
val zkPath = ZkUtils.getTopicPath(topic)
val jsonPartitionData = ZkUtils.replicaAssignmentZkdata(replicaAssignment.map(e => (e._1.toString -> e._2)))
if (!update) {
info("Topic creation " + jsonPartitionData.toString)
ZkUtils.createPersistentPath(zkClient, zkPath, jsonPartitionData)
} else {
info("Topic update " + jsonPartitionData.toString)
ZkUtils.updatePersistentPath(zkClient, zkPath, jsonPartitionData)
}
debug("Updated path %s with %s for replica assignment".format(zkPath, jsonPartitionData))
} catch {
case e: ZkNodeExistsException => throw new TopicExistsException("topic %s already exists".format(topic))
case e2: Throwable => throw new AdministrationException(e2.toString)
}
}
private def getWrappedIndex(firstReplicaIndex: Int, secondReplicaShift: Int, replicaIndex: Int, nBrokers: Int): Int = {
val shift = 1 + (secondReplicaShift + replicaIndex) % (nBrokers - 1)
(firstReplicaIndex + shift) % nBrokers
}
}
class AdministrationException(val errorMessage: String) extends RuntimeException(errorMessage) {
def this() = this(null)
}
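// Hedged usage sketch (not part of the original file): pinning fixedStartIndex to 0 makes the
// assignment deterministic and reproduces the Scaladoc table above (5 brokers, 10 partitions,
// replication factor 3):
//   val assignment = AdminUtils.assignReplicasToBrokers(
//     brokerList = Seq(0, 1, 2, 3, 4), nPartitions = 10, replicationFactor = 3, fixedStartIndex = 0)
//   // assignment(0) == Seq(0, 1, 2), assignment(4) == Seq(4, 0, 1), assignment(5) == Seq(0, 2, 3)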
|
archieco/kafka
|
core/src/main/scala/kafka/admin/AdminUtils.scala
|
Scala
|
apache-2.0
| 4,959
|
package com.twitter.util
import org.apache.thrift.TBase
import org.apache.thrift.protocol.{TBinaryProtocol, TCompactProtocol, TProtocolFactory}
object ThriftCodec {
def apply[T <: TBase[_, _]: Manifest, P <: TProtocolFactory: Manifest]: ThriftCodec[T, P] =
new ThriftCodec[T, P]
}
class ThriftCodec[T <: TBase[_, _]: Manifest, P <: TProtocolFactory: Manifest]
extends Codec[T, Array[Byte]]
with ThriftSerializer {
protected lazy val prototype: T =
manifest[T].runtimeClass.asInstanceOf[Class[T]].newInstance
override lazy val protocolFactory: TProtocolFactory =
manifest[P].runtimeClass.asInstanceOf[Class[P]].newInstance
override def encode(item: T): Array[Byte] = toBytes(item)
override def decode(bytes: Array[Byte]): T = {
val obj = prototype.deepCopy
fromBytes(obj, bytes)
obj.asInstanceOf[T]
}
}
class BinaryThriftCodec[T <: TBase[_, _]: Manifest]
extends ThriftCodec[T, TBinaryProtocol.Factory]
class CompactThriftCodec[T <: TBase[_, _]: Manifest]
extends ThriftCodec[T, TCompactProtocol.Factory]
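// Hedged usage sketch (not part of the original file; `MyStruct` and `myStructValue` are
// hypothetical stand-ins for a Thrift-generated class and an instance of it):
//   val codec = new BinaryThriftCodec[MyStruct]          // or new CompactThriftCodec[MyStruct]
//   val bytes: Array[Byte] = codec.encode(myStructValue)
//   val roundTripped: MyStruct = codec.decode(bytes)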
|
edombowsky/util
|
util-thrift/src/main/scala/com/twitter/util/ThriftCodec.scala
|
Scala
|
apache-2.0
| 1,057
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period}
import java.time.temporal.ChronoUnit
import java.util.Locale
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkException
import org.apache.spark.sql.UpdateFieldsBenchmark._
import org.apache.spark.sql.catalyst.expressions.{InSet, Literal, NamedExpression}
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{outstandingTimezonesIds, outstandingZoneIds}
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.ProjectExec
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.DayTimeIntervalType.DAY
import org.apache.spark.unsafe.types.UTF8String
class ColumnExpressionSuite extends QueryTest with SharedSparkSession {
import testImplicits._
private lazy val booleanData = {
spark.createDataFrame(sparkContext.parallelize(
Row(false, false) ::
Row(false, true) ::
Row(true, false) ::
Row(true, true) :: Nil),
StructType(Seq(StructField("a", BooleanType), StructField("b", BooleanType))))
}
private lazy val nullData = Seq(
(Some(1), Some(1)), (Some(1), Some(2)), (Some(1), None), (None, None)).toDF("a", "b")
test("column names with space") {
val df = Seq((1, "a")).toDF("name with space", "name.with.dot")
checkAnswer(
df.select(df("name with space")),
Row(1) :: Nil)
checkAnswer(
df.select($"name with space"),
Row(1) :: Nil)
checkAnswer(
df.select(col("name with space")),
Row(1) :: Nil)
checkAnswer(
df.select("name with space"),
Row(1) :: Nil)
checkAnswer(
df.select(expr("`name with space`")),
Row(1) :: Nil)
}
test("column names with dot") {
val df = Seq((1, "a")).toDF("name with space", "name.with.dot").as("a")
checkAnswer(
df.select(df("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select($"`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(col("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select("`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(expr("`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select(df("a.`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select($"a.`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(col("a.`name.with.dot`")),
Row("a") :: Nil)
checkAnswer(
df.select("a.`name.with.dot`"),
Row("a") :: Nil)
checkAnswer(
df.select(expr("a.`name.with.dot`")),
Row("a") :: Nil)
}
test("alias and name") {
val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
assert(df.select(df("a").as("b")).columns.head === "b")
assert(df.select(df("a").alias("b")).columns.head === "b")
assert(df.select(df("a").name("b")).columns.head === "b")
}
test("as propagates metadata") {
val metadata = new MetadataBuilder
metadata.putString("key", "value")
val origCol = $"a".as("b", metadata.build())
val newCol = origCol.as("c")
assert(newCol.expr.asInstanceOf[NamedExpression].metadata.getString("key") === "value")
}
test("collect on column produced by a binary operator") {
val df = Seq((1, 2, 3)).toDF("a", "b", "c")
checkAnswer(df.select(df("a") + df("b")), Seq(Row(3)))
checkAnswer(df.select(df("a") + df("b").as("c")), Seq(Row(3)))
}
test("star") {
checkAnswer(testData.select($"*"), testData.collect().toSeq)
}
test("star qualified by data frame object") {
val df = testData.toDF
val goldAnswer = df.collect().toSeq
checkAnswer(df.select(df("*")), goldAnswer)
val df1 = df.select(df("*"), lit("abcd").as("litCol"))
checkAnswer(df1.select(df("*")), goldAnswer)
}
test("star qualified by table name") {
checkAnswer(testData.as("testData").select($"testData.*"), testData.collect().toSeq)
}
test("SPARK-34199: star can be qualified by table name inside a non-count function") {
checkAnswer(
testData.as("testData").selectExpr("hash(testData.*)"),
testData.as("testData").selectExpr("hash(testData.key, testData.value)")
)
}
test("SPARK-34199: star cannot be qualified by table name inside a count function") {
val e = intercept[AnalysisException] {
testData.as("testData").selectExpr("count(testData.*)").collect()
}
assert(e.getMessage.contains(
"count(testData.*) is not allowed. Please use count(*) or expand the columns manually"))
}
test("SPARK-34199: table star can be qualified inside a count function with multiple arguments") {
checkAnswer(
testData.as("testData").selectExpr("count(testData.*, testData.key)"),
testData.as("testData").selectExpr("count(testData.key, testData.value, testData.key)")
)
}
test("+") {
checkAnswer(
testData2.select($"a" + 1),
testData2.collect().toSeq.map(r => Row(r.getInt(0) + 1)))
checkAnswer(
testData2.select($"a" + $"b" + 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) + r.getInt(1) + 2)))
}
test("-") {
checkAnswer(
testData2.select($"a" - 1),
testData2.collect().toSeq.map(r => Row(r.getInt(0) - 1)))
checkAnswer(
testData2.select($"a" - $"b" - 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) - r.getInt(1) - 2)))
}
test("*") {
checkAnswer(
testData2.select($"a" * 10),
testData2.collect().toSeq.map(r => Row(r.getInt(0) * 10)))
checkAnswer(
testData2.select($"a" * $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0) * r.getInt(1))))
}
test("/") {
checkAnswer(
testData2.select($"a" / 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / 2)))
checkAnswer(
testData2.select($"a" / $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / r.getInt(1))))
}
test("%") {
checkAnswer(
testData2.select($"a" % 2),
testData2.collect().toSeq.map(r => Row(r.getInt(0) % 2)))
checkAnswer(
testData2.select($"a" % $"b"),
testData2.collect().toSeq.map(r => Row(r.getInt(0) % r.getInt(1))))
}
test("unary -") {
checkAnswer(
testData2.select(-$"a"),
testData2.collect().toSeq.map(r => Row(-r.getInt(0))))
}
test("unary !") {
checkAnswer(
complexData.select(!$"b"),
complexData.collect().toSeq.map(r => Row(!r.getBoolean(3))))
}
test("isNull") {
checkAnswer(
nullStrings.toDF.where($"s".isNull),
nullStrings.collect().toSeq.filter(r => r.getString(1) eq null))
checkAnswer(
sql("select isnull(null), isnull(1)"),
Row(true, false))
}
test("isNotNull") {
checkAnswer(
nullStrings.toDF.where($"s".isNotNull),
nullStrings.collect().toSeq.filter(r => r.getString(1) ne null))
checkAnswer(
sql("select isnotnull(null), isnotnull('a')"),
Row(false, true))
}
test("isNaN") {
val testData = spark.createDataFrame(sparkContext.parallelize(
Row(Double.NaN, Float.NaN) ::
Row(math.log(-1), math.log(-3).toFloat) ::
Row(null, null) ::
Row(Double.MaxValue, Float.MinValue):: Nil),
StructType(Seq(StructField("a", DoubleType), StructField("b", FloatType))))
checkAnswer(
testData.select($"a".isNaN, $"b".isNaN),
Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil)
checkAnswer(
testData.select(isnan($"a"), isnan($"b")),
Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil)
if (!conf.ansiEnabled) {
checkAnswer(
sql("select isnan(15), isnan('invalid')"),
Row(false, false))
}
}
test("nanvl") {
withTempView("t") {
val testData = spark.createDataFrame(sparkContext.parallelize(
Row(null, 3.0, Double.NaN, Double.PositiveInfinity, 1.0f, 4) :: Nil),
StructType(Seq(StructField("a", DoubleType), StructField("b", DoubleType),
StructField("c", DoubleType), StructField("d", DoubleType),
StructField("e", FloatType), StructField("f", IntegerType))))
checkAnswer(
testData.select(
nanvl($"a", lit(5)), nanvl($"b", lit(10)), nanvl(lit(10), $"b"),
nanvl($"c", lit(null).cast(DoubleType)), nanvl($"d", lit(10)),
nanvl($"b", $"e"), nanvl($"e", $"f")),
Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0)
)
testData.createOrReplaceTempView("t")
checkAnswer(
sql(
"select nanvl(a, 5), nanvl(b, 10), nanvl(10, b), nanvl(c, null), nanvl(d, 10), " +
" nanvl(b, e), nanvl(e, f) from t"),
Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0)
)
}
}
test("===") {
checkAnswer(
testData2.filter($"a" === 1),
testData2.collect().toSeq.filter(r => r.getInt(0) == 1))
checkAnswer(
testData2.filter($"a" === $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) == r.getInt(1)))
}
test("<=>") {
checkAnswer(
nullData.filter($"b" <=> 1),
Row(1, 1) :: Nil)
checkAnswer(
nullData.filter($"b" <=> null),
Row(1, null) :: Row(null, null) :: Nil)
checkAnswer(
nullData.filter($"a" <=> $"b"),
Row(1, 1) :: Row(null, null) :: Nil)
val nullData2 = spark.createDataFrame(sparkContext.parallelize(
Row("abc") ::
Row(null) ::
Row("xyz") :: Nil),
StructType(Seq(StructField("a", StringType, true))))
checkAnswer(
nullData2.filter($"a" <=> null),
Row(null) :: Nil)
}
test("=!=") {
checkAnswer(
nullData.filter($"b" =!= 1),
Row(1, 2) :: Nil)
checkAnswer(nullData.filter($"b" =!= null), Nil)
checkAnswer(
nullData.filter($"a" =!= $"b"),
Row(1, 2) :: Nil)
}
test(">") {
checkAnswer(
testData2.filter($"a" > 1),
testData2.collect().toSeq.filter(r => r.getInt(0) > 1))
checkAnswer(
testData2.filter($"a" > $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) > r.getInt(1)))
}
test(">=") {
checkAnswer(
testData2.filter($"a" >= 1),
testData2.collect().toSeq.filter(r => r.getInt(0) >= 1))
checkAnswer(
testData2.filter($"a" >= $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1)))
}
test("<") {
checkAnswer(
testData2.filter($"a" < 2),
testData2.collect().toSeq.filter(r => r.getInt(0) < 2))
checkAnswer(
testData2.filter($"a" < $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) < r.getInt(1)))
}
test("<=") {
checkAnswer(
testData2.filter($"a" <= 2),
testData2.collect().toSeq.filter(r => r.getInt(0) <= 2))
checkAnswer(
testData2.filter($"a" <= $"b"),
testData2.collect().toSeq.filter(r => r.getInt(0) <= r.getInt(1)))
}
test("between") {
val testData = sparkContext.parallelize(
(0, 1, 2) ::
(1, 2, 3) ::
(2, 1, 0) ::
(2, 2, 4) ::
(3, 1, 6) ::
(3, 2, 0) :: Nil).toDF("a", "b", "c")
val expectAnswer = testData.collect().toSeq.
filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2))
checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer)
}
test("in") {
val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b")
checkAnswer(df.filter($"a".isin(1, 2)),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, 2)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, 1)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
checkAnswer(df.filter($"b".isin("y", "x")),
df.collect().toSeq.filter(r => r.getString(1) == "y" || r.getString(1) == "x"))
checkAnswer(df.filter($"b".isin("z", "x")),
df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "x"))
checkAnswer(df.filter($"b".isin("z", "y")),
df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "y"))
// Auto-casting should work with a mixture of different types in collections
checkAnswer(df.filter($"a".isin(1.toShort, "2")),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin("3", 2.toLong)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isin(3, "1")),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b")
val e = intercept[AnalysisException] {
df2.filter($"a".isin($"b"))
}
Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were")
.foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("IN/INSET with bytes, shorts, ints, dates") {
def check(): Unit = {
val values = Seq(
(Byte.MinValue, Some(Short.MinValue), Int.MinValue, Date.valueOf("2017-01-01")),
(Byte.MaxValue, None, Int.MaxValue, null))
val df = values.toDF("b", "s", "i", "d")
checkAnswer(df.select($"b".isin(Byte.MinValue, Byte.MaxValue)), Seq(Row(true), Row(true)))
checkAnswer(df.select($"b".isin(-1.toByte, 2.toByte)), Seq(Row(false), Row(false)))
checkAnswer(df.select($"s".isin(Short.MinValue, 1.toShort)), Seq(Row(true), Row(null)))
checkAnswer(df.select($"s".isin(0.toShort, null)), Seq(Row(null), Row(null)))
checkAnswer(df.select($"i".isin(0, Int.MinValue)), Seq(Row(true), Row(false)))
checkAnswer(df.select($"i".isin(null, Int.MinValue)), Seq(Row(true), Row(null)))
checkAnswer(
df.select($"d".isin(Date.valueOf("1950-01-01"), Date.valueOf("2017-01-01"))),
Seq(Row(true), Row(null)))
checkAnswer(
df.select($"d".isin(Date.valueOf("1950-01-01"), null)),
Seq(Row(null), Row(null)))
}
withSQLConf(SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "10") {
check()
}
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0",
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "0") {
check()
}
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0",
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "20") {
check()
}
}
test("isInCollection: Scala Collection") {
Seq(0, 1, 10).foreach { optThreshold =>
Seq(0, 1, 10).foreach { switchThreshold =>
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> optThreshold.toString,
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> switchThreshold.toString) {
val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b")
// Test with different types of collections
checkAnswer(df.filter($"a".isInCollection(Seq(3, 1))),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
checkAnswer(df.filter($"a".isInCollection(Seq(1, 2).toSet)),
df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isInCollection(Seq(3, 2).toArray)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2))
checkAnswer(df.filter($"a".isInCollection(Seq(3, 1).toList)),
df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1))
val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b")
val e = intercept[AnalysisException] {
df2.filter($"a".isInCollection(Seq($"b")))
}
Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were")
.foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
}
}
}
test("SPARK-31553: isInCollection - collection element types") {
val expected = Seq(Row(true), Row(false))
Seq(0, 1, 10).foreach { optThreshold =>
Seq(0, 1, 10).foreach { switchThreshold =>
withSQLConf(
SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> optThreshold.toString,
SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> switchThreshold.toString) {
checkAnswer(Seq(0).toDS.select($"value".isInCollection(Seq(null))), Seq(Row(null)))
checkAnswer(
Seq(true).toDS.select($"value".isInCollection(Seq(true, false))),
Seq(Row(true)))
checkAnswer(
Seq(0.toByte, 1.toByte).toDS.select($"value".isInCollection(Seq(0.toByte, 2.toByte))),
expected)
checkAnswer(
Seq(0.toShort, 1.toShort).toDS
.select($"value".isInCollection(Seq(0.toShort, 2.toShort))),
expected)
checkAnswer(Seq(0, 1).toDS.select($"value".isInCollection(Seq(0, 2))), expected)
checkAnswer(Seq(0L, 1L).toDS.select($"value".isInCollection(Seq(0L, 2L))), expected)
checkAnswer(Seq(0.0f, 1.0f).toDS
.select($"value".isInCollection(Seq(0.0f, 2.0f))), expected)
checkAnswer(Seq(0.0D, 1.0D).toDS
.select($"value".isInCollection(Seq(0.0D, 2.0D))), expected)
checkAnswer(
Seq(BigDecimal(0), BigDecimal(2)).toDS
.select($"value".isInCollection(Seq(BigDecimal(0), BigDecimal(1)))),
expected)
checkAnswer(
Seq("abc", "def").toDS.select($"value".isInCollection(Seq("abc", "xyz"))),
expected)
checkAnswer(
Seq(Date.valueOf("2020-04-29"), Date.valueOf("2020-05-01")).toDS
.select($"value".isInCollection(
Seq(Date.valueOf("2020-04-29"), Date.valueOf("2020-04-30")))),
expected)
checkAnswer(
Seq(new Timestamp(0), new Timestamp(2)).toDS
.select($"value".isInCollection(Seq(new Timestamp(0), new Timestamp(1)))),
expected)
checkAnswer(
Seq(Array("a", "b"), Array("c", "d")).toDS
.select($"value".isInCollection(Seq(Array("a", "b"), Array("x", "z")))),
expected)
}
}
}
}
test("&&") {
checkAnswer(
booleanData.filter($"a" && true),
Row(true, false) :: Row(true, true) :: Nil)
checkAnswer(
booleanData.filter($"a" && false),
Nil)
checkAnswer(
booleanData.filter($"a" && $"b"),
Row(true, true) :: Nil)
}
test("||") {
checkAnswer(
booleanData.filter($"a" || true),
booleanData.collect())
checkAnswer(
booleanData.filter($"a" || false),
Row(true, false) :: Row(true, true) :: Nil)
checkAnswer(
booleanData.filter($"a" || $"b"),
Row(false, true) :: Row(true, false) :: Row(true, true) :: Nil)
}
test("SPARK-7321 when conditional statements") {
val testData = (1 to 3).map(i => (i, i.toString)).toDF("key", "value")
checkAnswer(
testData.select(when($"key" === 1, -1).when($"key" === 2, -2).otherwise(0)),
Seq(Row(-1), Row(-2), Row(0))
)
// Without the ending otherwise, return null for unmatched conditions.
// Also test putting a non-literal value in the expression.
checkAnswer(
testData.select(when($"key" === 1, lit(0) - $"key").when($"key" === 2, -2)),
Seq(Row(-1), Row(-2), Row(null))
)
// Test error handling for invalid expressions.
intercept[IllegalArgumentException] { $"key".when($"key" === 1, -1) }
intercept[IllegalArgumentException] { $"key".otherwise(-1) }
intercept[IllegalArgumentException] { when($"key" === 1, -1).otherwise(-1).otherwise(-1) }
}
test("sqrt") {
checkAnswer(
testData.select(sqrt($"key")).orderBy($"key".asc),
(1 to 100).map(n => Row(math.sqrt(n)))
)
checkAnswer(
testData.select(sqrt($"value"), $"key").orderBy($"key".asc, $"value".asc),
(1 to 100).map(n => Row(math.sqrt(n), n))
)
checkAnswer(
testData.select(sqrt(lit(null))),
(1 to 100).map(_ => Row(null))
)
}
test("upper") {
checkAnswer(
lowerCaseData.select(upper($"l")),
('a' to 'd').map(c => Row(c.toString.toUpperCase(Locale.ROOT)))
)
checkAnswer(
testData.select(upper($"value"), $"key"),
(1 to 100).map(n => Row(n.toString, n))
)
checkAnswer(
testData.select(upper(lit(null))),
(1 to 100).map(n => Row(null))
)
checkAnswer(
sql("SELECT upper('aB'), ucase('cDe')"),
Row("AB", "CDE"))
}
test("lower") {
checkAnswer(
upperCaseData.select(lower($"L")),
('A' to 'F').map(c => Row(c.toString.toLowerCase(Locale.ROOT)))
)
checkAnswer(
testData.select(lower($"value"), $"key"),
(1 to 100).map(n => Row(n.toString, n))
)
checkAnswer(
testData.select(lower(lit(null))),
(1 to 100).map(n => Row(null))
)
checkAnswer(
sql("SELECT lower('aB'), lcase('cDe')"),
Row("ab", "cde"))
}
test("monotonically_increasing_id") {
// Make sure we have 2 partitions, each with 2 records.
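// The generated ids pack the partition index into the upper 31 bits and a per-partition record
// counter into the lower 33 bits, hence the (1L << 33) offset for rows from the second partition.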
val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ =>
Iterator(Tuple1(1), Tuple1(2))
}.toDF("a")
checkAnswer(
df.select(monotonically_increasing_id(), expr("monotonically_increasing_id()")),
Row(0L, 0L) ::
Row(1L, 1L) ::
Row((1L << 33) + 0L, (1L << 33) + 0L) ::
Row((1L << 33) + 1L, (1L << 33) + 1L) :: Nil
)
}
test("spark_partition_id") {
// Make sure we have 2 partitions, each with 2 records.
val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ =>
Iterator(Tuple1(1), Tuple1(2))
}.toDF("a")
checkAnswer(
df.select(spark_partition_id()),
Row(0) :: Row(0) :: Row(1) :: Row(1) :: Nil
)
}
test("input_file_name, input_file_block_start, input_file_block_length - more than one source") {
withTempView("tempView1") {
withTable("tab1", "tab2") {
val data = sparkContext.parallelize(0 to 9).toDF("id")
data.write.saveAsTable("tab1")
data.write.saveAsTable("tab2")
data.createOrReplaceTempView("tempView1")
Seq("input_file_name", "input_file_block_start", "input_file_block_length").foreach { f =>
val e = intercept[AnalysisException] {
sql(s"SELECT *, $f() FROM tab1 JOIN tab2 ON tab1.id = tab2.id")
}.getMessage
assert(e.contains(s"'$f' does not support more than one source"))
}
def checkResult(
fromClause: String,
exceptionExpected: Boolean,
numExpectedRows: Int = 0): Unit = {
val stmt = s"SELECT *, input_file_name() FROM ($fromClause)"
if (exceptionExpected) {
val e = intercept[AnalysisException](sql(stmt)).getMessage
assert(e.contains("'input_file_name' does not support more than one source"))
} else {
assert(sql(stmt).count() == numExpectedRows)
}
}
checkResult(
"SELECT * FROM tab1 UNION ALL SELECT * FROM tab2 UNION ALL SELECT * FROM tab2",
exceptionExpected = false,
numExpectedRows = 30)
checkResult(
"(SELECT * FROM tempView1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2",
exceptionExpected = false,
numExpectedRows = 20)
checkResult(
"(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tempView1",
exceptionExpected = false,
numExpectedRows = 20)
checkResult(
"(SELECT * FROM tempView1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2",
exceptionExpected = true)
checkResult(
"(SELECT * FROM tab1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2",
exceptionExpected = true)
checkResult(
"(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2",
exceptionExpected = true)
}
}
}
test("input_file_name, input_file_block_start, input_file_block_length - FileScanRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize(0 to 10).toDF("id")
data.write.parquet(dir.getCanonicalPath)
// Test the 3 expressions when reading from files
val q = spark.read.parquet(dir.getCanonicalPath).select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("input_file_name, input_file_block_start, input_file_block_length - HadoopRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF()
data.write.text(dir.getCanonicalPath)
val df = spark.sparkContext.textFile(dir.getCanonicalPath).toDF()
// Test the 3 expressions when reading from files
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("input_file_name, input_file_block_start, input_file_block_length - NewHadoopRDD") {
withTempPath { dir =>
val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF()
data.write.text(dir.getCanonicalPath)
val rdd = spark.sparkContext.newAPIHadoopFile(
dir.getCanonicalPath,
classOf[NewTextInputFormat],
classOf[LongWritable],
classOf[Text])
val df = rdd.map(pair => pair._2.toString).toDF()
// Test the 3 expressions when reading from files
val q = df.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()"))
val firstRow = q.head()
assert(firstRow.getString(0).contains(dir.toURI.getPath))
assert(firstRow.getLong(1) == 0)
assert(firstRow.getLong(2) > 0)
// Now read directly from the original RDD without going through any files to make sure
// we are returning empty string, -1, and -1.
checkAnswer(
data.select(
input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")
).limit(1),
Row("", -1L, -1L))
}
}
test("columns can be compared") {
assert($"key".desc == $"key".desc)
assert($"key".desc != $"key".asc)
}
test("alias with metadata") {
val metadata = new MetadataBuilder()
.putString("originName", "value")
.build()
val schema = testData
.select($"*", col("value").as("abc", metadata))
.schema
assert(schema("value").metadata === Metadata.empty)
assert(schema("abc").metadata === metadata)
}
test("rand") {
val randCol = testData.select($"key", rand(5L).as("rand"))
randCol.columns.length should be (2)
val rows = randCol.collect()
rows.foreach { row =>
assert(row.getDouble(1) <= 1.0)
assert(row.getDouble(1) >= 0.0)
}
def checkNumProjects(df: DataFrame, expectedNumProjects: Int): Unit = {
val projects = df.queryExecution.sparkPlan.collect {
case tungstenProject: ProjectExec => tungstenProject
}
assert(projects.size === expectedNumProjects)
}
// We first create a plan with two Projects.
// Project [rand + 1 AS rand1, rand - 1 AS rand2]
// Project [key, (Rand 5 + 1) AS rand]
// LogicalRDD [key, value]
// Because the Rand function is not deterministic, the column rand is not deterministic.
// So, in the optimizer, we will not collapse Project [rand + 1 AS rand1, rand - 1 AS rand2]
// and Project [key, Rand 5 AS rand]. The final plan still has two Projects.
val dfWithTwoProjects =
testData
.select($"key", (rand(5L) + 1).as("rand"))
.select(($"rand" + 1).as("rand1"), ($"rand" - 1).as("rand2"))
checkNumProjects(dfWithTwoProjects, 2)
// Now, we add one more project rand1 - rand2 on top of the query plan.
// Since rand1 and rand2 are deterministic (they basically apply +/- to the generated
// rand value), we can collapse rand1 - rand2 to the Project generating rand1 and rand2.
// So, the plan will be optimized from ...
// Project [(rand1 - rand2) AS (rand1 - rand2)]
// Project [rand + 1 AS rand1, rand - 1 AS rand2]
// Project [key, (Rand 5 + 1) AS rand]
// LogicalRDD [key, value]
// to ...
// Project [((rand + 1 AS rand1) - (rand - 1 AS rand2)) AS (rand1 - rand2)]
// Project [key, Rand 5 AS rand]
// LogicalRDD [key, value]
val dfWithThreeProjects = dfWithTwoProjects.select($"rand1" - $"rand2")
checkNumProjects(dfWithThreeProjects, 2)
dfWithThreeProjects.collect().foreach { row =>
assert(row.getDouble(0) === 2.0 +- 0.0001)
}
}
test("randn") {
val randCol = testData.select($"key", randn(5L).as("rand"))
randCol.columns.length should be (2)
val rows = randCol.collect()
rows.foreach { row =>
assert(row.getDouble(1) <= 4.0)
assert(row.getDouble(1) >= -4.0)
}
}
test("bitwiseAND") {
checkAnswer(
testData2.select($"a".bitwiseAND(75)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) & 75)))
checkAnswer(
testData2.select($"a".bitwiseAND($"b").bitwiseAND(22)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) & r.getInt(1) & 22)))
}
test("bitwiseOR") {
checkAnswer(
testData2.select($"a".bitwiseOR(170)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) | 170)))
checkAnswer(
testData2.select($"a".bitwiseOR($"b").bitwiseOR(42)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) | r.getInt(1) | 42)))
}
test("bitwiseXOR") {
checkAnswer(
testData2.select($"a".bitwiseXOR(112)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ 112)))
checkAnswer(
testData2.select($"a".bitwiseXOR($"b").bitwiseXOR(39)),
testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ r.getInt(1) ^ 39)))
}
test("SPARK-37646: lit") {
assert(lit($"foo") == $"foo")
assert(lit(Symbol("foo")) == $"foo")
assert(lit(1) == Column(Literal(1)))
assert(lit(null) == Column(Literal(null, NullType)))
}
test("typedLit") {
assert(typedLit($"foo") == $"foo")
assert(typedLit(Symbol("foo")) == $"foo")
assert(typedLit(1) == Column(Literal(1)))
assert(typedLit[String](null) == Column(Literal(null, StringType)))
val df = Seq(Tuple1(0)).toDF("a")
// Only check the types `lit` cannot handle
checkAnswer(
df.select(typedLit(Seq(1, 2, 3))),
Row(Seq(1, 2, 3)) :: Nil)
checkAnswer(
df.select(typedLit(Map("a" -> 1, "b" -> 2))),
Row(Map("a" -> 1, "b" -> 2)) :: Nil)
checkAnswer(
df.select(typedLit(("a", 2, 1.0))),
Row(Row("a", 2, 1.0)) :: Nil)
}
test("SPARK-31563: sql of InSet for UTF8String collection") {
val inSet = InSet(Literal("a"), Set("a", "b").map(UTF8String.fromString))
assert(inSet.sql === "('a' IN ('a', 'b'))")
}
def checkAnswer(
df: => DataFrame,
expectedAnswer: Seq[Row],
expectedSchema: StructType): Unit = {
checkAnswer(df, expectedAnswer)
assert(df.schema == expectedSchema)
}
private lazy val structType = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false)))
private lazy val structLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, null, 3)) :: Nil),
StructType(Seq(StructField("a", structType, nullable = false))))
private lazy val nullableStructLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Row(Row(1, null, 3)) :: Nil),
StructType(Seq(StructField("a", structType, nullable = true))))
private lazy val structLevel2: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, null, 3))) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = false))),
nullable = false))))
private lazy val nullableStructLevel2: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3))) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = true))),
nullable = true))))
private lazy val structLevel3: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(Row(1, null, 3)))) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = false))),
nullable = false))),
nullable = false))))
test("withField should throw an exception if called on a non-StructType column") {
intercept[AnalysisException] {
testData.withColumn("key", $"key".withField("a", lit(2)))
}.getMessage should include("struct argument should be struct type, got: int")
}
test("withField should throw an exception if either fieldName or col argument are null") {
intercept[IllegalArgumentException] {
structLevel1.withColumn("a", $"a".withField(null, lit(2)))
}.getMessage should include("fieldName cannot be null")
intercept[IllegalArgumentException] {
structLevel1.withColumn("a", $"a".withField("b", null))
}.getMessage should include("col cannot be null")
intercept[IllegalArgumentException] {
structLevel1.withColumn("a", $"a".withField(null, null))
}.getMessage should include("fieldName cannot be null")
}
test("withField should throw an exception if any intermediate structs don't exist") {
intercept[AnalysisException] {
structLevel2.withColumn("a", Symbol("a").withField("x.b", lit(2)))
}.getMessage should include("No such struct field x in a")
intercept[AnalysisException] {
structLevel3.withColumn("a", Symbol("a").withField("a.x.b", lit(2)))
}.getMessage should include("No such struct field x in a")
}
test("withField should throw an exception if intermediate field is not a struct") {
intercept[AnalysisException] {
structLevel1.withColumn("a", Symbol("a").withField("b.a", lit(2)))
}.getMessage should include("struct argument should be struct type, got: int")
}
test("withField should throw an exception if intermediate field reference is ambiguous") {
intercept[AnalysisException] {
val structLevel2: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = false),
StructField("a", structType, nullable = false))),
nullable = false))))
structLevel2.withColumn("a", Symbol("a").withField("a.b", lit(2)))
}.getMessage should include("Ambiguous reference to fields")
}
test("withField should add field with no name") {
checkAnswer(
structLevel1.withColumn("a", $"a".withField("", lit(4))),
Row(Row(1, null, 3, 4)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should add field to struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("d", lit(4))),
Row(Row(1, null, 3, 4)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should add field to nullable struct") {
checkAnswer(
nullableStructLevel1.withColumn("a", $"a".withField("d", lit(4))),
Row(null) :: Row(Row(1, null, 3, 4)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = true))))
}
test("withField should add field to nested nullable struct") {
checkAnswer(
nullableStructLevel2.withColumn("a", $"a".withField("a.d", lit(4))),
Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3, 4))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
test("withField should add null field to struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("d", lit(null).cast(IntegerType))),
Row(Row(1, null, 3, null)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = true))),
nullable = false))))
}
test("withField should add multiple fields to struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("d", lit(4)).withField("e", lit(5))),
Row(Row(1, null, 3, 4, 5)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false),
StructField("e", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should add multiple fields to nullable struct") {
checkAnswer(
nullableStructLevel1.withColumn("a", Symbol("a")
.withField("d", lit(4)).withField("e", lit(5))),
Row(null) :: Row(Row(1, null, 3, 4, 5)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false),
StructField("e", IntegerType, nullable = false))),
nullable = true))))
}
test("withField should add field to nested struct") {
Seq(
structLevel2.withColumn("a", Symbol("a").withField("a.d", lit(4))),
structLevel2.withColumn("a", Symbol("a").withField("a", $"a.a".withField("d", lit(4))))
).foreach { df =>
checkAnswer(
df,
Row(Row(Row(1, null, 3, 4))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("withField should add multiple fields to nested struct") {
Seq(
col("a").withField("a", $"a.a".withField("d", lit(4)).withField("e", lit(5))),
col("a").withField("a.d", lit(4)).withField("a.e", lit(5))
).foreach { column =>
checkAnswer(
structLevel2.select(column.as("a")),
Row(Row(Row(1, null, 3, 4, 5))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false),
StructField("e", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("withField should add multiple fields to nested nullable struct") {
Seq(
col("a").withField("a", $"a.a".withField("d", lit(4)).withField("e", lit(5))),
col("a").withField("a.d", lit(4)).withField("a.e", lit(5))
).foreach { column =>
checkAnswer(
nullableStructLevel2.select(column.as("a")),
Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3, 4, 5))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false),
StructField("e", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
}
test("withField should add field to deeply nested struct") {
checkAnswer(
structLevel3.withColumn("a", Symbol("a").withField("a.a.d", lit(4))),
Row(Row(Row(Row(1, null, 3, 4)))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = false))),
nullable = false))),
nullable = false))))
}
test("withField should replace field in struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("b", lit(2))),
Row(Row(1, 2, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should replace field in nullable struct") {
checkAnswer(
nullableStructLevel1.withColumn("a", Symbol("a").withField("b", lit("foo"))),
Row(null) :: Row(Row(1, "foo", 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))))
}
test("withField should replace field in nested nullable struct") {
checkAnswer(
nullableStructLevel2.withColumn("a", $"a".withField("a.b", lit("foo"))),
Row(null) :: Row(Row(null)) :: Row(Row(Row(1, "foo", 3))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
test("withField should replace field with null value in struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("c", lit(null).cast(IntegerType))),
Row(Row(1, null, null)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = true))),
nullable = false))))
}
test("withField should replace multiple fields in struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("a", lit(10)).withField("b", lit(20))),
Row(Row(10, 20, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should replace multiple fields in nullable struct") {
checkAnswer(
nullableStructLevel1.withColumn("a", Symbol("a").withField("a", lit(10))
.withField("b", lit(20))),
Row(null) :: Row(Row(10, 20, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))))
}
test("withField should replace field in nested struct") {
Seq(
structLevel2.withColumn("a", $"a".withField("a.b", lit(2))),
structLevel2.withColumn("a", Symbol("a").withField("a", $"a.a".withField("b", lit(2))))
).foreach { df =>
checkAnswer(
df,
Row(Row(Row(1, 2, 3))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("withField should replace multiple fields in nested struct") {
Seq(
col("a").withField("a", $"a.a".withField("a", lit(10)).withField("b", lit(20))),
col("a").withField("a.a", lit(10)).withField("a.b", lit(20))
).foreach { column =>
checkAnswer(
structLevel2.select(column.as("a")),
Row(Row(Row(10, 20, 3))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("withField should replace multiple fields in nested nullable struct") {
Seq(
col("a").withField("a", $"a.a".withField("a", lit(10)).withField("b", lit(20))),
col("a").withField("a.a", lit(10)).withField("a.b", lit(20))
).foreach { column =>
checkAnswer(
nullableStructLevel2.select(column.as("a")),
Row(null) :: Row(Row(null)) :: Row(Row(Row(10, 20, 3))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
}
test("withField should replace field in deeply nested struct") {
checkAnswer(
structLevel3.withColumn("a", $"a".withField("a.a.b", lit(2))),
Row(Row(Row(Row(1, 2, 3)))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))),
nullable = false))))
}
test("withField should replace all fields with given name in struct") {
val structLevel1 = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 2, 3)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("b", lit(100))),
Row(Row(1, 100, 100)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should replace fields in struct in given order") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("b", lit(2)).withField("b", lit(20))),
Row(Row(1, 20, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should add field and then replace same field in struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").withField("d", lit(4)).withField("d", lit(5))),
Row(Row(1, null, 3, 5)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false))),
nullable = false))))
}
test("withField should handle fields with dots in their name if correctly quoted") {
val df: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, null, 3))) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a.b", StructType(Seq(
StructField("c.d", IntegerType, nullable = false),
StructField("e.f", IntegerType, nullable = true),
StructField("g.h", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
checkAnswer(
df.withColumn("a", Symbol("a").withField("`a.b`.`e.f`", lit(2))),
Row(Row(Row(1, 2, 3))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a.b", StructType(Seq(
StructField("c.d", IntegerType, nullable = false),
StructField("e.f", IntegerType, nullable = false),
StructField("g.h", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
intercept[AnalysisException] {
df.withColumn("a", Symbol("a").withField("a.b.e.f", lit(2)))
}.getMessage should include("No such struct field a in a.b")
}
private lazy val mixedCaseStructLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 1)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false))),
nullable = false))))
test("withField should replace field in struct even if casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").withField("A", lit(2))),
Row(Row(2, 1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("A", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").withField("b", lit(2))),
Row(Row(1, 2)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
}
}
test("withField should add field to struct because casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").withField("A", lit(2))),
Row(Row(1, 1, 2)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false),
StructField("A", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").withField("b", lit(2))),
Row(Row(1, 1, 2)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
}
}
private lazy val mixedCaseStructLevel2: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, 1), Row(1, 1))) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false),
StructField("B", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
test("withField should replace nested field in struct even if casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkAnswer(
mixedCaseStructLevel2.withColumn("a", Symbol("a").withField("A.a", lit(2))),
Row(Row(Row(2, 1), Row(1, 1))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("A", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false),
StructField("B", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel2.withColumn("a", Symbol("a").withField("b.a", lit(2))),
Row(Row(Row(1, 1), Row(2, 1))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false),
StructField("b", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("withField should throw an exception because casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
intercept[AnalysisException] {
mixedCaseStructLevel2.withColumn("a", Symbol("a").withField("A.a", lit(2)))
}.getMessage should include("No such struct field A in a, B")
intercept[AnalysisException] {
mixedCaseStructLevel2.withColumn("a", Symbol("a").withField("b.a", lit(2)))
}.getMessage should include("No such struct field b in a, B")
}
}
test("withField user-facing examples") {
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
.select($"struct_col".withField("c", lit(3))),
Row(Row(1, 2, 3)))
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
.select($"struct_col".withField("b", lit(3))),
Row(Row(1, 3)))
checkAnswer(
sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col")
.select($"struct_col".withField("c", lit(3))),
Row(null))
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col")
.select($"struct_col".withField("b", lit(100))),
Row(Row(1, 100, 100)))
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
.select($"struct_col".withField("a.c", lit(3))),
Row(Row(Row(1, 2, 3))))
intercept[AnalysisException] {
sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col")
.select($"struct_col".withField("a.c", lit(3)))
}.getMessage should include("Ambiguous reference to fields")
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
.select($"struct_col".withField("a.c", lit(3)).withField("a.d", lit(4))),
Row(Row(Row(1, 2, 3, 4))))
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
.select($"struct_col".withField("a",
$"struct_col.a".withField("c", lit(3)).withField("d", lit(4)))),
Row(Row(Row(1, 2, 3, 4))))
}
test("SPARK-32641: extracting field from non-null struct column after withField should return " +
"field value") {
// extract newly added field
checkAnswer(
structLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("d")),
Row(4) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = false))))
// extract newly replaced field
checkAnswer(
structLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("a")),
Row(4) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = false))))
// add new field, extract another field from original struct
checkAnswer(
structLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("c")),
Row(3):: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = false))))
// replace field, extract another field from original struct
checkAnswer(
structLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("c")),
Row(3):: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = false))))
}
test("SPARK-32641: extracting field from null struct column after withField should return " +
"null if the original struct was null") {
val nullStructLevel1 = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Nil),
StructType(Seq(StructField("a", structType, nullable = true))))
// extract newly added field
checkAnswer(
nullStructLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("d")),
Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// extract newly replaced field
checkAnswer(
nullStructLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("a")),
Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// add new field, extract another field from original struct
checkAnswer(
nullStructLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("c")),
Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// replace field, extract another field from original struct
checkAnswer(
nullStructLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("c")),
Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
}
test("SPARK-32641: extracting field from nullable struct column which contains both null and " +
"non-null values after withField should return null if the original struct was null") {
val df = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, null, 3)) :: Row(null) :: Nil),
StructType(Seq(StructField("a", structType, nullable = true))))
// extract newly added field
checkAnswer(
df.withColumn("a", $"a".withField("d", lit(4)).getField("d")),
Row(4) :: Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// extract newly replaced field
checkAnswer(
df.withColumn("a", $"a".withField("a", lit(4)).getField("a")),
Row(4) :: Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// add new field, extract another field from original struct
checkAnswer(
df.withColumn("a", $"a".withField("d", lit(4)).getField("c")),
Row(3) :: Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
// replace field, extract another field from original struct
checkAnswer(
df.withColumn("a", $"a".withField("a", lit(4)).getField("c")),
Row(3) :: Row(null) :: Nil,
StructType(Seq(StructField("a", IntegerType, nullable = true))))
}
test("SPARK-35213: chained withField operations should have correct schema for new columns") {
val df = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Nil),
StructType(Seq(StructField("data", NullType))))
checkAnswer(
df.withColumn("data", struct()
.withField("a", struct())
.withField("b", struct())
.withField("a.aa", lit("aa1"))
.withField("b.ba", lit("ba1"))
.withField("a.ab", lit("ab1"))),
Row(Row(Row("aa1", "ab1"), Row("ba1"))) :: Nil,
StructType(Seq(
StructField("data", StructType(Seq(
StructField("a", StructType(Seq(
StructField("aa", StringType, nullable = false),
StructField("ab", StringType, nullable = false)
)), nullable = false),
StructField("b", StructType(Seq(
StructField("ba", StringType, nullable = false)
)), nullable = false)
)), nullable = false)
))
)
}
test("SPARK-35213: optimized withField operations should maintain correct nested struct " +
"ordering") {
val df = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Nil),
StructType(Seq(StructField("data", NullType))))
checkAnswer(
df.withColumn("data", struct()
.withField("a", struct().withField("aa", lit("aa1")))
.withField("b", struct().withField("ba", lit("ba1")))
)
.withColumn("data", col("data").withField("b.bb", lit("bb1")))
.withColumn("data", col("data").withField("a.ab", lit("ab1"))),
Row(Row(Row("aa1", "ab1"), Row("ba1", "bb1"))) :: Nil,
StructType(Seq(
StructField("data", StructType(Seq(
StructField("a", StructType(Seq(
StructField("aa", StringType, nullable = false),
StructField("ab", StringType, nullable = false)
)), nullable = false),
StructField("b", StructType(Seq(
StructField("ba", StringType, nullable = false),
StructField("bb", StringType, nullable = false)
)), nullable = false)
)), nullable = false)
))
)
}
test("dropFields should throw an exception if called on a non-StructType column") {
intercept[AnalysisException] {
testData.withColumn("key", $"key".dropFields("a"))
}.getMessage should include("struct argument should be struct type, got: int")
}
test("dropFields should throw an exception if fieldName argument is null") {
intercept[IllegalArgumentException] {
structLevel1.withColumn("a", $"a".dropFields(null))
}.getMessage should include("fieldName cannot be null")
}
test("dropFields should throw an exception if any intermediate structs don't exist") {
intercept[AnalysisException] {
structLevel2.withColumn("a", Symbol("a").dropFields("x.b"))
}.getMessage should include("No such struct field x in a")
intercept[AnalysisException] {
structLevel3.withColumn("a", Symbol("a").dropFields("a.x.b"))
}.getMessage should include("No such struct field x in a")
}
test("dropFields should throw an exception if intermediate field is not a struct") {
intercept[AnalysisException] {
structLevel1.withColumn("a", Symbol("a").dropFields("b.a"))
}.getMessage should include("struct argument should be struct type, got: int")
}
test("dropFields should throw an exception if intermediate field reference is ambiguous") {
intercept[AnalysisException] {
val structLevel2: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = false),
StructField("a", structType, nullable = false))),
nullable = false))))
structLevel2.withColumn("a", Symbol("a").dropFields("a.b"))
}.getMessage should include("Ambiguous reference to fields")
}
test("dropFields should drop field in struct") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").dropFields("b")),
Row(Row(1, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
}
test("dropFields should drop field in nullable struct") {
checkAnswer(
nullableStructLevel1.withColumn("a", $"a".dropFields("b")),
Row(null) :: Row(Row(1, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))))
}
test("dropFields should drop multiple fields in struct") {
Seq(
structLevel1.withColumn("a", $"a".dropFields("b", "c")),
structLevel1.withColumn("a", Symbol("a").dropFields("b").dropFields("c"))
).foreach { df =>
checkAnswer(
df,
Row(Row(1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = false))))
}
}
test("dropFields should throw an exception if no fields will be left in struct") {
intercept[AnalysisException] {
structLevel1.withColumn("a", Symbol("a").dropFields("a", "b", "c"))
}.getMessage should include("cannot drop all fields in struct")
}
test("dropFields should drop field with no name in struct") {
val structType = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("", IntegerType, nullable = false)))
val structLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 2)) :: Nil),
StructType(Seq(StructField("a", structType, nullable = false))))
checkAnswer(
structLevel1.withColumn("a", $"a".dropFields("")),
Row(Row(1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = false))))
}
test("dropFields should drop field in nested struct") {
checkAnswer(
structLevel2.withColumn("a", Symbol("a").dropFields("a.b")),
Row(Row(Row(1, 3))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
test("dropFields should drop multiple fields in nested struct") {
checkAnswer(
structLevel2.withColumn("a", Symbol("a").dropFields("a.b", "a.c")),
Row(Row(Row(1))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
test("dropFields should drop field in nested nullable struct") {
checkAnswer(
nullableStructLevel2.withColumn("a", $"a".dropFields("a.b")),
Row(null) :: Row(Row(null)) :: Row(Row(Row(1, 3))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
test("dropFields should drop multiple fields in nested nullable struct") {
checkAnswer(
nullableStructLevel2.withColumn("a", $"a".dropFields("a.b", "a.c")),
Row(null) :: Row(Row(null)) :: Row(Row(Row(1))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = true))),
nullable = true))))
}
test("dropFields should drop field in deeply nested struct") {
checkAnswer(
structLevel3.withColumn("a", Symbol("a").dropFields("a.a.b")),
Row(Row(Row(Row(1, 3)))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))),
nullable = false))))
}
test("dropFields should drop all fields with given name in struct") {
val structLevel1 = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 2, 3)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
structLevel1.withColumn("a", Symbol("a").dropFields("b")),
Row(Row(1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = false))))
}
test("dropFields should drop field in struct even if casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").dropFields("A")),
Row(Row(1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("B", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").dropFields("b")),
Row(Row(1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false))),
nullable = false))))
}
}
test("dropFields should not drop field in struct because casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").dropFields("A")),
Row(Row(1, 1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel1.withColumn("a", Symbol("a").dropFields("b")),
Row(Row(1, 1)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("B", IntegerType, nullable = false))),
nullable = false))))
}
}
test("dropFields should drop nested field in struct even if casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkAnswer(
mixedCaseStructLevel2.withColumn("a", Symbol("a").dropFields("A.a")),
Row(Row(Row(1), Row(1, 1))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("A", StructType(Seq(
StructField("b", IntegerType, nullable = false))),
nullable = false),
StructField("B", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
checkAnswer(
mixedCaseStructLevel2.withColumn("a", Symbol("a").dropFields("b.a")),
Row(Row(Row(1, 1), Row(1))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false),
StructField("b", StructType(Seq(
StructField("b", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
}
test("dropFields should throw an exception because casing is different") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
intercept[AnalysisException] {
mixedCaseStructLevel2.withColumn("a", Symbol("a").dropFields("A.a"))
}.getMessage should include("No such struct field A in a, B")
intercept[AnalysisException] {
mixedCaseStructLevel2.withColumn("a", Symbol("a").dropFields("b.a"))
}.getMessage should include("No such struct field b in a, B")
}
}
test("dropFields should drop only fields that exist") {
checkAnswer(
structLevel1.withColumn("a", Symbol("a").dropFields("d")),
Row(Row(1, null, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
structLevel1.withColumn("a", Symbol("a").dropFields("b", "d")),
Row(Row(1, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
structLevel2.withColumn("a", $"a".dropFields("a.b", "a.d")),
Row(Row(Row(1, 3))) :: Nil,
StructType(
Seq(StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
}
test("dropFields should drop multiple fields at arbitrary levels of nesting in a single call") {
val df: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil),
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", structType, nullable = false),
StructField("b", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
df.withColumn("a", $"a".dropFields("a.b", "b")),
Row(Row(Row(1, 3))) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("c", IntegerType, nullable = false))), nullable = false))),
nullable = false))))
}
test("dropFields user-facing examples") {
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
.select($"struct_col".dropFields("b")),
Row(Row(1)))
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
.select($"struct_col".dropFields("c")),
Row(Row(1, 2)))
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2, 'c', 3) struct_col")
.select($"struct_col".dropFields("b", "c")),
Row(Row(1)))
intercept[AnalysisException] {
sql("SELECT named_struct('a', 1, 'b', 2) struct_col")
.select($"struct_col".dropFields("a", "b"))
}.getMessage should include("cannot drop all fields in struct")
checkAnswer(
sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col")
.select($"struct_col".dropFields("b")),
Row(null))
checkAnswer(
sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col")
.select($"struct_col".dropFields("b")),
Row(Row(1)))
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col")
.select($"struct_col".dropFields("a.b")),
Row(Row(Row(1))))
intercept[AnalysisException] {
sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col")
.select($"struct_col".dropFields("a.c"))
}.getMessage should include("Ambiguous reference to fields")
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2, 'c', 3)) struct_col")
.select($"struct_col".dropFields("a.b", "a.c")),
Row(Row(Row(1))))
checkAnswer(
sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2, 'c', 3)) struct_col")
.select($"struct_col".withField("a", $"struct_col.a".dropFields("b", "c"))),
Row(Row(Row(1))))
}
test("should correctly handle different dropField + withField + getField combinations") {
val structType = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = false)))
val structLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 2)) :: Nil),
StructType(Seq(StructField("a", structType, nullable = false))))
val nullStructLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(null) :: Nil),
StructType(Seq(StructField("a", structType, nullable = true))))
val nullableStructLevel1: DataFrame = spark.createDataFrame(
sparkContext.parallelize(Row(Row(1, 2)) :: Row(null) :: Nil),
StructType(Seq(StructField("a", structType, nullable = true))))
def check(
fieldOps: Column => Column,
getFieldName: String,
expectedValue: Option[Int]): Unit = {
def query(df: DataFrame): DataFrame =
df.select(fieldOps(col("a")).getField(getFieldName).as("res"))
checkAnswer(
query(structLevel1),
Row(expectedValue.orNull) :: Nil,
StructType(Seq(StructField("res", IntegerType, nullable = expectedValue.isEmpty))))
checkAnswer(
query(nullStructLevel1),
Row(null) :: Nil,
StructType(Seq(StructField("res", IntegerType, nullable = true))))
checkAnswer(
query(nullableStructLevel1),
Row(expectedValue.orNull) :: Row(null) :: Nil,
StructType(Seq(StructField("res", IntegerType, nullable = true))))
}
// add attribute, extract an attribute from the original struct
check(_.withField("c", lit(3)), "a", Some(1))
check(_.withField("c", lit(3)), "b", Some(2))
// add attribute, extract added attribute
check(_.withField("c", lit(3)), "c", Some(3))
check(_.withField("c", col("a.a")), "c", Some(1))
check(_.withField("c", col("a.b")), "c", Some(2))
check(_.withField("c", lit(null).cast(IntegerType)), "c", None)
// replace attribute, extract an attribute from the original struct
check(_.withField("b", lit(3)), "a", Some(1))
check(_.withField("a", lit(3)), "b", Some(2))
// replace attribute, extract replaced attribute
check(_.withField("b", lit(3)), "b", Some(3))
check(_.withField("b", lit(null).cast(IntegerType)), "b", None)
check(_.withField("a", lit(3)), "a", Some(3))
check(_.withField("a", lit(null).cast(IntegerType)), "a", None)
// drop attribute, extract an attribute from the original struct
check(_.dropFields("b"), "a", Some(1))
check(_.dropFields("a"), "b", Some(2))
// drop attribute, add attribute, extract an attribute from the original struct
check(_.dropFields("b").withField("c", lit(3)), "a", Some(1))
check(_.dropFields("a").withField("c", lit(3)), "b", Some(2))
// drop attribute, add another attribute, extract added attribute
check(_.dropFields("a").withField("c", lit(3)), "c", Some(3))
check(_.dropFields("b").withField("c", lit(3)), "c", Some(3))
// add attribute, drop attribute, extract an attribute from the original struct
check(_.withField("c", lit(3)).dropFields("a"), "b", Some(2))
check(_.withField("c", lit(3)).dropFields("b"), "a", Some(1))
// add attribute, drop another attribute, extract added attribute
check(_.withField("c", lit(3)).dropFields("a"), "c", Some(3))
check(_.withField("c", lit(3)).dropFields("b"), "c", Some(3))
// replace attribute, drop same attribute, extract an attribute from the original struct
check(_.withField("b", lit(3)).dropFields("b"), "a", Some(1))
check(_.withField("a", lit(3)).dropFields("a"), "b", Some(2))
// add attribute, drop same attribute, extract an attribute from the original struct
check(_.withField("c", lit(3)).dropFields("c"), "a", Some(1))
check(_.withField("c", lit(3)).dropFields("c"), "b", Some(2))
// add attribute, drop another attribute, extract added attribute
check(_.withField("b", lit(3)).dropFields("a"), "b", Some(3))
check(_.withField("a", lit(3)).dropFields("b"), "a", Some(3))
check(_.withField("b", lit(null).cast(IntegerType)).dropFields("a"), "b", None)
check(_.withField("a", lit(null).cast(IntegerType)).dropFields("b"), "a", None)
// drop attribute, add same attribute, extract added attribute
check(_.dropFields("b").withField("b", lit(3)), "b", Some(3))
check(_.dropFields("a").withField("a", lit(3)), "a", Some(3))
check(_.dropFields("b").withField("b", lit(null).cast(IntegerType)), "b", None)
check(_.dropFields("a").withField("a", lit(null).cast(IntegerType)), "a", None)
check(_.dropFields("c").withField("c", lit(3)), "c", Some(3))
// add attribute, drop same attribute, add same attribute again, extract added attribute
check(_.withField("c", lit(3)).dropFields("c").withField("c", lit(4)), "c", Some(4))
}
test("should move field up one level of nesting") {
// move a field up one level
checkAnswer(
nullableStructLevel2.select(
col("a").withField("c", col("a.a.c")).dropFields("a.c").as("res")),
Row(null) :: Row(Row(null, null)) :: Row(Row(Row(1, null), 3)) :: Nil,
StructType(Seq(
StructField("res", StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true))),
nullable = true),
StructField("c", IntegerType, nullable = true))),
nullable = true))))
// move a field up one level and then extract it
checkAnswer(
nullableStructLevel2.select(
col("a").withField("c", col("a.a.c")).dropFields("a.c").getField("c").as("res")),
Row(null) :: Row(null) :: Row(3) :: Nil,
StructType(Seq(StructField("res", IntegerType, nullable = true))))
}
test("should be able to refer to newly added nested column") {
intercept[AnalysisException] {
structLevel1.select($"a".withField("d", lit(4)).withField("e", $"a.d" + 1).as("a"))
}.getMessage should include("No such struct field d in a, b, c")
checkAnswer(
structLevel1
.select($"a".withField("d", lit(4)).as("a"))
.select($"a".withField("e", $"a.d" + 1).as("a")),
Row(Row(1, null, 3, 4, 5)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("c", IntegerType, nullable = false),
StructField("d", IntegerType, nullable = false),
StructField("e", IntegerType, nullable = false))),
nullable = false))))
}
test("should be able to drop newly added nested column") {
Seq(
structLevel1.select($"a".withField("d", lit(4)).dropFields("d").as("a")),
structLevel1
.select($"a".withField("d", lit(4)).as("a"))
.select($"a".dropFields("d").as("a"))
).foreach { query =>
checkAnswer(
query,
Row(Row(1, null, 3)) :: Nil,
StructType(Seq(
StructField("a", structType, nullable = false))))
}
}
test("should still be able to refer to dropped column within the same select statement") {
// we can still access the nested column even after dropping it within the same select statement
checkAnswer(
structLevel1.select($"a".dropFields("c").withField("z", $"a.c").as("a")),
Row(Row(1, null, 3)) :: Nil,
StructType(Seq(
StructField("a", StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", IntegerType, nullable = true),
StructField("z", IntegerType, nullable = false))),
nullable = false))))
// we can't access the nested column in subsequent select statement after dropping it in a
// previous select statement
intercept[AnalysisException] {
structLevel1
.select($"a".dropFields("c").as("a"))
.select($"a".withField("z", $"a.c")).as("a")
}.getMessage should include("No such struct field c in a, b")
}
test("nestedDf should generate nested DataFrames") {
checkAnswer(
emptyNestedDf(1, 1, nullable = false),
Seq.empty[Row],
StructType(Seq(StructField("nested0Col0", StructType(Seq(
StructField("nested1Col0", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
emptyNestedDf(1, 2, nullable = false),
Seq.empty[Row],
StructType(Seq(StructField("nested0Col0", StructType(Seq(
StructField("nested1Col0", IntegerType, nullable = false),
StructField("nested1Col1", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
emptyNestedDf(2, 1, nullable = false),
Seq.empty[Row],
StructType(Seq(StructField("nested0Col0", StructType(Seq(
StructField("nested1Col0", StructType(Seq(
StructField("nested2Col0", IntegerType, nullable = false))),
nullable = false))),
nullable = false))))
checkAnswer(
emptyNestedDf(2, 2, nullable = false),
Seq.empty[Row],
StructType(Seq(StructField("nested0Col0", StructType(Seq(
StructField("nested1Col0", StructType(Seq(
StructField("nested2Col0", IntegerType, nullable = false),
StructField("nested2Col1", IntegerType, nullable = false))),
nullable = false),
StructField("nested1Col1", IntegerType, nullable = false))),
nullable = false))))
checkAnswer(
emptyNestedDf(2, 2, nullable = true),
Seq.empty[Row],
StructType(Seq(StructField("nested0Col0", StructType(Seq(
StructField("nested1Col0", StructType(Seq(
StructField("nested2Col0", IntegerType, nullable = false),
StructField("nested2Col1", IntegerType, nullable = false))),
nullable = true),
StructField("nested1Col1", IntegerType, nullable = false))),
nullable = true))))
}
Seq(Performant, NonPerformant).foreach { method =>
Seq(false, true).foreach { nullable =>
test(s"should add and drop 1 column at each depth of nesting using ${method.name} method, " +
s"nullable = $nullable") {
val maxDepth = 3
// dataframe with nested*Col0 to nested*Col2 at each depth
val inputDf = emptyNestedDf(maxDepth, 3, nullable)
// add nested*Col3 and drop nested*Col2
val modifiedColumn = method(
column = col(nestedColName(0, 0)),
numsToAdd = Seq(3),
numsToDrop = Seq(2),
maxDepth = maxDepth
).as(nestedColName(0, 0))
val resultDf = inputDf.select(modifiedColumn)
// dataframe with nested*Col0, nested*Col1, nested*Col3 at each depth
val expectedDf = {
val colNums = Seq(0, 1, 3)
val nestedColumnDataType = nestedStructType(colNums, nullable, maxDepth)
spark.createDataFrame(
spark.sparkContext.emptyRDD[Row],
StructType(Seq(StructField(nestedColName(0, 0), nestedColumnDataType, nullable))))
}
checkAnswer(resultDf, expectedDf.collect(), expectedDf.schema)
}
}
}
test("assert_true") {
// assert_true(condition, errMsgCol)
val booleanDf = Seq((true), (false)).toDF("cond")
checkAnswer(
booleanDf.filter("cond = true").select(assert_true($"cond")),
Row(null) :: Nil
)
val e1 = intercept[SparkException] {
booleanDf.select(assert_true($"cond", lit(null.asInstanceOf[String]))).collect()
}
assert(e1.getCause.isInstanceOf[RuntimeException])
assert(e1.getCause.getMessage == null)
val nullDf = Seq(("first row", None), ("second row", Some(true))).toDF("n", "cond")
checkAnswer(
nullDf.filter("cond = true").select(assert_true($"cond", $"cond")),
Row(null) :: Nil
)
val e2 = intercept[SparkException] {
nullDf.select(assert_true($"cond", $"n")).collect()
}
assert(e2.getCause.isInstanceOf[RuntimeException])
assert(e2.getCause.getMessage == "first row")
// assert_true(condition)
val intDf = Seq((0, 1)).toDF("a", "b")
checkAnswer(intDf.select(assert_true($"a" < $"b")), Row(null) :: Nil)
val e3 = intercept[SparkException] {
intDf.select(assert_true($"a" > $"b")).collect()
}
assert(e3.getCause.isInstanceOf[RuntimeException])
assert(e3.getCause.getMessage == "'('a > 'b)' is not true!")
}
test("raise_error") {
val strDf = Seq(("hello")).toDF("a")
val e1 = intercept[SparkException] {
strDf.select(raise_error(lit(null.asInstanceOf[String]))).collect()
}
assert(e1.getCause.isInstanceOf[RuntimeException])
assert(e1.getCause.getMessage == null)
val e2 = intercept[SparkException] {
strDf.select(raise_error($"a")).collect()
}
assert(e2.getCause.isInstanceOf[RuntimeException])
assert(e2.getCause.getMessage == "hello")
}
test("SPARK-34677: negate/add/subtract year-month and day-time intervals") {
import testImplicits._
val df = Seq((Period.ofMonths(10), Duration.ofDays(10), Period.ofMonths(1), Duration.ofDays(1)))
.toDF("year-month-A", "day-time-A", "year-month-B", "day-time-B")
val negatedDF = df.select(-$"year-month-A", -$"day-time-A")
checkAnswer(negatedDF, Row(Period.ofMonths(-10), Duration.ofDays(-10)))
val addDF = df.select($"year-month-A" + $"year-month-B", $"day-time-A" + $"day-time-B")
checkAnswer(addDF, Row(Period.ofMonths(11), Duration.ofDays(11)))
val subDF = df.select($"year-month-A" - $"year-month-B", $"day-time-A" - $"day-time-B")
checkAnswer(subDF, Row(Period.ofMonths(9), Duration.ofDays(9)))
}
test("SPARK-34721: add a year-month interval to a date") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingTimezonesIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) {
Seq(
(LocalDate.of(1900, 10, 1), Period.ofMonths(0)) -> LocalDate.of(1900, 10, 1),
(LocalDate.of(1970, 1, 1), Period.ofMonths(-1)) -> LocalDate.of(1969, 12, 1),
(LocalDate.of(2021, 3, 11), Period.ofMonths(1)) -> LocalDate.of(2021, 4, 11),
(LocalDate.of(2020, 12, 31), Period.ofMonths(2)) -> LocalDate.of(2021, 2, 28),
(LocalDate.of(2021, 5, 31), Period.ofMonths(-3)) -> LocalDate.of(2021, 2, 28),
(LocalDate.of(2020, 2, 29), Period.ofYears(1)) -> LocalDate.of(2021, 2, 28),
(LocalDate.of(1, 1, 1), Period.ofYears(2020)) -> LocalDate.of(2021, 1, 1)
).foreach { case ((date, period), result) =>
val df = Seq((date, period)).toDF("date", "interval")
checkAnswer(
df.select($"date" + $"interval", $"interval" + $"date"),
Row(result, result))
}
}
}
val e = intercept[SparkException] {
Seq((LocalDate.of(2021, 3, 11), Period.ofMonths(Int.MaxValue)))
.toDF("date", "interval")
.select($"date" + $"interval")
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("integer overflow"))
}
}
test("SPARK-34721: subtract a year-month interval from a date") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingTimezonesIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) {
Seq(
(LocalDate.of(1582, 10, 4), Period.ofMonths(0)) -> LocalDate.of(1582, 10, 4),
(LocalDate.of(1582, 10, 15), Period.ofMonths(1)) -> LocalDate.of(1582, 9, 15),
(LocalDate.of(1, 1, 1), Period.ofMonths(-1)) -> LocalDate.of(1, 2, 1),
(LocalDate.of(9999, 10, 31), Period.ofMonths(-2)) -> LocalDate.of(9999, 12, 31),
(LocalDate.of(2021, 5, 31), Period.ofMonths(3)) -> LocalDate.of(2021, 2, 28),
(LocalDate.of(2021, 2, 28), Period.ofYears(1)) -> LocalDate.of(2020, 2, 28),
(LocalDate.of(2020, 2, 29), Period.ofYears(4)) -> LocalDate.of(2016, 2, 29)
).foreach { case ((date, period), result) =>
val df = Seq((date, period)).toDF("date", "interval")
checkAnswer(df.select($"date" - $"interval"), Row(result))
}
}
}
val e = intercept[SparkException] {
Seq((LocalDate.of(2021, 3, 11), Period.ofMonths(Int.MaxValue)))
.toDF("date", "interval")
.select($"date" - $"interval")
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("integer overflow"))
}
}
test("SPARK-34739: add a year-month interval to a timestamp") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingZoneIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) {
Seq(
(LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), Period.ofMonths(0)) ->
LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000),
(LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000), Period.ofMonths(-1)) ->
LocalDateTime.of(1969, 12, 1, 0, 0, 0, 1000),
(LocalDateTime.of(2021, 3, 14, 1, 2, 3, 0), Period.ofMonths(1)) ->
LocalDateTime.of(2021, 4, 14, 1, 2, 3, 0),
(LocalDateTime.of(2020, 12, 31, 23, 59, 59, 999999000), Period.ofMonths(2)) ->
LocalDateTime.of(2021, 2, 28, 23, 59, 59, 999999000),
(LocalDateTime.of(2021, 5, 31, 0, 0, 1, 0), Period.ofMonths(-3)) ->
LocalDateTime.of(2021, 2, 28, 0, 0, 1, 0),
(LocalDateTime.of(2020, 2, 29, 12, 13, 14), Period.ofYears(1)) ->
LocalDateTime.of(2021, 2, 28, 12, 13, 14),
(LocalDateTime.of(1, 1, 1, 1, 1, 1, 1000), Period.ofYears(2020)) ->
LocalDateTime.of(2021, 1, 1, 1, 1, 1, 1000)
).foreach { case ((ldt, period), expected) =>
val df = Seq((ldt.atZone(zid).toInstant, period)).toDF("ts", "interval")
val result = expected.atZone(zid).toInstant
checkAnswer(df.select($"ts" + $"interval", $"interval" + $"ts"), Row(result, result))
}
}
}
val e = intercept[SparkException] {
Seq((Instant.parse("2021-03-14T18:55:00Z"), Period.ofMonths(Int.MaxValue)))
.toDF("ts", "interval")
.select($"ts" + $"interval")
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("long overflow"))
}
}
test("SPARK-34739: subtract a year-month interval from a timestamp") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingZoneIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) {
Seq(
(LocalDateTime.of(1582, 10, 4, 0, 0, 0), Period.ofMonths(0)) ->
LocalDateTime.of(1582, 10, 4, 0, 0, 0),
(LocalDateTime.of(1582, 10, 15, 23, 59, 59, 999999000), Period.ofMonths(1)) ->
LocalDateTime.of(1582, 9, 15, 23, 59, 59, 999999000),
(LocalDateTime.of(1, 1, 1, 1, 1, 1, 1000), Period.ofMonths(-1)) ->
LocalDateTime.of(1, 2, 1, 1, 1, 1, 1000),
(LocalDateTime.of(9999, 10, 31, 23, 59, 59, 999000000), Period.ofMonths(-2)) ->
LocalDateTime.of(9999, 12, 31, 23, 59, 59, 999000000),
(LocalDateTime.of(2021, 5, 31, 0, 0, 0, 1000), Period.ofMonths(3)) ->
LocalDateTime.of(2021, 2, 28, 0, 0, 0, 1000),
(LocalDateTime.of(2021, 2, 28, 11, 12, 13, 123456000), Period.ofYears(1)) ->
LocalDateTime.of(2020, 2, 28, 11, 12, 13, 123456000),
(LocalDateTime.of(2020, 2, 29, 1, 2, 3, 5000), Period.ofYears(4)) ->
LocalDateTime.of(2016, 2, 29, 1, 2, 3, 5000)
).foreach { case ((ldt, period), expected) =>
val df = Seq((ldt.atZone(zid).toInstant, period)).toDF("ts", "interval")
checkAnswer(df.select($"ts" - $"interval"), Row(expected.atZone(zid).toInstant))
}
}
}
val e = intercept[SparkException] {
Seq((Instant.parse("2021-03-14T18:55:00Z"), Period.ofMonths(Int.MaxValue)))
.toDF("ts", "interval")
.select($"ts" - $"interval")
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("long overflow"))
}
}
test("SPARK-34761, SPARK-34903: add/subtract a day-time interval to/from a timestamp") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingZoneIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) {
Seq(
(LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), Duration.ofDays(0)) ->
LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000),
(LocalDateTime.of(1970, 1, 1, 0, 0, 0, 100000000), Duration.ofDays(-1)) ->
LocalDateTime.of(1969, 12, 31, 0, 0, 0, 100000000),
(LocalDateTime.of(2021, 3, 14, 1, 2, 3), Duration.ofDays(1)) ->
LocalDateTime.of(2021, 3, 15, 1, 2, 3),
(LocalDateTime.of(2020, 12, 31, 23, 59, 59, 999000000),
Duration.ofDays(2 * 30).plusMillis(1)) -> LocalDateTime.of(2021, 3, 2, 0, 0, 0),
(LocalDateTime.of(2020, 3, 16, 0, 0, 0, 1000), Duration.of(-1, ChronoUnit.MICROS)) ->
LocalDateTime.of(2020, 3, 16, 0, 0, 0),
(LocalDateTime.of(2020, 2, 29, 12, 13, 14), Duration.ofDays(365)) ->
LocalDateTime.of(2021, 2, 28, 12, 13, 14),
(LocalDateTime.of(1582, 10, 4, 1, 2, 3, 40000000),
Duration.ofDays(10).plusMillis(60)) ->
LocalDateTime.of(1582, 10, 14, 1, 2, 3, 100000000)
).foreach { case ((ldt, duration), expected) =>
val ts = ldt.atZone(zid).toInstant
val result = expected.atZone(zid).toInstant
val df = Seq((ts, duration, result)).toDF("ts", "interval", "result")
checkAnswer(
df.select($"ts" + $"interval", $"interval" + $"ts", $"result" - $"interval",
$"result" - $"ts"),
Row(result, result, ts, duration))
}
}
}
Seq(
"2021-03-16T18:56:00Z" -> "ts + i",
"1900-03-16T18:56:00Z" -> "ts - i").foreach { case (instant, op) =>
val e = intercept[SparkException] {
Seq(
(Instant.parse(instant), Duration.of(Long.MaxValue, ChronoUnit.MICROS)))
.toDF("ts", "i")
.selectExpr(op)
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("long overflow"))
}
}
}
test("SPARK-34824: multiply year-month interval by numeric") {
checkAnswer(
Seq((Period.ofYears(0), 0)).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofYears(0)))
checkAnswer(
Seq((Period.ofMonths(0), 10.toByte)).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofMonths(0)))
checkAnswer(
Seq((Period.ofMonths(5), 3.toShort)).toDF("i", "n").select($"n" * $"i"),
Row(Period.ofYears(1).plusMonths(3)))
checkAnswer(
Seq((Period.ofYears(1000), "2")).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofYears(2000)))
checkAnswer(
Seq((Period.ofMonths(1), 12L)).toDF("i", "n").select($"n" * $"i"),
Row(Period.ofYears(1)))
checkAnswer(
Seq((Period.ofYears(100).plusMonths(11), Short.MaxValue)).toDF("i", "n").select($"n" * $"i"),
Row(Period.ofYears(100).plusMonths(11).multipliedBy(Short.MaxValue).normalized()))
checkAnswer(
Seq((Period.ofMonths(-1), 0.499f)).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofMonths(0)))
checkAnswer(
Seq((Period.ofMonths(10000000), 0.0000001d)).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofMonths(1)))
checkAnswer(
Seq((Period.ofMonths(-10000000), BigDecimal(0.0000001d))).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofMonths(-1)))
checkAnswer(
Seq((Period.ofMonths(-1), BigDecimal(0.5))).toDF("i", "n").select($"i" * $"n"),
Row(Period.ofMonths(-1)))
val e = intercept[SparkException] {
Seq((Period.ofYears(9999), Long.MinValue)).toDF("i", "n").select($"n" * $"i").collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("overflow"))
}
test("SPARK-34850: multiply day-time interval by numeric") {
checkAnswer(
Seq((Duration.ofDays(0), 0)).toDF("i", "n").select($"i" * $"n"),
Row(Duration.ofDays(0)))
checkAnswer(
Seq((Duration.ofDays(0), 10.toByte)).toDF("i", "n").select($"i" * $"n"),
Row(Duration.ofDays(0)))
checkAnswer(
Seq((Duration.ofHours(12), 3.toShort)).toDF("i", "n").select($"n" * $"i"),
Row(Duration.ofDays(1).plusHours(12)))
checkAnswer(
Seq((Duration.ofMinutes(1000), "2")).toDF("i", "n").select($"i" * $"n"),
Row(Duration.ofMinutes(2000)))
checkAnswer(
Seq((Duration.ofSeconds(1), 60L)).toDF("i", "n").select($"n" * $"i"),
Row(Duration.ofMinutes(1)))
checkAnswer(
Seq((Duration.of(-1, ChronoUnit.MICROS), 0.499f)).toDF("i", "n").select($"i" * $"n"),
Row(Duration.of(0, ChronoUnit.MICROS)))
checkAnswer(
Seq((Duration.of(-1, ChronoUnit.MICROS), 0.51d)).toDF("i", "n").select($"i" * $"n"),
Row(Duration.of(-1, ChronoUnit.MICROS)))
checkAnswer(
Seq((Duration.of(-10000000, ChronoUnit.MICROS), BigDecimal(0.0000001d)))
.toDF("i", "n").select($"i" * $"n"),
Row(Duration.of(-1, ChronoUnit.MICROS)))
val e = intercept[SparkException] {
Seq((Duration.ofDays(9999), Long.MinValue)).toDF("i", "n").select($"n" * $"i").collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("overflow"))
}
test("SPARK-34868: divide year-month interval by numeric") {
checkAnswer(
Seq((Period.ofYears(0), 10.toByte)).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofYears(0)))
checkAnswer(
Seq((Period.ofYears(10), 3.toShort)).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofYears(3).plusMonths(4)))
checkAnswer(
Seq((Period.ofYears(1000), "2")).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofYears(500)))
checkAnswer(
Seq((Period.ofMonths(1).multipliedBy(Int.MaxValue), Int.MaxValue))
.toDF("i", "n").select($"i" / $"n"),
Row(Period.ofMonths(1)))
checkAnswer(
Seq((Period.ofYears(-1), 12L)).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofMonths(-1)))
checkAnswer(
Seq((Period.ofMonths(-1), 0.499f)).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofMonths(-2)))
checkAnswer(
Seq((Period.ofMonths(10000000), 10000000d)).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofMonths(1)))
checkAnswer(
Seq((Period.ofMonths(-1), BigDecimal(0.5))).toDF("i", "n").select($"i" / $"n"),
Row(Period.ofMonths(-2)))
val e = intercept[SparkException] {
Seq((Period.ofYears(9999), 0)).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("divide by zero"))
val e2 = intercept[SparkException] {
Seq((Period.ofYears(9999), 0d)).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e2.isInstanceOf[ArithmeticException])
assert(e2.getMessage.contains("divide by zero"))
val e3 = intercept[SparkException] {
Seq((Period.ofYears(9999), BigDecimal(0))).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e3.isInstanceOf[ArithmeticException])
assert(e3.getMessage.contains("divide by zero"))
}
test("SPARK-34875: divide day-time interval by numeric") {
checkAnswer(
Seq((Duration.ZERO, 10.toByte)).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ZERO))
checkAnswer(
Seq((Duration.ofDays(10), 3.toShort)).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ofDays(10).dividedBy(3)))
checkAnswer(
Seq((Duration.ofHours(1000), "2")).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ofHours(500)))
checkAnswer(
Seq((Duration.of(1, ChronoUnit.MICROS).multipliedBy(Long.MaxValue), Long.MaxValue))
.toDF("i", "n").select($"i" / $"n"),
Row(Duration.of(1, ChronoUnit.MICROS)))
checkAnswer(
Seq((Duration.ofMinutes(-1), 60L)).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ofSeconds(-1)))
checkAnswer(
Seq((Duration.ofDays(-1), 0.5f)).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ofDays(-2)))
checkAnswer(
Seq((Duration.ofMillis(10000000), 10000000d)).toDF("i", "n").select($"i" / $"n"),
Row(Duration.ofMillis(1)))
checkAnswer(
Seq((Duration.of(-1, ChronoUnit.MICROS), BigDecimal(10000.0001)))
.toDF("i", "n").select($"i" / $"n"),
Row(Duration.of(-1, ChronoUnit.MICROS).multipliedBy(10000).dividedBy(100000001)))
val e = intercept[SparkException] {
Seq((Duration.ofDays(9999), 0)).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("divide by zero"))
val e2 = intercept[SparkException] {
Seq((Duration.ofDays(9999), 0d)).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e2.isInstanceOf[ArithmeticException])
assert(e2.getMessage.contains("divide by zero"))
val e3 = intercept[SparkException] {
Seq((Duration.ofDays(9999), BigDecimal(0))).toDF("i", "n").select($"i" / $"n").collect()
}.getCause
assert(e3.isInstanceOf[ArithmeticException])
assert(e3.getMessage.contains("divide by zero"))
}
test("SPARK-34896: return day-time interval from dates subtraction") {
withSQLConf(
SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true",
SQLConf.LEGACY_INTERVAL_ENABLED.key -> "false") {
outstandingTimezonesIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) {
Seq(
(LocalDate.of(1582, 10, 15), LocalDate.of(1582, 10, 4)),
(LocalDate.of(1900, 10, 1), LocalDate.of(1900, 10, 1)),
(LocalDate.of(1969, 12, 1), LocalDate.of(1970, 1, 1)),
(LocalDate.of(2021, 3, 1), LocalDate.of(2020, 2, 29)),
(LocalDate.of(2021, 3, 15), LocalDate.of(2021, 3, 14)),
(LocalDate.of(1, 1, 1), LocalDate.of(2021, 3, 29))
).foreach { case (end, start) =>
val df = Seq((end, start)).toDF("end", "start")
val daysBetween = Duration.ofDays(ChronoUnit.DAYS.between(start, end))
val r = df.select($"end" - $"start").toDF("diff")
checkAnswer(r, Row(daysBetween))
assert(r.schema === new StructType().add("diff", DayTimeIntervalType(DAY)))
}
}
}
val e = intercept[SparkException] {
Seq((LocalDate.ofEpochDay(0), LocalDate.of(500000, 1, 1)))
.toDF("start", "end")
.select($"end" - $"start")
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("long overflow"))
}
}
test("SPARK-34903: Return day-time interval from timestamps subtraction") {
outstandingTimezonesIds.foreach { tz =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> tz) {
checkAnswer(
sql("select timestamp '2021-03-31 19:11:10' - timestamp '2021-03-01 19:11:10'"),
Row(Duration.ofDays(30)))
checkAnswer(
Seq((Instant.parse("2021-03-31T00:01:02Z"), Instant.parse("2021-04-01T00:00:00Z")))
.toDF("start", "end").select($"end" - $"start" < Duration.ofDays(1)),
Row(true))
checkAnswer(
Seq((Instant.parse("2021-03-31T00:01:02.777Z"), Duration.ofMillis(333)))
.toDF("ts", "i")
.select(($"ts" + $"i") - $"ts"),
Row(Duration.ofMillis(333)))
checkAnswer(
Seq((LocalDateTime.of(2021, 3, 31, 10, 0, 0)
.atZone(DateTimeUtils.getZoneId(tz)).toInstant, LocalDate.of(2020, 3, 31)))
.toDF("ts", "d")
.select($"ts" - $"d"),
Row(Duration.ofDays(365).plusHours(10)))
}
}
}
test("SPARK-35051: add/subtract a day-time interval to/from a date") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
outstandingZoneIds.foreach { zid =>
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) {
Seq(
(LocalDate.of(1, 1, 1), Duration.ofDays(31)) -> LocalDateTime.of(1, 2, 1, 0, 0, 0),
(LocalDate.of(1582, 9, 15), Duration.ofDays(30).plus(1, ChronoUnit.MICROS)) ->
LocalDateTime.of(1582, 10, 15, 0, 0, 0, 1000),
(LocalDate.of(1900, 1, 1), Duration.ofDays(0).plusHours(1)) ->
LocalDateTime.of(1900, 1, 1, 1, 0, 0),
(LocalDate.of(1970, 1, 1), Duration.ofDays(-1).minusMinutes(1)) ->
LocalDateTime.of(1969, 12, 30, 23, 59, 0),
(LocalDate.of(2021, 3, 14), Duration.ofDays(1)) ->
LocalDateTime.of(2021, 3, 15, 0, 0, 0),
(LocalDate.of(2020, 12, 31), Duration.ofDays(4 * 30).plusMinutes(30)) ->
LocalDateTime.of(2021, 4, 30, 0, 30, 0),
(LocalDate.of(2020, 2, 29), Duration.ofDays(365).plusSeconds(59)) ->
LocalDateTime.of(2021, 2, 28, 0, 0, 59),
(LocalDate.of(10000, 1, 1), Duration.ofDays(-2)) ->
LocalDateTime.of(9999, 12, 30, 0, 0, 0)
).foreach { case ((date, duration), expected) =>
val result = expected.atZone(zid).toInstant
val ts = date.atStartOfDay(zid).toInstant
val df = Seq((date, duration, result)).toDF("date", "interval", "result")
checkAnswer(
df.select($"date" + $"interval", $"interval" + $"date", $"result" - $"interval",
$"result" - $"date"),
Row(result, result, ts, duration))
}
}
}
Seq(
"2021-04-14" -> "date + i",
"1900-04-14" -> "date - i").foreach { case (date, op) =>
val e = intercept[SparkException] {
Seq(
(LocalDate.parse(date), Duration.of(Long.MaxValue, ChronoUnit.MICROS)))
.toDF("date", "i")
.selectExpr(op)
.collect()
}.getCause
assert(e.isInstanceOf[ArithmeticException])
assert(e.getMessage.contains("long overflow"))
}
}
}
test("SPARK-35852: add/subtract a interval day to/from a date") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
Seq(
(LocalDate.of(1, 1, 1), Duration.ofDays(31)),
(LocalDate.of(1582, 9, 15), Duration.ofDays(30)),
(LocalDate.of(1900, 1, 1), Duration.ofDays(0)),
(LocalDate.of(1970, 1, 1), Duration.ofDays(-1)),
(LocalDate.of(2021, 3, 14), Duration.ofDays(1)),
(LocalDate.of(2020, 12, 31), Duration.ofDays(4 * 30)),
(LocalDate.of(2020, 2, 29), Duration.ofDays(365)),
(LocalDate.of(10000, 1, 1), Duration.ofDays(-2))
).foreach { case (date, duration) =>
val days = duration.toDays
val add = date.plusDays(days)
val sub = date.minusDays(days)
val df = Seq((date, duration)).toDF("start", "diff")
.select($"start", $"diff" cast DayTimeIntervalType(DAY) as "diff")
.select($"start" + $"diff", $"diff" + $"start", $"start" - $"diff")
checkAnswer(df, Row(add, add, sub))
}
}
}
test("SPARK-36778: add ilike API for scala") {
// scalastyle:off
// non-ASCII characters are not allowed in the code, so we disable the scalastyle check here.
// null handling
val nullDf = Seq("a", null).toDF("src")
checkAnswer(nullDf.filter($"src".ilike("A")), Row("a"))
checkAnswer(nullDf.filter($"src".ilike(null)), spark.emptyDataFrame)
// simple pattern
val simpleDf = Seq("a", "A", "abdef", "a_%b", "addb", "abC", "a\nb").toDF("src")
checkAnswer(simpleDf.filter($"src".ilike("a")), Seq("a", "A").toDF())
checkAnswer(simpleDf.filter($"src".ilike("A")), Seq("a", "A").toDF())
checkAnswer(simpleDf.filter($"src".ilike("b")), spark.emptyDataFrame)
checkAnswer(simpleDf.filter($"src".ilike("aBdef")), Seq("abdef").toDF())
checkAnswer(simpleDf.filter($"src".ilike("a\\__b")), Seq("a_%b").toDF())
checkAnswer(simpleDf.filter($"src".ilike("A_%b")), Seq("a_%b", "addb", "a\nb").toDF())
checkAnswer(simpleDf.filter($"src".ilike("a%")), simpleDf)
checkAnswer(simpleDf.filter($"src".ilike("a_b")), Seq("a\nb").toDF())
// double-escaping backslash
val dEscDf = Seq("""\__""", """\\__""").toDF("src")
checkAnswer(dEscDf.filter($"src".ilike("""\\\__""")), Seq("""\__""").toDF())
checkAnswer(dEscDf.filter($"src".ilike("""%\\%\%""")), spark.emptyDataFrame)
// unicode
val uncDf = Seq("a\u20ACA", "A€a", "a€AA", "a\u20ACaz", "ЀЁЂѺΏỀ").toDF("src")
checkAnswer(uncDf.filter($"src".ilike("_\u20AC_")), Seq("a\u20ACA", "A€a").toDF())
checkAnswer(uncDf.filter($"src".ilike("_€_")), Seq("a\u20ACA", "A€a").toDF())
checkAnswer(uncDf.filter($"src".ilike("_\u20AC_a")), Seq("a€AA").toDF())
checkAnswer(uncDf.filter($"src".ilike("_€_Z")), Seq("a\u20ACaz").toDF())
checkAnswer(uncDf.filter($"src".ilike("ѐёђѻώề")), Seq("ЀЁЂѺΏỀ").toDF())
// scalastyle:on
}
}
| ueshin/apache-spark | sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala | Scala | apache-2.0 | 117,845 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{GenerateMutableProjection, GenerateSafeProjection, GenerateUnsafeProjection}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, StructType}
/**
* A [[Projection]] that is computed by calling `eval` on each of the specified expressions.
*
* @param expressions a sequence of expressions that determine the value of each column of the
* output row.
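*
* Illustrative usage sketch (not from the original source; `boundExprs` is assumed to be a
* sequence of expressions already bound to the input schema, and `row` an input [[InternalRow]]):
*
* {{{
*   val projection = new InterpretedProjection(boundExprs)
*   val output: InternalRow = projection(row) // evaluates each expression against `row`
* }}}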
*/
class InterpretedProjection(expressions: Seq[Expression]) extends Projection {
def this(expressions: Seq[Expression], inputSchema: Seq[Attribute]) =
this(expressions.map(BindReferences.bindReference(_, inputSchema)))
override def initialize(partitionIndex: Int): Unit = {
expressions.foreach(_.foreach {
case n: Nondeterministic => n.initialize(partitionIndex)
case _ =>
})
}
// null check is required for when Kryo invokes the no-arg constructor.
protected val exprArray = if (expressions != null) expressions.toArray else null
def apply(input: InternalRow): InternalRow = {
val outputArray = new Array[Any](exprArray.length)
var i = 0
while (i < exprArray.length) {
outputArray(i) = exprArray(i).eval(input)
i += 1
}
new GenericInternalRow(outputArray)
}
override def toString(): String = s"Row => [${exprArray.mkString(",")}]"
}
/**
* Converts an [[InternalRow]] to another row given a sequence of expressions that define each
* column of the new row. If the schema of the input row is specified, then the given expressions
* will be bound to that schema.
*
* In contrast to a normal projection, a MutableProjection reuses the same underlying row object
* each time an input row is added. This significantly reduces the cost of calculating the
* projection, but means that it is not safe to hold on to a reference to a [[InternalRow]] after
* `next()` has been called on the [[Iterator]] that produced it. Instead, the user must call
* `InternalRow.copy()` and hold on to the returned [[InternalRow]] before calling `next()`.
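*
* A minimal sketch of the copy-before-next pattern described above (illustrative only;
* `boundExprs` and `rows: Iterator[InternalRow]` are assumed to exist):
*
* {{{
*   val projection = MutableProjection.create(boundExprs)
*   // copy() each result before advancing the iterator, because the projection reuses its row
*   val materialized = rows.map(row => projection(row).copy()).toSeq
* }}}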
*/
abstract class MutableProjection extends Projection {
def currentValue: InternalRow
/** Uses the given row to store the output of the projection. */
def target(row: InternalRow): MutableProjection
}
/**
* The factory object for `MutableProjection`.
*/
object MutableProjection
extends CodeGeneratorWithInterpretedFallback[Seq[Expression], MutableProjection] {
override protected def createCodeGeneratedObject(in: Seq[Expression]): MutableProjection = {
GenerateMutableProjection.generate(in, SQLConf.get.subexpressionEliminationEnabled)
}
override protected def createInterpretedObject(in: Seq[Expression]): MutableProjection = {
InterpretedMutableProjection.createProjection(in)
}
/**
* Returns a MutableProjection for the given sequence of bound Expressions.
*/
def create(exprs: Seq[Expression]): MutableProjection = {
createObject(exprs)
}
/**
* Returns a MutableProjection for the given sequence of Expressions, which will be bound to
* `inputSchema`.
*/
def create(exprs: Seq[Expression], inputSchema: Seq[Attribute]): MutableProjection = {
create(toBoundExprs(exprs, inputSchema))
}
}
/**
* A projection that returns UnsafeRow.
*
* CAUTION: the returned projection object should *not* be assumed to be thread-safe.
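*
* Because instances are not thread-safe, a common pattern is one projection per task (sketch
* only; `boundExprs` and `iter: Iterator[InternalRow]` are assumed to exist):
*
* {{{
*   val toUnsafe = UnsafeProjection.create(boundExprs)
*   // copy() if the produced UnsafeRows are retained beyond the current iteration
*   val unsafeRows = iter.map(row => toUnsafe(row).copy())
* }}}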
*/
abstract class UnsafeProjection extends Projection {
override def apply(row: InternalRow): UnsafeRow
}
/**
* The factory object for `UnsafeProjection`.
*/
object UnsafeProjection
extends CodeGeneratorWithInterpretedFallback[Seq[Expression], UnsafeProjection] {
override protected def createCodeGeneratedObject(in: Seq[Expression]): UnsafeProjection = {
GenerateUnsafeProjection.generate(in, SQLConf.get.subexpressionEliminationEnabled)
}
override protected def createInterpretedObject(in: Seq[Expression]): UnsafeProjection = {
InterpretedUnsafeProjection.createProjection(in)
}
protected def toUnsafeExprs(exprs: Seq[Expression]): Seq[Expression] = {
exprs.map(_ transform {
case CreateNamedStruct(children) => CreateNamedStructUnsafe(children)
})
}
/**
* Returns an UnsafeProjection for the given StructType.
*
* CAUTION: the returned projection object is *not* thread-safe.
*/
def create(schema: StructType): UnsafeProjection = create(schema.fields.map(_.dataType))
/**
* Returns an UnsafeProjection for the given Array of DataTypes.
*
* CAUTION: the returned projection object is *not* thread-safe.
*/
def create(fields: Array[DataType]): UnsafeProjection = {
create(fields.zipWithIndex.map(x => BoundReference(x._2, x._1, true)))
}
/**
* Returns an UnsafeProjection for the given sequence of bound Expressions.
*/
def create(exprs: Seq[Expression]): UnsafeProjection = {
createObject(toUnsafeExprs(exprs))
}
def create(expr: Expression): UnsafeProjection = create(Seq(expr))
/**
* Returns an UnsafeProjection for the given sequence of Expressions, which will be bound to
* `inputSchema`.
*/
def create(exprs: Seq[Expression], inputSchema: Seq[Attribute]): UnsafeProjection = {
create(toBoundExprs(exprs, inputSchema))
}
}
/**
* A projection that turns an UnsafeRow into a GenericInternalRow.
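*
* Illustrative sketch (assumes a `schema: StructType` describing the row and an `unsafeRow`
* that conforms to it):
*
* {{{
*   val toSafe = FromUnsafeProjection(schema)
*   val genericRow = toSafe(unsafeRow) // values are materialized into a GenericInternalRow
* }}}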
*/
object FromUnsafeProjection {
/**
* Returns a Projection for the given StructType.
*/
def apply(schema: StructType): Projection = {
apply(schema.fields.map(_.dataType))
}
/**
* Returns a Projection for the given Array of DataTypes.
*/
def apply(fields: Seq[DataType]): Projection = {
create(fields.zipWithIndex.map(x => new BoundReference(x._2, x._1, true)))
}
/**
* Returns a Projection for the given sequence of bound Expressions.
*/
private def create(exprs: Seq[Expression]): Projection = {
GenerateSafeProjection.generate(exprs)
}
}
| michalsenkyr/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala | Scala | apache-2.0 | 6,789 |
/*
* Copyright 2014 Renaud Bruneliere
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.bruneli.scalaopt.core.linesearch
import com.github.bruneli.scalaopt.core._
import com.github.bruneli.scalaopt.core.variable.{LineSearchPoint, UnconstrainedVariables}
import scala.util.{Failure, Success}
import org.scalatest._
class StrongWolfeSpec extends FlatSpec with Matchers {
import StrongWolfe._
"zoomStepLength" should "converge to minimum of 2d order polynomial" in {
val tol = UnconstrainedVariables(1.0e-9)
val x0 = UnconstrainedVariables(0.3)
val pk = UnconstrainedVariables(1.0)
val f = (x: UnconstrainedVariablesType) => (x - x0) dot (x - x0)
val df = (x: UnconstrainedVariablesType) => (x - x0) * 2.0
val pt0 = LineSearchPoint(UnconstrainedVariables(0.0), (f, df), pk)
val pt1 = pt0.copy(x = UnconstrainedVariables(1.0))
zoomStepLength(0.0, pt0, 1.0, pt1, pt0) match {
case Success(ptMin) => {
(ptMin.x - x0).norm should be < tol.norm
ptMin.fx should be < f(x0 + tol)
}
case Failure(e) => fail(e)
}
}
it should "converge to minimum of 3rd order polynomial" in {
val tol = UnconstrainedVariables(1.0e-9)
val x0 = UnconstrainedVariables(0.5)
val pk = UnconstrainedVariables(1.0)
val f = (x: UnconstrainedVariablesType) => {
val dx = (x - x0).norm
dx * dx + Math.pow(dx, 3.0)
}
val df = (x: UnconstrainedVariablesType) => {
val dx = (x - x0).norm
(x - x0) * 2.0 + pk * (3.0 * dx * dx)
}
val pt0 = LineSearchPoint(UnconstrainedVariables(0.0), (f, df), pk)
val pt1 = pt0.copy(x = UnconstrainedVariables(1.0))
zoomStepLength(0.0, pt0, 1.0, pt1, pt0) match {
case Success(ptmin) => {
(ptmin.x - x0).norm should be < tol.norm
ptmin.fx should be < f(x0 + tol)
}
case Failure(e) => assert(false)
}
}
it should "throw a MaxIterException if failing to converge" in {
val x0 = UnconstrainedVariables(0.3)
val pk = UnconstrainedVariables(1.0)
val f = (x: UnconstrainedVariablesType) => (x - x0).norm
val df = (x: UnconstrainedVariablesType) => pk
val pt0 = LineSearchPoint(UnconstrainedVariables(0.0), (f, df), pk)
val pt1 = pt0.copy(x = UnconstrainedVariables(1.0))
a [MaxIterException] should be thrownBy {
zoomStepLength(0.0, pt0, 1.0, pt1, pt0)
}
}
}
|
bruneli/scalaopt
|
core/src/test/scala/com/github/bruneli/scalaopt/core/linesearch/StrongWolfeSpec.scala
|
Scala
|
apache-2.0
| 2,912
|
package security
import javax.inject.{Inject, Singleton}
import be.objectify.deadbolt.scala.{DeadboltHandler, HandlerKey}
import be.objectify.deadbolt.scala.cache.HandlerCache
/**
* @author Steve Chaloner (steve@objectify.be)
*/
@Singleton
class MyHandlerCache @Inject() (defaultHandler: DeadboltHandler) extends HandlerCache {
private val handlers: Map[String, DeadboltHandler] = Map(defaultHandler.handlerName -> defaultHandler)
override def apply(): DeadboltHandler = defaultHandler
override def apply(key: HandlerKey): DeadboltHandler = handlers.get(key.toString).orNull
}
|
Arquisuave/oilcol-app-play
|
app/security/MyHandlerCache.scala
|
Scala
|
mit
| 593
|
package net.benmur.riemann.client.testingsupport
import akka.actor.Actor
import akka.actor.actorRef2Scala
import akka.event.Logging.InitializeLogger
import akka.event.Logging.LoggerInitialized
class NopEventHandler extends Actor {
def receive: Receive = {
case InitializeLogger(_) => sender ! LoggerInitialized
}
}
|
benmur/riemann-scala-client
|
src/test/scala/net/benmur/riemann/client/testingsupport/NopEventHandler.scala
|
Scala
|
mit
| 324
|
package org.inosion.dadagen.auto
import org.scalatest.{FlatSpec, Matchers}
case class MyTest(firstname: String, surname: String)
class ScalaClassReadTest extends FlatSpec with Matchers {
"The dadagen reader" should "populate all simple fields" in {
val testInstances: IndexedSeq[MyTest] = dadagen[MyTest].generate().take(400).toIndexedSeq
testInstances.size should be (400)
}
}
|
inosion/dadagen
|
dadagen-core/src/test/scala/org/inosion/dadagen/auto/ScalaClassReadTest.scala
|
Scala
|
apache-2.0
| 400
|
package repository
import org.specs2.specification.Scope
import play.api.test.{DefaultAwaitTimeout, FutureAwaits}
import reactivemongo.bson.BSONObjectID
import securesocial.core.authenticator.IdGenerator
import utils.UniqueStrings
import securesocial.core._
import securesocial.core.providers.UsernamePasswordProvider
trait ProfileTestCase
extends Scope
with UniqueStrings
with FutureAwaits
with DefaultAwaitTimeout {
lazy val profileRepository: ProfileRepository = new MongoProfileRepository {}
val id = BSONObjectID.generate
val userId = uniqueString
val firstName = Some(s"Joe-$userId")
val providerId = UsernamePasswordProvider.UsernamePassword
val email = s"$userId@somemail.com"
val profile = BasicProfile(
providerId = providerId,
userId = userId,
firstName = firstName,
lastName = Some("Bloggs"),
fullName = Some("Joe Blow Bloggs"),
email = Some(email),
avatarUrl = Some("http://2.gravatar.com/avatar/5f6b0d7f7c102038f2b367dbc797c736"),
authMethod = AuthenticationMethod.UserPassword,
oAuth1Info = Some(OAuth1Info(token = "oAuth1Token", secret = "oAuth1Secret")),
oAuth2Info = Some(OAuth2Info(accessToken = "oAuth2Token", tokenType = Some("oAuth2TokenType"), expiresIn = Some(999), refreshToken = Some("refreshToken"))),
passwordInfo = Some(PasswordInfo(hasher = "bcrypt", password = "password", salt = Some("salt")))
)
def generateAuthenticatorId: String = await(new IdGenerator.Default().generate)
val authenticatorId = generateAuthenticatorId
}
|
timothygordon32/reactive-todolist
|
it/repository/ProfileTestCase.scala
|
Scala
|
mit
| 1,533
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
class StringFunctionsSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("string concat") {
val df = Seq[(String, String, String)](("a", "b", null)).toDF("a", "b", "c")
checkAnswer(
df.select(concat($"a", $"b"), concat($"a", $"b", $"c")),
Row("ab", null))
checkAnswer(
df.selectExpr("concat(a, b)", "concat(a, b, c)"),
Row("ab", null))
}
test("string concat_ws") {
val df = Seq[(String, String, String)](("a", "b", null)).toDF("a", "b", "c")
checkAnswer(
df.select(concat_ws("||", $"a", $"b", $"c")),
Row("a||b"))
checkAnswer(
df.selectExpr("concat_ws('||', a, b, c)"),
Row("a||b"))
}
test("string elt") {
val df = Seq[(String, String, String, Int)](("hello", "world", null, 15))
.toDF("a", "b", "c", "d")
checkAnswer(
df.selectExpr("elt(0, a, b, c)", "elt(1, a, b, c)", "elt(4, a, b, c)"),
Row(null, "hello", null))
// check implicit type cast
checkAnswer(
df.selectExpr("elt(4, a, b, c, d)", "elt('2', a, b, c, d)"),
Row("15", "world"))
}
test("string Levenshtein distance") {
val df = Seq(("kitten", "sitting"), ("frog", "fog")).toDF("l", "r")
checkAnswer(df.select(levenshtein($"l", $"r")), Seq(Row(3), Row(1)))
checkAnswer(df.selectExpr("levenshtein(l, r)"), Seq(Row(3), Row(1)))
}
test("string regex_replace / regex_extract") {
val df = Seq(
("100-200", "(\\\\d+)-(\\\\d+)", "300"),
("100-200", "(\\\\d+)-(\\\\d+)", "400"),
("100-200", "(\\\\d+)", "400")).toDF("a", "b", "c")
checkAnswer(
df.select(
regexp_replace($"a", "(\\\\d+)", "num"),
regexp_replace($"a", $"b", $"c"),
regexp_extract($"a", "(\\\\d+)-(\\\\d+)", 1)),
Row("num-num", "300", "100") :: Row("num-num", "400", "100") ::
Row("num-num", "400-400", "100") :: Nil)
// for testing the mutable state of the expression in code gen.
// This is a hack to force codegen: although codegen is enabled by default, the
// interpreted projection is still used when a projection sits directly on top of
// a LocalRelation, hence we add a filter operator.
// See the optimizer rule `ConvertToLocalRelation`
checkAnswer(
df.filter("isnotnull(a)").selectExpr(
"regexp_replace(a, b, c)",
"regexp_extract(a, b, 1)"),
Row("300", "100") :: Row("400", "100") :: Row("400-400", "100") :: Nil)
}
test("non-matching optional group") {
val df = Seq(Tuple1("aaaac")).toDF("s")
checkAnswer(
df.select(regexp_extract($"s", "(foo)", 1)),
Row("")
)
checkAnswer(
df.select(regexp_extract($"s", "(a+)(b)?(c)", 2)),
Row("")
)
}
test("string ascii function") {
val df = Seq(("abc", "")).toDF("a", "b")
checkAnswer(
df.select(ascii($"a"), ascii($"b")),
Row(97, 0))
checkAnswer(
df.selectExpr("ascii(a)", "ascii(b)"),
Row(97, 0))
}
test("string base64/unbase64 function") {
val bytes = Array[Byte](1, 2, 3, 4)
val df = Seq((bytes, "AQIDBA==")).toDF("a", "b")
checkAnswer(
df.select(base64($"a"), unbase64($"b")),
Row("AQIDBA==", bytes))
checkAnswer(
df.selectExpr("base64(a)", "unbase64(b)"),
Row("AQIDBA==", bytes))
}
test("overlay function") {
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
val df = Seq(("Spark SQL", "Spark的SQL")).toDF("a", "b")
checkAnswer(df.select(overlay($"a", "_", 6)), Row("Spark_SQL"))
checkAnswer(df.select(overlay($"a", "CORE", 7)), Row("Spark CORE"))
checkAnswer(df.select(overlay($"a", "ANSI ", 7, 0)), Row("Spark ANSI SQL"))
checkAnswer(df.select(overlay($"a", "tructured", 2, 4)), Row("Structured SQL"))
checkAnswer(df.select(overlay($"b", "_", 6)), Row("Spark_SQL"))
// scalastyle:on
}
test("string / binary substring function") {
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
val df = Seq(("1世3", Array[Byte](1, 2, 3, 4))).toDF("a", "b")
checkAnswer(df.select(substring($"a", 1, 2)), Row("1世"))
checkAnswer(df.select(substring($"b", 2, 2)), Row(Array[Byte](2,3)))
checkAnswer(df.selectExpr("substring(a, 1, 2)"), Row("1世"))
// scalastyle:on
}
test("string encode/decode function") {
val bytes = Array[Byte](-27, -92, -89, -27, -115, -125, -28, -72, -106, -25, -107, -116)
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
val df = Seq(("大千世界", "utf-8", bytes)).toDF("a", "b", "c")
checkAnswer(
df.select(encode($"a", "utf-8"), decode($"c", "utf-8")),
Row(bytes, "大千世界"))
checkAnswer(
df.selectExpr("encode(a, 'utf-8')", "decode(c, 'utf-8')"),
Row(bytes, "大千世界"))
// scalastyle:on
}
test("string translate") {
val df = Seq(("translate", "")).toDF("a", "b")
checkAnswer(df.select(translate($"a", "rnlt", "123")), Row("1a2s3ae"))
checkAnswer(df.selectExpr("""translate(a, "rnlt", "")"""), Row("asae"))
}
test("string trim functions") {
val df = Seq((" example ", "", "example")).toDF("a", "b", "c")
checkAnswer(
df.select(ltrim($"a"), rtrim($"a"), trim($"a")),
Row("example ", " example", "example"))
checkAnswer(
df.select(ltrim($"c", "e"), rtrim($"c", "e"), trim($"c", "e")),
Row("xample", "exampl", "xampl"))
checkAnswer(
df.select(ltrim($"c", "xe"), rtrim($"c", "emlp"), trim($"c", "elxp")),
Row("ample", "exa", "am"))
checkAnswer(
df.select(trim($"c", "xyz")),
Row("example"))
checkAnswer(
df.selectExpr("ltrim(a)", "rtrim(a)", "trim(a)"),
Row("example ", " example", "example"))
}
test("string formatString function") {
val df = Seq(("aa%d%s", 123, "cc")).toDF("a", "b", "c")
checkAnswer(
df.select(format_string("aa%d%s", $"b", $"c")),
Row("aa123cc"))
checkAnswer(
df.selectExpr("printf(a, b, c)"),
Row("aa123cc"))
}
test("soundex function") {
val df = Seq(("MARY", "SU")).toDF("l", "r")
checkAnswer(
df.select(soundex($"l"), soundex($"r")), Row("M600", "S000"))
checkAnswer(
df.selectExpr("SoundEx(l)", "SoundEx(r)"), Row("M600", "S000"))
}
test("string instr function") {
val df = Seq(("aaads", "aa", "zz")).toDF("a", "b", "c")
checkAnswer(
df.select(instr($"a", "aa")),
Row(1))
checkAnswer(
df.selectExpr("instr(a, b)"),
Row(1))
}
test("string substring_index function") {
val df = Seq(("www.apache.org", ".", "zz")).toDF("a", "b", "c")
checkAnswer(
df.select(substring_index($"a", ".", 2)),
Row("www.apache"))
checkAnswer(
df.selectExpr("substring_index(a, '.', 2)"),
Row("www.apache")
)
}
test("string locate function") {
val df = Seq(("aaads", "aa", "zz", 2)).toDF("a", "b", "c", "d")
checkAnswer(
df.select(locate("aa", $"a"), locate("aa", $"a", 2), locate("aa", $"a", 0)),
Row(1, 2, 0))
checkAnswer(
df.selectExpr("locate(b, a)", "locate(b, a, d)", "locate(b, a, 3)"),
Row(1, 2, 0))
}
test("string padding functions") {
val df = Seq(("hi", 5, "??")).toDF("a", "b", "c")
checkAnswer(
df.select(lpad($"a", 1, "c"), lpad($"a", 5, "??"), rpad($"a", 1, "c"), rpad($"a", 5, "??")),
Row("h", "???hi", "h", "hi???"))
checkAnswer(
df.selectExpr("lpad(a, b, c)", "rpad(a, b, c)", "lpad(a, 1, c)", "rpad(a, 1, c)"),
Row("???hi", "hi???", "h", "h"))
}
test("string parse_url function") {
def testUrl(url: String, expected: Row) {
checkAnswer(Seq[String]((url)).toDF("url").selectExpr(
"parse_url(url, 'HOST')", "parse_url(url, 'PATH')",
"parse_url(url, 'QUERY')", "parse_url(url, 'REF')",
"parse_url(url, 'PROTOCOL')", "parse_url(url, 'FILE')",
"parse_url(url, 'AUTHORITY')", "parse_url(url, 'USERINFO')",
"parse_url(url, 'QUERY', 'query')"), expected)
}
testUrl(
"http://userinfo@spark.apache.org/path?query=1#Ref",
Row("spark.apache.org", "/path", "query=1", "Ref",
"http", "/path?query=1", "userinfo@spark.apache.org", "userinfo", "1"))
testUrl(
"https://use%20r:pas%20s@example.com/dir%20/pa%20th.HTML?query=x%20y&q2=2#Ref%20two",
Row("example.com", "/dir%20/pa%20th.HTML", "query=x%20y&q2=2", "Ref%20two",
"https", "/dir%20/pa%20th.HTML?query=x%20y&q2=2", "use%20r:pas%20s@example.com",
"use%20r:pas%20s", "x%20y"))
testUrl(
"http://user:pass@host",
Row("host", "", null, null, "http", "", "user:pass@host", "user:pass", null))
testUrl(
"http://user:pass@host/",
Row("host", "/", null, null, "http", "/", "user:pass@host", "user:pass", null))
testUrl(
"http://user:pass@host/?#",
Row("host", "/", "", "", "http", "/?", "user:pass@host", "user:pass", null))
testUrl(
"http://user:pass@host/file;param?query;p2",
Row("host", "/file;param", "query;p2", null, "http", "/file;param?query;p2",
"user:pass@host", "user:pass", null))
testUrl(
"inva lid://user:pass@host/file;param?query;p2",
Row(null, null, null, null, null, null, null, null, null))
}
test("string repeat function") {
val df = Seq(("hi", 2)).toDF("a", "b")
checkAnswer(
df.select(repeat($"a", 2)),
Row("hihi"))
checkAnswer(
df.selectExpr("repeat(a, 2)", "repeat(a, b)"),
Row("hihi", "hihi"))
}
test("string reverse function") {
val df = Seq(("hi", "hhhi")).toDF("a", "b")
checkAnswer(
df.select(reverse($"a"), reverse($"b")),
Row("ih", "ihhh"))
checkAnswer(
df.selectExpr("reverse(b)"),
Row("ihhh"))
}
test("string space function") {
val df = Seq((2, 3)).toDF("a", "b")
checkAnswer(
df.selectExpr("space(b)"),
Row(" "))
}
test("string split function with no limit") {
val df = Seq(("aa2bb3cc4", "[1-9]+")).toDF("a", "b")
checkAnswer(
df.select(split($"a", "[1-9]+")),
Row(Seq("aa", "bb", "cc", "")))
checkAnswer(
df.selectExpr("split(a, '[1-9]+')"),
Row(Seq("aa", "bb", "cc", "")))
}
test("string split function with limit explicitly set to 0") {
val df = Seq(("aa2bb3cc4", "[1-9]+")).toDF("a", "b")
checkAnswer(
df.select(split($"a", "[1-9]+", 0)),
Row(Seq("aa", "bb", "cc", "")))
checkAnswer(
df.selectExpr("split(a, '[1-9]+', 0)"),
Row(Seq("aa", "bb", "cc", "")))
}
test("string split function with positive limit") {
val df = Seq(("aa2bb3cc4", "[1-9]+")).toDF("a", "b")
checkAnswer(
df.select(split($"a", "[1-9]+", 2)),
Row(Seq("aa", "bb3cc4")))
checkAnswer(
df.selectExpr("split(a, '[1-9]+', 2)"),
Row(Seq("aa", "bb3cc4")))
}
test("string split function with negative limit") {
val df = Seq(("aa2bb3cc4", "[1-9]+")).toDF("a", "b")
checkAnswer(
df.select(split($"a", "[1-9]+", -2)),
Row(Seq("aa", "bb", "cc", "")))
checkAnswer(
df.selectExpr("split(a, '[1-9]+', -2)"),
Row(Seq("aa", "bb", "cc", "")))
}
test("string / binary length function") {
val df = Seq(("123", Array[Byte](1, 2, 3, 4), 123, 2.0f, 3.015))
.toDF("a", "b", "c", "d", "e")
checkAnswer(
df.select(length($"a"), length($"b")),
Row(3, 4))
checkAnswer(
df.selectExpr("length(a)", "length(b)"),
Row(3, 4))
checkAnswer(
df.selectExpr("length(c)", "length(d)", "length(e)"),
Row(3, 3, 5)
)
}
test("initcap function") {
val df = Seq(("ab", "a B", "sParK")).toDF("x", "y", "z")
checkAnswer(
df.select(initcap($"x"), initcap($"y"), initcap($"z")), Row("Ab", "A B", "Spark"))
checkAnswer(
df.selectExpr("InitCap(x)", "InitCap(y)", "InitCap(z)"), Row("Ab", "A B", "Spark"))
}
test("number format function") {
val df = spark.range(1)
checkAnswer(
df.select(format_number(lit(5L), 4)),
Row("5.0000"))
checkAnswer(
df.select(format_number(lit(1.toByte), 4)), // convert the 1st argument to integer
Row("1.0000"))
checkAnswer(
df.select(format_number(lit(2.toShort), 4)), // convert the 1st argument to integer
Row("2.0000"))
checkAnswer(
df.select(format_number(lit(3.1322.toFloat), 4)), // convert the 1st argument to double
Row("3.1322"))
checkAnswer(
df.select(format_number(lit(4), 4)), // not convert anything
Row("4.0000"))
checkAnswer(
df.select(format_number(lit(5L), 4)), // not convert anything
Row("5.0000"))
checkAnswer(
df.select(format_number(lit(6.48173), 4)), // not convert anything
Row("6.4817"))
checkAnswer(
df.select(format_number(lit(BigDecimal("7.128381")), 4)), // not convert anything
Row("7.1284"))
intercept[AnalysisException] {
df.select(format_number(lit("aa"), 4)) // string type of the 1st argument is unacceptable
}
intercept[AnalysisException] {
df.selectExpr("format_number(4, 6.48173)") // non-integral type 2nd argument is unacceptable
}
// for testing the mutable state of the expression in code gen.
// This is a hack to force codegen: although codegen is enabled by default, the
// interpreted projection is still used when a projection sits directly on top of
// a LocalRelation, hence we add a filter operator.
// See the optimizer rule `ConvertToLocalRelation`
val df2 = Seq((5L, 4), (4L, 3), (4L, 3), (4L, 3), (3L, 2)).toDF("a", "b")
checkAnswer(
df2.filter("b>0").selectExpr("format_number(a, b)"),
Row("5.0000") :: Row("4.000") :: Row("4.000") :: Row("4.000") :: Row("3.00") :: Nil)
}
test("string sentences function") {
val df = Seq(("Hi there! The price was $1,234.56.... But, not now.", "en", "US"))
.toDF("str", "language", "country")
checkAnswer(
df.selectExpr("sentences(str, language, country)"),
Row(Seq(Seq("Hi", "there"), Seq("The", "price", "was"), Seq("But", "not", "now"))))
// Type coercion
checkAnswer(
df.selectExpr("sentences(null)", "sentences(10)", "sentences(3.14)"),
Row(null, Seq(Seq("10")), Seq(Seq("3.14"))))
// Argument number exception
val m = intercept[AnalysisException] {
df.selectExpr("sentences()")
}.getMessage
assert(m.contains("Invalid number of arguments for function sentences"))
}
test("str_to_map function") {
val df1 = Seq(
("a=1,b=2", "y"),
("a=1,b=2,c=3", "y")
).toDF("a", "b")
checkAnswer(
df1.selectExpr("str_to_map(a,',','=')"),
Seq(
Row(Map("a" -> "1", "b" -> "2")),
Row(Map("a" -> "1", "b" -> "2", "c" -> "3"))
)
)
val df2 = Seq(("a:1,b:2,c:3", "y")).toDF("a", "b")
checkAnswer(
df2.selectExpr("str_to_map(a)"),
Seq(Row(Map("a" -> "1", "b" -> "2", "c" -> "3")))
)
}
}
|
pgandhi999/spark
|
sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
|
Scala
|
apache-2.0
| 15,981
|
package scala.tools.nsc
package transform.patmat
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.asm.Opcodes._
import scala.tools.nsc.backend.jvm.AsmUtils._
import scala.tools.testkit.BytecodeTesting
import scala.tools.testkit.BytecodeTesting._
@RunWith(classOf[JUnit4])
class PatmatBytecodeTest extends BytecodeTesting {
val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-opt:l:inline -opt-inline-from:**"))
import compiler._
@Test
def t6956(): Unit = {
val code =
"""class C {
| private[this] final val ONE = 1
|
| def s1(i: Byte): Int = i match {
| case ONE => 1
| case 2 => 2
| case 3 => 3
| case _ => 0
| }
|
| def s2(i: Byte): Int = i match {
| case 1 => 1
| case 2 => 2
| case 3 => 3
| case _ => 0
| }
|}
""".stripMargin
val c = compileClass(code)
assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c))
assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c))
}
@Test
def t6955(): Unit = {
val code =
"""class C {
| type Tag = Byte
|
| def s1(i: Tag): Int = i match { // notice type of i is Tag = Byte
| case 1 => 1
| case 2 => 2
| case 3 => 3
| case _ => 0
| }
|
| // this worked before, should keep working
| def s2(i: Byte): Int = i match {
| case 1 => 1
| case 2 => 2
| case 3 => 3
| case _ => 0
| }
|}
""".stripMargin
val c = compileClass(code)
assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c))
assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c))
}
@Test
def optNoPrimitiveTypetest(): Unit = {
val code =
"""case class Foo(x: Int, y: String)
|class C {
| def a = Foo(1, "a") match {
| case Foo(_: Int, y) => y
| }
|}
""".stripMargin
val c :: _ = optCompiler.compileClasses(code)
assertSameSummary(getMethod(c, "a"), List(
NEW, DUP, ICONST_1, LDC, "<init>",
"y", ARETURN))
}
@Test
def optNoNullCheck(): Unit = {
val code =
"""case class Foo(x: Any)
|class C {
| def a = (Foo(1): Any) match {
| case Foo(_: String) =>
| }
|}
""".stripMargin
val c :: _ = optCompiler.compileClasses(code)
assert(!getInstructions(c, "a").exists(i => i.opcode == IFNULL || i.opcode == IFNONNULL), textify(getAsmMethod(c, "a")))
}
@Test
def optNoLocalForUnderscore(): Unit = {
val code =
"""case class Foo(x: Any, y: String)
|class C {
| def a = (Foo(1, "a"): @unchecked) match {
| case Foo(_: String, y) => y
| }
|}
""".stripMargin
val c :: _ = optCompiler.compileClasses(code)
assertSameSummary(getMethod(c, "a"), List(
NEW, DUP, ICONST_1, "valueOf", LDC, "<init>", ASTORE /*1*/,
ALOAD /*1*/, "y", ASTORE /*2*/,
ALOAD /*1*/, "x", INSTANCEOF, IFNE /*R*/,
NEW, DUP, ALOAD /*1*/, "<init>", ATHROW,
/*R*/ -1, ALOAD /*2*/, ARETURN))
}
@Test
def t6941(): Unit = {
val code =
"""class C {
| def a(xs: List[Int]) = xs match {
| case x :: _ => x
| }
| def b(xs: List[Int]) = xs match {
| case xs: ::[Int] => xs.head
| }
|}
""".stripMargin
val c = optCompiler.compileClass(code, allowMessage = _.msg.contains("may not be exhaustive"))
val expected = List(
ALOAD /*1*/ , INSTANCEOF /*::*/ , IFEQ /*A*/ ,
ALOAD, CHECKCAST /*::*/ , "head", "unboxToInt",
ISTORE, GOTO /*B*/ ,
-1 /*A*/ , NEW /*MatchError*/ , DUP, ALOAD /*1*/ , "<init>", ATHROW,
-1 /*B*/ , ILOAD, IRETURN)
assertSameSummary(getMethod(c, "a"), expected)
assertSameSummary(getMethod(c, "b"), expected)
}
@Test
def valPatterns(): Unit = {
val code =
"""case class C(a: Any, b: Int) {
| def tplCall = ("hi", 3)
| @inline final def tplInline = (true, 'z')
|
| def t1 = { val (a, b) = (1, 2); a + b }
| def t2 = { val (a, _) = (1, 3); a }
| def t3 = { val (s, i) = tplCall; s.length + i }
| def t4 = { val (_, i) = tplCall; i }
| def t5 = { val (b, c) = tplInline; b || c == 'e' }
| def t6 = { val (_, c) = tplInline; c }
|
| def t7 = { val C(s: String, b) = this; s.length + b }
| def t8 = { val C(_, b) = this; b }
| def t9 = { val C(a, _) = C("hi", 23); a.toString }
|}
""".stripMargin
val List(c, cMod) = optCompiler.compileClasses(code)
assertSameSummary(getMethod(c, "t1"), List(ICONST_1, ICONST_2, IADD, IRETURN))
assertSameSummary(getMethod(c, "t2"), List(ICONST_1, IRETURN))
assertInvokedMethods(getMethod(c, "t3"), List("C.tplCall", "scala/Tuple2._1", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>", "java/lang/String.length"))
assertInvokedMethods(getMethod(c, "t4"), List("C.tplCall", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>"))
assertNoInvoke(getMethod(c, "t5"))
assertSameSummary(getMethod(c, "t6"), List(BIPUSH, IRETURN))
// MatchError reachable because of the type pattern `s: String`
assertInvokedMethods(getMethod(c, "t7"), List("C.a", "C.b", "scala/MatchError.<init>", "java/lang/String.length"))
assertSameSummary(getMethod(c, "t8"), List(ALOAD, "b", IRETURN))
// C allocation not eliminated - constructor may have side-effects.
assertSameSummary(getMethod(c, "t9"), List(NEW, DUP, LDC, BIPUSH, "<init>", "a", "toString", ARETURN))
}
}
|
martijnhoekstra/scala
|
test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
|
Scala
|
apache-2.0
| 5,903
|
// GENERATED CODE: DO NOT EDIT
package org.usagram.clarify
case class Validity3[+V1, +V2, +V3](_1: Definite[V1], _2: Definite[V2], _3: Definite[V3])
extends Validity with Product3[Definite[V1], Definite[V2], Definite[V3]] {
val values = Seq(_1, _2, _3)
def resolve[R](resolve: (V1, V2, V3) => R): R =
if (isValid) {
resolve(_1.value, _2.value, _3.value)
}
else {
throw new InvalidValueException(invalidValues)
}
}
|
takkkun/clarify
|
core/src/main/scala/org/usagram/clarify/Validity3.scala
|
Scala
|
mit
| 453
|
/*
* Copyright 2017 Guy Van den Broeck <guyvdb@cs.ucla.edu>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.sdd
import edu.ucla.cs.starai.logic._
import com.google.common.cache.Cache
import com.google.common.cache.CacheBuilder
import com.google.common.cache.CacheStats
import edu.ucla.cs.starai.graph.DoubleLinkedTree
import edu.ucla.cs.starai.graph.DAG
import scala.language.existentials
/**
* These are not implemented here because they are likely to require specialized implementations anyway
*/
trait ComposableXYDecomposition[N <: ComposableSDD[N]] extends XYDecomposition[N]{
// do not refer back to SDD nodes or manager that could refer to this object
//TODO check whether unique nodes cache is broken by reference to vtree here (and parent references in vtree).
//TODO consider sorting elements and associating a designated unique nodes and apply cache with the first prime!
def elements: Seq[ComposableElement[N]]
def mapPrimes(f: N => N): ComposableXYDecomposition[N]
def mapPrimes(f: N => N, extraPrime:N, extraSub: N): ComposableXYDecomposition[N]
def mapSubs(f: N => N): ComposableXYDecomposition[N]
def &&(that: ComposableXYDecomposition[N]): ComposableXYDecomposition[N]
// specialize instead of using mapSubs because negation cannot undo compression
def unary_!(): ComposableXYDecomposition[N]
}
sealed trait ComposableElement[N <: ComposableSDD[N]] extends Element[N] {
def &&(that: ComposableElement[N]): Option[ComposableElement[N]] = {
val newPrime = that.prime && this.prime
if(newPrime.isConsistent)
Some(new ComposableElementImpl(newPrime, ComposableElement.this.sub && that.sub))
else None
}
def mapPrime(f: N => N): Option[ComposableElement[N]] = {
val fprime = f(prime)
if(fprime.isConsistent) Some(new ComposableElementImpl(fprime, sub))
else None
}
def mapSub(f: N => N): ComposableElement[N] = {
new ComposableElementImpl(prime,f(sub))
}
def unary_!(): ComposableElement[N] = {
new ComposableElementImpl(prime,!sub)
}
}
final class ComposableElementImpl[N <: ComposableSDD[N]](val prime: N, val sub: N)
extends ComposableElement[N]
|
UCLA-StarAI/ScalaDD
|
src/main/scala/edu/ucla/cs/starai/sdd/ComposableXYDecomposition.scala
|
Scala
|
apache-2.0
| 2,735
|
package at.logic.gapt.examples.tip.prod
import at.logic.gapt.expr._
import at.logic.gapt.formats.ClasspathInputFile
import at.logic.gapt.formats.tip.TipSmtParser
import at.logic.gapt.proofs.gaptic._
import at.logic.gapt.proofs.{ Ant, Sequent }
object prop_06 extends TacticsProof {
val bench = TipSmtParser.fixupAndParse( ClasspathInputFile( "tip/prod/prop_06.smt2", getClass ) )
ctx = bench.ctx
val sequent = bench.toSequent.zipWithIndex.map {
case ( f, Ant( i ) ) => s"h$i" -> f
case ( f, _ ) => "goal" -> f
}
val theory = sequent.antecedent ++: Sequent()
val lem_2 = (
( "al1" -> hof"length(nil) = Z" ) +:
( "al2" -> hof"∀y ∀xs length(cons(y, xs)) = S(length(xs))" ) +:
( "aa1" -> hof"∀y append(nil, y) = y" ) +:
( "aa2" -> hof"∀z ∀xs ∀y append(cons(z, xs), y) = cons(z, append(xs, y))" ) +:
Sequent() :+ ( "append_left_cons" -> hof"∀xs∀y∀zs length(append(xs,cons(y,zs))) = S(length(append(xs,zs)))" )
)
val lem_2_proof = Lemma( lem_2 ) {
allR; induction( hov"xs:list" )
//- BC
decompose
rewrite.many ltr "aa1" in "append_left_cons"
rewrite.many ltr "al2" in "append_left_cons"
refl
//- IC
decompose
rewrite.many ltr "aa2" in "append_left_cons"
rewrite.many ltr "al2" in "append_left_cons"
rewrite.many ltr "IHxs_0" in "append_left_cons"
refl
}
val lem_3 = (
( "al2" -> hof"∀y ∀xs length(cons(y, xs)) = S(length(xs))" ) +:
( "al1" -> hof"length(nil) = Z" ) +:
( "aa1" -> hof"∀y append(nil, y) = y" ) +:
( "aa2" -> hof"∀z ∀xs ∀y append(cons(z, xs), y) = cons(z, append(xs, y))" ) +:
Sequent() :+ ( "append_one" -> hof"!xs!y length(append(xs,cons(y,nil))) = S(length(xs))" )
)
val lem_3_proof = Lemma( lem_3 ) {
cut( "lem_2", hof"∀xs∀y∀zs length(append(xs,cons(y,zs))) = S(length(append(xs,zs)))" )
insert( lem_2_proof )
decompose
rewrite ltr "lem_2" in "append_one"
induction( hov"xs:list" )
//- BC
rewrite ltr "aa1" in "append_one"
rewrite.many ltr "al1" in "append_one"
refl
//- IC
rewrite ltr "aa2" in "append_one"
rewrite.many ltr "al2" in "append_one"
rewrite ltr "IHxs_0" in "append_one"
refl
}
val prop_05 = (
( "al1" -> hof"length(nil) = Z" ) +:
( "al2" -> hof"∀y ∀xs length(cons(y, xs)) = S(length(xs))" ) +:
( "aa1" -> hof"∀y append(nil, y) = y" ) +:
( "aa2" -> hof"∀z ∀xs ∀y append(cons(z, xs), y) = cons(z, append(xs, y))" ) +:
( "ar1" -> hof"rev(nil) = nil" ) +:
( "ar2" -> hof"∀y ∀xs rev(cons(y, xs)) = append(rev(xs), cons(y, nil))" ) +:
Sequent() :+ ( "length_rev_inv" -> hof"∀x length(rev(x)) = length(x)" )
)
val prop_05_proof = Lemma( prop_05 ) {
cut( "lem_3", hof"!xs!y length(append(xs,cons(y,nil))) = S(length(xs))" )
insert( lem_3_proof )
allR; induction( hov"x:list" )
//- BC
rewrite ltr "ar1" in "length_rev_inv"
refl
//- IC
rewrite ltr "ar2" in "length_rev_inv"
rewrite ltr "lem_3" in "length_rev_inv"
rewrite ltr "al2" in "length_rev_inv"
rewrite ltr "IHx_0" in "length_rev_inv"
refl
}
val proof = Lemma( sequent ) {
allR; induction( hov"x:list" )
//- BC
allR
rewrite ltr "h7" in "goal"
rewrite ltr "h5" in "goal"
rewrite ltr "h3" in "goal"
cut( "prop_05", hof"∀x length(rev(x)) = length(x)" )
insert( prop_05_proof )
rewrite ltr "prop_05" in "goal"
refl
//- IC
decompose
rewrite ltr "h8" in "goal"
rewrite ltr "h10" in "goal"
rewrite ltr "h6" in "goal"
rewrite ltr "h4" in "goal"
cut( "lem_3", hof"!xs!y length(append(xs,cons(y,nil))) = S(length(xs))" )
insert( lem_3_proof )
rewrite ltr "lem_3" in "goal"
rewrite ltr "IHx_0" in "goal"
refl
}
}
|
gebner/gapt
|
examples/tip/prod/prop_06.scala
|
Scala
|
gpl-3.0
| 3,799
|
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.testkit
import org.scalatest.Tag
object SlowTest extends Tag("org.squbs.testkit.tags.SlowTest")
object DbTest extends Tag("org.squbs.testkit.tags.DbTest")
|
keshin/squbs
|
squbs-testkit/src/main/scala/org/squbs/testkit/Tags.scala
|
Scala
|
apache-2.0
| 771
|
package ca.jimr.scalatron.api
/*
* All the game entities
*/
abstract class Entity(val character: Char)
object Entity {
case object Unknown extends Entity('?')
case object Empty extends Entity('_')
case object Wall extends Entity('W')
case object Me extends Entity('M')
case object MiniMe extends Entity('S')
case object Enemy extends Entity('m')
case object MiniEnemy extends Entity('s')
case object Zugar extends Entity('P')
case object Toxifera extends Entity('p')
case object Fluppet extends Entity('B')
case object Snorg extends Entity('b')
def apply(input: Char) = input match {
case '?' => Unknown
case '_' => Empty
case 'W' => Wall
case 'M' => Me
case 'm' => Enemy
case 'S' => MiniMe
case 's' => MiniEnemy
case 'P' => Zugar
case 'p' => Toxifera
case 'B' => Fluppet
case 'b' => Snorg
case _ => throw new IllegalArgumentException("Invalid entity type")
}
// Filter operations
def isMeMaster(e: Entity) = e == Me
def isEnemyMaster(e: Entity) = e == Enemy
def isEnemyMini(e: Entity) = e == MiniEnemy
def isFood(e: Entity) = e == Fluppet || e == Zugar
}
|
jriecken/scalatron-bots
|
src/main/scala/ca/jimr/scalatron/api/Entity.scala
|
Scala
|
mit
| 1,151
|
package com.themillhousegroup.edn
import org.specs2.mutable.Specification
import com.themillhousegroup.edn.test.EDNParsing
import scala.util.Try
import com.themillhousegroup.edn.test.CaseClassFixtures._
import scala.reflect.runtime.universe._
import scala.Product
class ReadIntoFlatCaseClassSpec extends Specification with EDNParsing {
case class CannotCreate(x: String, y: String)
"Reading EDN into case classes - flat structures -" should {
"Reject a case class that won't be instantiable" in new CaseClassScope(
""" :x "foo" :y "bar" """) {
readInto[CannotCreate] must beAFailedTry[CannotCreate].withThrowable[UnsupportedOperationException]
}
"Support single-level mapping of simple strings" in new CaseClassScope(
""" :bish "foo" :bash "bar" :bosh "baz" """) {
val readResult = readIntoResult[AllStrings]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must beEqualTo("bar")
readResult.bosh must beEqualTo("baz")
}
"Return a failed Try: IllegalArgumentException if a field is missing" in new CaseClassScope(
""" :bish "foo" :bash "bar" """) {
readInto[AllStrings] must beAFailedTry[AllStrings].withThrowable[IllegalArgumentException]
}
"Support single-level mapping of optional strings - present" in new CaseClassScope(
""" :bish "foo" :bash "bar" :bosh "baz" """) {
val readResult: OptionalStrings = readIntoResult[OptionalStrings]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must beSome("bar")
readResult.bosh must beEqualTo("baz")
}
"Support single-level mapping of optional strings - absent" in new CaseClassScope(
""" :bish "foo" :bosh "baz" """) {
val readResult: OptionalStrings = readIntoResult[OptionalStrings]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must beNone
readResult.bosh must beEqualTo("baz")
}
"Support automatic mapping of Longs to Ints" in new CaseClassScope(
""" :bash 6 :bosh 9 """) {
val readResult = readIntoResult[IntsNotLongs]
readResult must not beNull
readResult.bash must beSome(6)
readResult.bosh must beEqualTo(9)
}
"Support Longs in case classes" in new CaseClassScope(
""" :bash 6 :bosh 9 """) {
val readResult = readIntoResult[AllLongs]
readResult must not beNull
readResult.bash must beSome(6)
readResult.bosh must beEqualTo(9)
}
"Support single-level mapping of mixed types" in new CaseClassScope(
""" :bish "foo" :bash 6 :bosh 9 """) {
val readResult = readIntoResult[MixedBunch]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must beSome(6)
readResult.bosh must beEqualTo(9)
}
"Support single-level mapping where a member is a list" in new CaseClassScope(
""" :bish "foo" :bash ("x" "y" "z") :bosh 9 """) {
val readResult = readIntoResult[BasicWithList]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must containTheSameElementsAs(Seq("x", "y", "z"))
readResult.bosh must beEqualTo(9)
}
"Support single-level mapping where a member is a vector" in new CaseClassScope(
""" :bish "foo" :bash ["x" "y" "z"] :bosh 9 """) {
val readResult = readIntoResult[BasicWithList]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must containTheSameElementsAs(Seq("x", "y", "z"))
readResult.bosh must beEqualTo(9)
}
"Support single-level mapping where a member is a set" in new CaseClassScope(
""" :bish "foo" :bash #{"x" "y" "z"} :bosh 9 """) {
val readResult = readIntoResult[BasicWithSet]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must containTheSameElementsAs(Seq("x", "y", "z"))
readResult.bosh must beEqualTo(9)
}
"Support single-level mapping where a member is a map" in new CaseClassScope(
""" :bish "foo" :bash {:x "eks" :y "wye" :z "zed" } :bosh 9 """) {
val readResult = readIntoResult[BasicWithMap]
readResult must not beNull
readResult.bish must beEqualTo("foo")
readResult.bash must havePairs("x" -> "eks", "y" -> "wye", "z" -> "zed")
readResult.bosh must beEqualTo(9)
}
}
}
|
themillhousegroup/edn-scala
|
src/test/scala/com/themillhousegroup/edn/ReadIntoFlatCaseClassSpec.scala
|
Scala
|
gpl-2.0
| 4,480
|
package com.github.mdr.graphospasm.grapheditor.part
import com.github.mdr.graphospasm.grapheditor.figure.NodeFigure
import com.github.mdr.graphospasm.grapheditor.Plugin
import org.eclipse.swt.graphics.Color
import org.eclipse.draw2d.IFigure
import org.eclipse.gef.EditPart
import org.eclipse.gef.GraphicalEditPart
import org.eclipse.gef.Request
import org.eclipse.gef.RequestConstants._
import org.eclipse.gef.editpolicies.GraphicalEditPolicy
import PartialFunction._
import org.eclipse.gef.requests.CreateRequest
import com.github.mdr.graphospasm.grapheditor.ConnectionInProgress
import com.github.mdr.graphospasm.grapheditor.Attribute
class NodeTargetFeedbackEditPolicy extends GraphicalEditPolicy {
private final def getFigure = getHost.asInstanceOf[NodeEditPart].getFigure
override def getTargetEditPart(request: Request): EditPart =
if (request.getType == REQ_SELECTION_HOVER) getHost else null
def showHighlight() {
getFigure.targetFeedback = true
}
override def eraseTargetFeedback(request: Request) {
getFigure.targetFeedback = false
}
override def showTargetFeedback(request: Request) {
val highlight = cond(request.getType) {
case REQ_MOVE | REQ_ADD | REQ_CLONE | REQ_CONNECTION_START | REQ_CONNECTION_END | REQ_RECONNECT_SOURCE | REQ_RECONNECT_TARGET ⇒ true
case REQ_CREATE ⇒ cond(request) {
case createRequest: CreateRequest ⇒ cond(createRequest.getNewObject) {
case _: ConnectionInProgress | _: Attribute ⇒ true
}
}
}
if (highlight)
showHighlight()
}
}
|
mdr/graphospasm
|
com.github.mdr.graphospasm.grapheditor/src/main/scala/com/github/mdr/graphospasm/grapheditor/part/NodeTargetFeedbackEditPolicy.scala
|
Scala
|
mit
| 1,571
|
package vexriscv.plugin
import vexriscv._
import spinal.core._
import spinal.lib._
import scala.collection.mutable.ArrayBuffer
class PcManagerSimplePlugin(resetVector : BigInt,
relaxedPcCalculation : Boolean = false,
keepPcPlus4 : Boolean = true) extends Plugin[VexRiscv]{
override def build(pipeline: VexRiscv): Unit = {println("PcManagerSimplePlugin is now useless")}
}
//class PcManagerSimplePlugin(resetVector : BigInt,
// relaxedPcCalculation : Boolean = false,
// keepPcPlus4 : Boolean = true) extends Plugin[VexRiscv] with JumpService{
// //FetchService interface
// case class JumpInfo(interface : Flow[UInt], stage: Stage, priority : Int)
// val jumpInfos = ArrayBuffer[JumpInfo]()
// override def createJumpInterface(stage: Stage, priority : Int = 0): Flow[UInt] = {
// val interface = Flow(UInt(32 bits))
// jumpInfos += JumpInfo(interface,stage, priority)
// interface
// }
// var prefetchExceptionPort : Flow[ExceptionCause] = null
//
// override def setup(pipeline: VexRiscv): Unit = {
// if(!relaxedPcCalculation) pipeline.unremovableStages += pipeline.prefetch
// }
//
//
// override def build(pipeline: VexRiscv): Unit = {
// import pipeline.config._
// import pipeline._
//
// if(relaxedPcCalculation)
// relaxedImpl(pipeline)
// else
// cycleEffectiveImpl(pipeline)
//
// //Formal verification signals generation
// prefetch.insert(FORMAL_PC_NEXT) := prefetch.input(PC) + 4
// jumpInfos.foreach(info => {
// when(info.interface.valid){
// info.stage.output(FORMAL_PC_NEXT) := info.interface.payload
// }
// })
// }
//
// //reduce combinatorial path, and expose the PC to the pipeline as a register
// def relaxedImpl(pipeline: VexRiscv): Unit = {
// import pipeline.config._
// import pipeline._
//
// prefetch plug new Area {
// import prefetch._
// //Stage always valid
// arbitration.isValid := True
//
// //PC calculation without Jump
// val pcReg = Reg(UInt(32 bits)) init(resetVector) addAttribute(Verilator.public)
// val pcPlus4 = pcReg + 4
// if(keepPcPlus4) KeepAttribute(pcPlus4)
// when(arbitration.isFiring){
// pcReg := pcPlus4
// }
//
// //JumpService hardware implementation
// val jump = if(jumpInfos.length != 0) new Area {
// val sortedByStage = jumpInfos.sortWith((a, b) => {
// (pipeline.indexOf(a.stage) > pipeline.indexOf(b.stage)) ||
// (pipeline.indexOf(a.stage) == pipeline.indexOf(b.stage) && a.priority > b.priority)
// })
// val valids = sortedByStage.map(_.interface.valid)
// val pcs = sortedByStage.map(_.interface.payload)
//
// val pcLoad = Flow(UInt(32 bits))
// pcLoad.valid := jumpInfos.map(_.interface.valid).orR
// pcLoad.payload := MuxOH(OHMasking.first(valids.asBits), pcs)
//
// //application of the selected jump request
// when(pcLoad.valid) {
// pcReg := pcLoad.payload
// }
// }
//
// insert(PC_CALC_WITHOUT_JUMP) := pcReg
// insert(PC) := pcReg
// }
// }
//
// //Jump take effect instantly (save one cycle), but expose the PC to the pipeline as a 'long' combinatorial path
// def cycleEffectiveImpl(pipeline: VexRiscv): Unit = {
// import pipeline.config._
// import pipeline.prefetch
//
// prefetch plug new Area {
// import prefetch._
// //Stage always valid
// arbitration.isValid := True
//
// //PC calculation without Jump
// val pcReg = Reg(UInt(32 bits)) init(resetVector) addAttribute(Verilator.public)
// val inc = RegInit(False)
// val pcBeforeJumps = pcReg + (inc ## B"00").asUInt
// insert(PC_CALC_WITHOUT_JUMP) := pcBeforeJumps
// val pc = UInt(32 bits)
// pc := input(PC_CALC_WITHOUT_JUMP)
//
// val samplePcNext = False
//
// //JumpService hardware implementation
// val jump = if(jumpInfos.length != 0) new Area {
// val sortedByStage = jumpInfos.sortWith((a, b) => pipeline.indexOf(a.stage) > pipeline.indexOf(b.stage))
// val valids = sortedByStage.map(_.interface.valid)
// val pcs = sortedByStage.map(_.interface.payload)
//
// val pcLoad = Flow(UInt(32 bits))
// pcLoad.valid := jumpInfos.map(_.interface.valid).orR
// pcLoad.payload := MuxOH(OHMasking.first(valids.asBits), pcs)
//
// //application of the selected jump request
// when(pcLoad.valid) {
// inc := False
// samplePcNext := True
// pc := pcLoad.payload
// }
// }
//
// when(arbitration.isFiring){
// inc := True
// samplePcNext := True
// }
//
// when(samplePcNext) { pcReg := pc }
//
// insert(PC) := pc
// }
// }
//}
|
SpinalHDL/VexRiscv
|
src/main/scala/vexriscv/plugin/PcManagerSimplePlugin.scala
|
Scala
|
mit
| 4,874
|
package org.jetbrains.plugins.scala.worksheet
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.openapi.fileTypes.LanguageFileType
import com.intellij.psi.PsiFile
import org.jetbrains.plugins.scala.base.SharedTestProjectToken
import org.jetbrains.plugins.scala.{LatestScalaVersions, ScalaVersion}
import org.jetbrains.plugins.scala.codeInspection.feature.{LanguageFeatureInspection, LanguageFeatureInspectionTestBase}
import org.jetbrains.plugins.scala.project._
import org.jetbrains.plugins.scala.project.settings.{ScalaCompilerConfiguration, ScalaCompilerSettingsProfile}
import org.jetbrains.plugins.scala.worksheet.settings.persistent.WorksheetFilePersistentSettings
abstract class WorksheetLanguageFeatureInspectionBaseTest extends LanguageFeatureInspectionTestBase {
override protected val fileType: LanguageFileType = WorksheetFileType
override protected val classOfInspection: Class[_ <: LocalInspectionTool] = classOf[LanguageFeatureInspection]
override protected val description = "Advanced language feature: higher-kinded type "
}
class WorksheetLanguageFeatureInspection extends WorksheetLanguageFeatureInspectionBaseTest {
override protected def supportedIn(version: ScalaVersion): Boolean = version <= LatestScalaVersions.Scala_2_12
def testThatModuleCompilerProfileSettingsAreUsedInWorksheet_HasError(): Unit = {
val profile = getModule.scalaCompilerSettingsProfile
val newSettings = profile.getSettings.copy(higherKinds = false)
profile.setSettings(newSettings)
checkTextHasError(
s"""def foo[F$START[_]$END, A](fa: F[A]): String = "123"
|""".stripMargin
)
}
def testThatModuleCompilerProfileSettingsAreUsedInWorksheet_NoError(): Unit = {
val profile = getModule.scalaCompilerSettingsProfile
val newSettings = profile.getSettings.copy(higherKinds = true)
profile.setSettings(newSettings)
checkTextHasNoErrors(
s"""def foo[F[_], A](fa: F[A]): String = "123"
|""".stripMargin
)
}
}
class WorksheetScratchFileLanguageFeatureInspection extends WorksheetLanguageFeatureInspectionBaseTest {
override protected val isScratchFile: Boolean = true
protected val TestCompilerProfile = "TestCompilerProfile"
override protected def sharedProjectToken: SharedTestProjectToken = SharedTestProjectToken(None)
override protected def onFileCreated(file: PsiFile): Unit =
WorksheetFilePersistentSettings(file.getVirtualFile).setCompilerProfileName(TestCompilerProfile)
def testThatSpecifiedCompilerProfileSettingsAreUsedInScratchFile_NoError(): Unit = {
val profile = createCompilerProfile(TestCompilerProfile)
val newSettings = profile.getSettings.copy(higherKinds = true)
profile.setSettings(newSettings)
checkTextHasNoErrors(
s"""def foo[F[_], A](fa: F[A]): String = "123"
|""".stripMargin
)
}
def testThatSpecifiedCompilerProfileSettingsAreUsedInWorksheet_Scratchfile(): Unit = {
val profile = createCompilerProfile(TestCompilerProfile)
val newSettings = profile.getSettings.copy(higherKinds = true)
profile.setSettings(newSettings)
checkTextHasNoErrors(
s"""def foo[F[_], A](fa: F[A]): String = "123"
|""".stripMargin
)
}
//noinspection SameParameterValue
private def createCompilerProfile(name: String): ScalaCompilerSettingsProfile =
ScalaCompilerConfiguration.instanceIn(getProject).createCustomProfileForModule(name, getModule)
}
|
JetBrains/intellij-scala
|
scala/worksheet/test/org/jetbrains/plugins/scala/worksheet/WorksheetLanguageFeatureInspectionBaseTest.scala
|
Scala
|
apache-2.0
| 3,459
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.crud.rest.cors
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive0, Route}
import com.bwsw.sj.crud.rest.utils.RestLiterals
import com.typesafe.config.ConfigFactory
/**
* Trait from akka-http-rest template https://github.com/ArchDev/akka-http-rest
*/
trait CorsSupport {
lazy val allowedOriginHeader = {
val config = ConfigFactory.load()
val sAllowedOrigin = config.getString(RestLiterals.corsAllowedOriginConfig)
if (sAllowedOrigin == "*")
`Access-Control-Allow-Origin`.*
else
`Access-Control-Allow-Origin`(HttpOrigin(sAllowedOrigin))
}
private def addAccessControlHeaders(): Directive0 = {
mapResponseHeaders { headers =>
allowedOriginHeader +:
`Access-Control-Allow-Credentials`(true) +:
`Access-Control-Allow-Headers`("Token", "Content-Type", "X-Requested-With") +:
headers
}
}
private def preflightRequestHandler: Route = options {
complete(HttpResponse(200).withHeaders(
`Access-Control-Allow-Methods`(OPTIONS, POST, PUT, GET, DELETE)
)
)
}
def corsHandler(r: Route) = addAccessControlHeaders() {
preflightRequestHandler ~ r
}
}
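// Illustrative usage sketch (hypothetical route, for illustration only): a service that
// mixes in CorsSupport would typically wrap its routes as
//
//   val route: Route = corsHandler {
//     path("health") { get { complete("OK") } }
//   }
//
// so that preflight OPTIONS requests and the Access-Control-* response headers are
// handled uniformly for everything under the wrapped route.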
|
bwsw/sj-platform
|
core/sj-crud-rest/src/main/scala/com/bwsw/sj/crud/rest/cors/CorsSupport.scala
|
Scala
|
apache-2.0
| 2,147
|
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager
import java.util.Properties
import akka.actor.{ActorRef, ActorSystem, Kill, Props}
import akka.pattern._
import akka.util.Timeout
import com.typesafe.config.{Config, ConfigFactory}
import kafka.manager.actor.cluster.KafkaStateActor
import kafka.manager.base.LongRunningPoolConfig
import kafka.manager.features.ClusterFeatures
import kafka.manager.logkafka.{LogkafkaViewCacheActorConfig, LogkafkaViewCacheActor}
import kafka.manager.model.{ClusterContext, ClusterConfig, ActorModel}
import kafka.manager.utils.KafkaServerInTest
import ActorModel._
import kafka.test.SeededBroker
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.Try
/**
* @author hiral
*/
class TestLogkafkaViewCacheActor extends KafkaServerInTest {
private[this] val akkaConfig: Properties = new Properties()
akkaConfig.setProperty("pinned-dispatcher.type","PinnedDispatcher")
akkaConfig.setProperty("pinned-dispatcher.executor","thread-pool-executor")
private[this] val config : Config = ConfigFactory.parseProperties(akkaConfig)
private[this] val system = ActorSystem("test-logkafka-view-cache-actor",config)
private[this] val broker = new SeededBroker("lkvc-test",4)
override val kafkaServerZkPath = broker.getZookeeperConnectionString
private[this] var logkafkaStateActor : Option[ActorRef] = None
private[this] implicit val timeout: Timeout = 10.seconds
private[this] var logkafkaViewCacheActor : Option[ActorRef] = None
private[this] val defaultClusterConfig = ClusterConfig("test","0.8.2.0","localhost:2818",100,false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None)
private[this] val defaultClusterContext = ClusterContext(ClusterFeatures.from(defaultClusterConfig), defaultClusterConfig)
override protected def beforeAll(): Unit = {
super.beforeAll()
val clusterConfig = ClusterConfig("dev","0.8.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None)
val clusterContext = ClusterContext(ClusterFeatures.from(clusterConfig), clusterConfig)
val props = Props(classOf[KafkaStateActor],sharedCurator, defaultClusterContext)
logkafkaStateActor = Some(system.actorOf(props.withDispatcher("pinned-dispatcher"),"lksa"))
val lkvConfig = LogkafkaViewCacheActorConfig(logkafkaStateActor.get.path, clusterContext, LongRunningPoolConfig(2,100), FiniteDuration(10, SECONDS))
val lkvcProps = Props(classOf[LogkafkaViewCacheActor],lkvConfig)
logkafkaViewCacheActor = Some(system.actorOf(lkvcProps,"logkafka-view"))
logkafkaViewCacheActor.get ! BVForceUpdate
Thread.sleep(10000)
}
override protected def afterAll(): Unit = {
logkafkaViewCacheActor.foreach( _ ! Kill )
logkafkaStateActor.foreach( _ ! Kill )
system.shutdown()
Try(broker.shutdown())
super.afterAll()
}
private[this] def withLogkafkaViewCacheActor[Input,Output,FOutput]
(msg: Input)(fn: Output => FOutput)(implicit tag: ClassTag[Output]) : FOutput = {
require(logkafkaViewCacheActor.isDefined, "logkafkaViewCacheActor undefined!")
val future = ask(logkafkaViewCacheActor.get, msg).mapTo[Output]
val result = Await.result(future,10.seconds)
fn(result)
}
}
|
xuwei-k/kafka-manager
|
test/kafka/manager/TestLogkafkaViewCacheActor.scala
|
Scala
|
apache-2.0
| 3,430
|
package com.typesafe.sbt
package packager
import java.io.File
import java.nio.file.Files
import java.nio.file.attribute.{PosixFilePermission, PosixFilePermissions}
import scala.util.Try
/**
* Setting the file permissions
*/
object chmod {
/**
* Using java 7 nio API to set the permissions.
*
* @param file
* @param perms in octal format
*/
def apply(file: File, perms: String): Unit = {
val posix = permissions(perms)
val result = Try {
Files.setPosixFilePermissions(file.toPath, posix)
} recoverWith {
// in case of windows
case e: UnsupportedOperationException =>
Try {
// check the parsed permission set (posix), not the raw octal string
file.setExecutable(posix contains PosixFilePermission.OWNER_EXECUTE)
file.setWritable(posix contains PosixFilePermission.OWNER_WRITE)
}
}
// propagate error
if (result.isFailure) {
val e = result.failed.get
sys.error("Error setting permissions " + perms + " on " + file.getAbsolutePath + ": " + e.getMessage)
}
}
}
/**
* Converts an octal unix permission representation into
* a java `PosixFilePermissions` compatible string.
*/
object permissions {
/**
* @param perms in octal format
* @return java 7 posix file permissions
*/
def apply(perms: String): java.util.Set[PosixFilePermission] =
PosixFilePermissions fromString convert(perms)
def convert(perms: String): String = {
require(perms.length == 4 || perms.length == 3, s"Permissions must have 3 or 4 digits, got [$perms]")
// ignore setuid/setguid/sticky bit
val i = if (perms.length == 3) 0 else 1
val user = Character getNumericValue (perms charAt i)
val group = Character getNumericValue (perms charAt i + 1)
val other = Character getNumericValue (perms charAt i + 2)
asString(user) + asString(group) + asString(other)
}
private def asString(perm: Int): String = perm match {
case 0 => "---"
case 1 => "--x"
case 2 => "-w-"
case 3 => "-wx"
case 4 => "r--"
case 5 => "r-x"
case 6 => "rw-"
case 7 => "rwx"
}
/** Enriches String with an `oct` interpolator that parses the string as a base-8 integer. */
implicit class OctalString(val sc: StringContext) extends AnyVal {
def oct(args: Any*) = Integer.parseInt(sc.s(args: _*), 8)
}
}
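// Illustrative usage sketch (hypothetical paths and modes, for illustration only):
//
//   chmod(new File("target/universal/stage/bin/run"), "0755") // rwxr-xr-x
//   permissions("644")                                        // owner rw-, group r--, others r--
//
//   import permissions.OctalString
//   oct"755"                                                  // 493, i.e. "755" parsed as a base-8 Int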
|
fsat/sbt-native-packager
|
src/main/scala/com/typesafe/sbt/packager/FileUtil.scala
|
Scala
|
bsd-2-clause
| 2,286
|
package model
import skinny.DBSettings
import skinny.test._
import org.scalatest.fixture.FlatSpec
import org.scalatest._
import scalikejdbc._
import scalikejdbc.scalatest._
import org.joda.time._
class AdFrameTableSpec extends FlatSpec with Matchers with DBSettings with AutoRollback {
}
|
yoshitakes/skinny-task-example
|
src/test/scala/model/AdFrameTableSpec.scala
|
Scala
|
mit
| 290
|
package com.nekopiano.scala.processing.sandbox.poc.pdf
import com.nekopiano.scala.processing.{ScalaPApplet, ScalaPVector}
import processing.core.PGraphics;
/**
* Created on 26/07/2016.
*/
class LineRenderingApp extends ScalaPApplet {
var pdf:PGraphics = null
var record = false
override def settings(): Unit = {
size(200, 200)
}
override def setup(): Unit = {
background(255)
smooth()
strokeWeight(15)
frameRate(24)
}
var startX = 0
override def draw(): Unit = {
stroke(random(50), random(255), random(255), 100)
line(startX, 0, random(0, width), height)
if (startX < width) {
startX += 1
} else {
startX = 0
}
}
}
object LineRenderingApp {
val BOOTING_CLASS_NAME = this.getClass.getName.dropRight(1)
def main(args: Array[String]) {
// This specifies the class to be instantiated.
val appletArgs = Array(BOOTING_CLASS_NAME)
if (args != null) {
ScalaPApplet.main(appletArgs ++ args)
} else {
ScalaPApplet.main(appletArgs)
}
}
}
|
lamusique/ScalaProcessing
|
samples/src/test/scala/com/nekopiano/scala/processing/sandbox/poc/pdf/LineRendering.scala
|
Scala
|
apache-2.0
| 1,055
|
package gitbucket.core.controller.api
import gitbucket.core.api.{ApiError, ApiLabel, CreateALabel, JsonFormat}
import gitbucket.core.controller.ControllerBase
import gitbucket.core.service._
import gitbucket.core.util.Implicits._
import gitbucket.core.util._
import org.scalatra.{Created, NoContent, UnprocessableEntity}
trait ApiIssueLabelControllerBase extends ControllerBase {
self: AccountService
with IssuesService
with LabelsService
with ReferrerAuthenticator
with WritableUsersAuthenticator =>
/*
* i. List all labels for this repository
* https://developer.github.com/v3/issues/labels/#list-all-labels-for-this-repository
*/
get("/api/v3/repos/:owner/:repository/labels")(referrersOnly { repository =>
JsonFormat(getLabels(repository.owner, repository.name).map { label =>
ApiLabel(label, RepositoryName(repository))
})
})
/*
* ii. Get a single label
* https://developer.github.com/v3/issues/labels/#get-a-single-label
*/
get("/api/v3/repos/:owner/:repository/labels/:labelName")(referrersOnly { repository =>
getLabel(repository.owner, repository.name, params("labelName")).map { label =>
JsonFormat(ApiLabel(label, RepositoryName(repository)))
} getOrElse NotFound()
})
/*
* iii. Create a label
* https://developer.github.com/v3/issues/labels/#create-a-label
*/
post("/api/v3/repos/:owner/:repository/labels")(writableUsersOnly { repository =>
(for {
data <- extractFromJsonBody[CreateALabel] if data.isValid
} yield {
LockUtil.lock(RepositoryName(repository).fullName) {
if (getLabel(repository.owner, repository.name, data.name).isEmpty) {
val labelId = createLabel(repository.owner, repository.name, data.name, data.color)
getLabel(repository.owner, repository.name, labelId).map { label =>
Created(JsonFormat(ApiLabel(label, RepositoryName(repository))))
} getOrElse NotFound()
} else {
// TODO ApiError should support an errors field to improve compatibility with the GitHub API
UnprocessableEntity(
ApiError(
"Validation Failed",
Some("https://developer.github.com/v3/issues/labels/#create-a-label")
)
)
}
}
}) getOrElse NotFound()
})
/*
* iv. Update a label
* https://developer.github.com/v3/issues/labels/#update-a-label
*/
patch("/api/v3/repos/:owner/:repository/labels/:labelName")(writableUsersOnly { repository =>
(for {
data <- extractFromJsonBody[CreateALabel] if data.isValid
} yield {
LockUtil.lock(RepositoryName(repository).fullName) {
getLabel(repository.owner, repository.name, params("labelName")).map {
label =>
if (getLabel(repository.owner, repository.name, data.name).isEmpty) {
updateLabel(repository.owner, repository.name, label.labelId, data.name, data.color)
JsonFormat(
ApiLabel(
getLabel(repository.owner, repository.name, label.labelId).get,
RepositoryName(repository)
)
)
} else {
// TODO ApiError should support an errors field to improve compatibility with the GitHub API
UnprocessableEntity(
ApiError(
"Validation Failed",
Some("https://developer.github.com/v3/issues/labels/#create-a-label")
)
)
}
} getOrElse NotFound()
}
}) getOrElse NotFound()
})
/*
* v. Delete a label
* https://developer.github.com/v3/issues/labels/#delete-a-label
*/
delete("/api/v3/repos/:owner/:repository/labels/:labelName")(writableUsersOnly { repository =>
LockUtil.lock(RepositoryName(repository).fullName) {
getLabel(repository.owner, repository.name, params("labelName")).map { label =>
deleteLabel(repository.owner, repository.name, label.labelId)
NoContent()
} getOrElse NotFound()
}
})
/*
* vi. List labels on an issue
* https://developer.github.com/v3/issues/labels/#list-labels-on-an-issue
*/
get("/api/v3/repos/:owner/:repository/issues/:id/labels")(referrersOnly { repository =>
JsonFormat(getIssueLabels(repository.owner, repository.name, params("id").toInt).map { l =>
ApiLabel(l, RepositoryName(repository.owner, repository.name))
})
})
/*
* vii. Add labels to an issue
* https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue
*/
post("/api/v3/repos/:owner/:repository/issues/:id/labels")(writableUsersOnly { repository =>
JsonFormat(for {
data <- extractFromJsonBody[Seq[String]]
issueId <- params("id").toIntOpt
} yield {
data.map { labelName =>
val label = getLabel(repository.owner, repository.name, labelName).getOrElse(
getLabel(
repository.owner,
repository.name,
createLabel(repository.owner, repository.name, labelName)
).get
)
registerIssueLabel(repository.owner, repository.name, issueId, label.labelId, true)
ApiLabel(label, RepositoryName(repository.owner, repository.name))
}
})
})
/*
* viii. Remove a label from an issue
* https://developer.github.com/v3/issues/labels/#remove-a-label-from-an-issue
*/
delete("/api/v3/repos/:owner/:repository/issues/:id/labels/:name")(writableUsersOnly { repository =>
val issueId = params("id").toInt
val labelName = params("name")
getLabel(repository.owner, repository.name, labelName) match {
case Some(label) =>
deleteIssueLabel(repository.owner, repository.name, issueId, label.labelId, true)
JsonFormat(Seq(label))
case None =>
NotFound()
}
})
/*
* ix. Replace all labels for an issue
* https://developer.github.com/v3/issues/labels/#replace-all-labels-for-an-issue
*/
put("/api/v3/repos/:owner/:repository/issues/:id/labels")(writableUsersOnly { repository =>
JsonFormat(for {
data <- extractFromJsonBody[Seq[String]]
issueId <- params("id").toIntOpt
} yield {
deleteAllIssueLabels(repository.owner, repository.name, issueId, true)
data.map { labelName =>
val label = getLabel(repository.owner, repository.name, labelName).getOrElse(
getLabel(
repository.owner,
repository.name,
createLabel(repository.owner, repository.name, labelName)
).get
)
registerIssueLabel(repository.owner, repository.name, issueId, label.labelId, true)
ApiLabel(label, RepositoryName(repository.owner, repository.name))
}
})
})
/*
* x. Remove all labels from an issue
* https://developer.github.com/v3/issues/labels/#remove-all-labels-from-an-issue
*/
delete("/api/v3/repos/:owner/:repository/issues/:id/labels")(writableUsersOnly { repository =>
val issueId = params("id").toInt
deleteAllIssueLabels(repository.owner, repository.name, issueId, true)
NoContent()
})
/*
* xi. Get labels for every issue in a milestone
* https://developer.github.com/v3/issues/labels/#get-labels-for-every-issue-in-a-milestone
*/
}
|
imeszaros/gitbucket
|
src/main/scala/gitbucket/core/controller/api/ApiIssueLabelControllerBase.scala
|
Scala
|
apache-2.0
| 7,250
|
/*
* Licensed to the Programming Language and Software Methodology Lab (PLSM)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership.
* The PLSM licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.nccu.plsm.archetype
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{ FlatSpec, Matchers }
class HelloWorldTest extends FlatSpec with MockitoSugar with Matchers {
class HelloWordClass extends HelloWorld
"HelloWordClass" should """return "Mock" by method """ in {
val mockMain = mock[HelloWordClass]
when(mockMain.getMessage).thenReturn("Mock")
mockMain.getMessage should be("Mock")
}
}
|
NCCUCS-PLSM/sbt-archetype
|
sub/src/test/scala/edu/nccu/plsm/archetype/HelloWorldTest.scala
|
Scala
|
apache-2.0
| 1,291
|
package com.azavea.pointcloud.ingest
import com.azavea.pointcloud.ingest.conf.IngestConf
import io.pdal._
import geotrellis.pointcloud.pipeline._
import geotrellis.pointcloud.spark._
import geotrellis.pointcloud.spark.io._
import geotrellis.pointcloud.spark.io.hadoop._
import geotrellis.pointcloud.spark.io.s3._
import geotrellis.pointcloud.spark.tiling.Implicits.{withTilerMethods => withPCTilerMethods}
import geotrellis.proj4.CRS
import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.hadoop._
import geotrellis.spark.io.index.ZCurveKeyIndexMethod
import geotrellis.spark.io.kryo.KryoRegistrator
import geotrellis.spark.io.s3.S3LayerWriter
import geotrellis.spark.tiling._
import geotrellis.util._
import geotrellis.vector._
import org.apache.hadoop.fs.Path
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.{SparkConf, SparkContext}
object IngestPC {
def main(args: Array[String]): Unit = {
val opts = IngestConf.parse(args)
// val chunkPath = System.getProperty("user.dir") + "/chunks/"
val conf = new SparkConf()
.setIfMissing("spark.master", "local[*]")
.setAppName("PointCloudCount")
.set("spark.local.dir", "/data/spark")
.set("spark.serializer", classOf[KryoSerializer].getName)
.set("spark.kryo.registrator", classOf[KryoRegistrator].getName)
implicit val sc = new SparkContext(conf)
try {
val pipeline = Read("", opts.inputCrs) ~
ReprojectionFilter(opts.destCrs) ~
opts.maxValue.map { v => RangeFilter(Some(s"Z[0:$v]")) }
val source =
if(opts.nonS3Input)
HadoopPointCloudRDD(
new Path(opts.inputPath),
HadoopPointCloudRDD.Options.DEFAULT.copy(pipeline = pipeline)
).map { case (header, pc) => (header: PointCloudHeader, pc) } //.cache()
else
S3PointCloudRDD(
bucket = opts.S3InputPath._1,
prefix = opts.S3InputPath._2,
S3PointCloudRDD.Options.DEFAULT.copy(pipeline = pipeline)
).map { case (header, pc) => (header: PointCloudHeader, pc) } //.cache
val (extent, crs) =
source
.map { case (header, _) => (header.projectedExtent3D.extent3d.toExtent, header.crs) }
.reduce { case ((e1, c), (e2, _)) => (e1.combine(e2), c) }
val targetCrs = CRS.fromName(opts.destCrs)
val targetExtent =
opts.extent match {
case Some(e) => if (crs.epsgCode != targetCrs.epsgCode) e.reproject(crs, targetCrs) else e
case _ => if (crs.epsgCode != targetCrs.epsgCode) extent.reproject(crs, targetCrs) else extent
}
val layoutScheme = if (opts.pyramid || opts.zoomed) ZoomedLayoutScheme(targetCrs) else FloatingLayoutScheme(512)
val LayoutLevel(zoom, layout) = layoutScheme.levelFor(targetExtent, opts.cellSize)
val kb = KeyBounds(layout.mapTransform(targetExtent))
val md = TileLayerMetadata[SpatialKey](FloatConstantNoDataCellType, layout, targetExtent, targetCrs, kb)
val rdd = source.flatMap(_._2)
val tiled = withPCTilerMethods(rdd).tileToLayout(layout)
val layer = ContextRDD(tiled, md)
layer.cache()
if(opts.persist) {
val writer =
if(opts.nonS3Catalog) HadoopLayerWriter(new Path(opts.catalogPath))
else S3LayerWriter(opts.S3CatalogPath._1, opts.S3CatalogPath._2)
writer
.write[SpatialKey, PointCloud, TileLayerMetadata[SpatialKey]](
LayerId(opts.layerName, 0),
layer,
ZCurveKeyIndexMethod
)
} else layer.count()
layer.unpersist(blocking = false)
source.unpersist(blocking = false)
} finally sc.stop()
}
}
|
lossyrob/geotrellis-pointcloud-demo
|
src/app-backend/ingest/src/main/scala/com/azavea/pointcloud/ingest/IngestPC.scala
|
Scala
|
apache-2.0
| 3,752
|
package com.automatak.render.dnp3.objects.groups
import com.automatak.render.dnp3.objects.{FixedSize, FixedSizeField, ObjectGroup}
import FixedSizeField._
// common time of occurrence
object Group51 extends ObjectGroup {
def objects = List(Group51Var1, Group51Var2)
def group: Byte = 51
def desc: String = "Time and Date CTO"
def isEventGroup: Boolean = false
}
object Group51Var1 extends FixedSize(Group51, 1, "Absolute time, synchronized")(time48)
object Group51Var2 extends FixedSize(Group51, 2, "Absolute time, unsynchronized")(time48)
|
thiagoralves/OpenPLC_v2
|
dnp3/generation/dnp3/src/main/scala/com/automatak/render/dnp3/objects/groups/Group51.scala
|
Scala
|
gpl-3.0
| 552
|
import S99.P05._
import org.scalatest._
class P05Spec extends FlatSpec {
"reverse(xs)" should "reverse a list" in {
assertResult(List(8, 5, 3, 2, 1, 1)) {
reverse(List(1, 1, 2, 3, 5, 8))
}
}
}
|
gcanti/S-99
|
src/test/scala/P05Spec.scala
|
Scala
|
mit
| 213
|
package com.codiply.barrio.geometry
import scala.math.sqrt
import Point.Coordinates
final case class Point(id: String, location: Coordinates, data: String)
final case class PartitioningPlane(centroid1: Coordinates, centroid2: Coordinates)
final object Point {
type Coordinates = List[Double]
final object Coordinates {
def apply(doubles: Double*): Coordinates = doubles.toList
def innerProduct(x: Coordinates, y: Coordinates): Double = x.zip(y).map { t => t._1 * t._2 }.sum
def subtract(x: Coordinates, y: Coordinates): Coordinates = x.zip(y).map { t => t._1 - t._2 }
def add(x: Coordinates, y: Coordinates): Coordinates = x.zip(y).map { t => t._1 + t._2 }
def scale(b: Double, x: Coordinates): Coordinates = x.map(_ * b)
def normalize(x: Coordinates): Option[Coordinates] = {
val norm = sqrt(innerProduct(x, x))
if (norm > 0.0) {
Some(scale(1 / norm, x))
} else {
None
}
}
}
}
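// Hedged usage sketch (not part of the original file above): a minimal demonstration of
// the Coordinates helpers defined in object Point, assuming the file above is on the
// classpath. The numeric values are chosen purely for illustration.
object CoordinatesUsageSketch {
  import com.codiply.barrio.geometry.Point.Coordinates

  def main(args: Array[String]): Unit = {
    val x = Coordinates(3.0, 4.0)
    val y = Coordinates(1.0, 2.0)
    println(Coordinates.innerProduct(x, y))               // 3*1 + 4*2 = 11.0
    println(Coordinates.add(x, y))                        // List(4.0, 6.0)
    println(Coordinates.normalize(x))                     // Some(List(0.6, 0.8)), up to rounding
    println(Coordinates.normalize(Coordinates(0.0, 0.0))) // None: the zero vector has no direction
  }
}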
|
codiply/barrio
|
src/main/scala/com/codiply/barrio/geometry/Point.scala
|
Scala
|
apache-2.0
| 958
|
package org.bitcoins.testkitcore
import org.scalacheck.Gen
import org.scalatest.compatible.Assertion
import org.scalatest.exceptions.TestFailedException
import scala.annotation.tailrec
/** Provides extension methods, syntax
* and other handy implicit values that
* aid in testing.
*/
object Implicits {
/** Extension methods for Scalacheck generators */
implicit class GeneratorOps[T](private val gen: Gen[T]) extends AnyVal {
/** Gets a sample from this generator that's not `None` */
def sampleSome: T = {
val max = 10
@tailrec
def loop(counter: Int): T =
if (counter > max) {
sys.error(
s"Could not get a sample from generator after $max attempts")
} else {
gen.sample match {
case None => loop(counter + 1)
case Some(sample) => sample
}
}
loop(0)
}
}
/** Extension methods for sequences of assertions */
implicit class AssertionSeqOps(private val assertions: Seq[Assertion]) {
/** Flattens a sequence of assertions into only one */
def toAssertion: Assertion =
assertions match {
case Seq() =>
throw new TestFailedException(
message = "Cannot turn an empty list into an assertion!",
failedCodeStackDepth = 0)
// this should force all collection kinds to
// evaluate all their members, throwing when
// evaluating a bad one
case nonEmpty =>
nonEmpty.foreach(_ => ())
nonEmpty.last
}
}
}
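// Hedged usage sketch (not part of the original file above): how the extension methods
// above are typically pulled into a test, assuming scalacheck is on the classpath.
// The generator is illustrative only.
object ImplicitsUsageSketch {
  import org.bitcoins.testkitcore.Implicits._
  import org.scalacheck.Gen

  def main(args: Array[String]): Unit = {
    // sampleSome retries Gen.sample up to 10 times before giving up with sys.error.
    val n: Int = Gen.choose(0, 100).sampleSome
    println(s"sampled $n")
    // AssertionSeqOps.toAssertion would collapse a Seq[Assertion] produced inside a
    // scalatest suite into a single Assertion (throwing on an empty sequence).
  }
}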
|
bitcoin-s/bitcoin-s
|
testkit-core/src/main/scala/org/bitcoins/testkitcore/Implicits.scala
|
Scala
|
mit
| 1,564
|
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.datasource.schema
import org.apache.spark.sql.types.StructType
/**
* Knows the way to provide some Data Source schema
*/
trait SchemaProvider {
/**
* Provides the schema for current implementation of Data Source
* @return schema
*/
def schema(): StructType
}
|
Stratio/spark-mongodb
|
spark-mongodb/src/main/scala/com/stratio/datasource/schema/SchemaProvider.scala
|
Scala
|
apache-2.0
| 918
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.support.tree
import enumeratum.{Enum, EnumEntry}
import io.truthencode.ddo.support.SearchPrefix
import io.truthencode.ddo.support.StringUtils.Extensions
import io.truthencode.ddo.support.naming.{DisplayName, FriendlyDisplay}
import io.truthencode.ddo.support.points.SpendablePoints
import scala.collection.immutable
sealed trait TreeLike extends EnumEntry with DisplayName with FriendlyDisplay with SearchPrefix {
val pointType: SpendablePoints
override protected def nameSource: String =
entryName.splitByCase.toPascalCase
}
object TreeLike extends Enum[TreeLike] {
override lazy val values: immutable.IndexedSeq[TreeLike] =
ClassTrees.values ++ DestinySpheres.values ++ EpicDestiny.values ++ ReaperTrees.values ++ UniversalTrees.values
}
/**
* Represents Enhancement Trees such as Universal and Class Enhancement trees (Pale Master, Falconry
* etc)
*/
trait EnhancementTree extends TreeLike {
override val pointType: SpendablePoints = SpendablePoints.ActionPoints
}
/**
* Enhancement Lines with Class Restrictions
*/
trait ClassTree extends EnhancementTree
/**
* Enhancement Lines with no Class Restrictions
*/
trait UniversalTree extends EnhancementTree
trait ReaperTree extends TreeLike {
override val pointType: SpendablePoints = SpendablePoints.SurvivalPoints
}
/**
* One of the Epic Destiny Spheres (Primal, Arcane etc)
*/
trait DestinySphere extends TreeLike {
override val pointType: SpendablePoints = SpendablePoints.FatePoints
}
/**
* Epic Destinies lines within a given sphere such as Grandmaster of Flowers
*/
trait DestinyTree extends TreeLike {
override val pointType: SpendablePoints = SpendablePoints.EpicDestinyPoints
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/support/tree/TreeLike.scala
|
Scala
|
apache-2.0
| 2,352
|
package jsm4s.attribute
import scala.collection.SortedMap
class EnumAttribute(val values: SortedMap[String, Int], val offset:Int) extends Attribute {
private val mapping = values.keys.zipWithIndex.toMap
override def apply(value: String) = Seq(offset + mapping(value))
override def size: Int = values.size
override def toString = s"$offset:${values.mkString}"
}
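// Hedged usage sketch (not part of the original file above): the index an EnumAttribute
// assigns to a value is its key's position in sorted key order plus the offset; the
// values stored in the SortedMap are not consulted by apply. Data is illustrative only.
object EnumAttributeUsageSketch {
  import scala.collection.SortedMap
  import jsm4s.attribute.EnumAttribute

  def main(args: Array[String]): Unit = {
    val attr = new EnumAttribute(SortedMap("blue" -> 0, "green" -> 1, "red" -> 2), offset = 10)
    println(attr("green")) // Seq(11): "green" has index 1 in sorted key order
    println(attr.size)     // 3
  }
}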
|
DmitryOlshansky/jsm4s
|
src/main/scala/jsm4s/attribute/EnumAttribute.scala
|
Scala
|
gpl-2.0
| 374
|
/*
* Accio is a platform to launch computer science experiments.
* Copyright (C) 2016-2018 Vincent Primault <v.primault@ucl.ac.uk>
*
* Accio is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Accio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Accio. If not, see <http://www.gnu.org/licenses/>.
*/
package fr.cnrs.liris.util.scrooge
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream}
import com.twitter.io.Buf
import com.twitter.scrooge.{ThriftStruct, ThriftStructCodec}
import com.twitter.util.Base64StringEncoder
import org.apache.thrift.protocol.{TBinaryProtocol, TCompactProtocol, TProtocolFactory}
import org.apache.thrift.transport.TIOStreamTransport
trait ScroogeSerializer {
def fromString[T <: ThriftStruct](str: String, codec: ThriftStructCodec[T]): T =
fromBytes(Base64StringEncoder.decode(str), codec)
def fromBytes[T <: ThriftStruct](bytes: Array[Byte], codec: ThriftStructCodec[T]): T =
read(new ByteArrayInputStream(bytes), codec)
def read[T <: ThriftStruct](is: InputStream, codec: ThriftStructCodec[T]): T = {
val protocol = protocolFactory.getProtocol(new TIOStreamTransport(is))
codec.decode(protocol)
}
def toString[T <: ThriftStruct](obj: T): String = Base64StringEncoder.encode(toBytes(obj))
def toBytes[T <: ThriftStruct](obj: T): Array[Byte] = {
val baos = new ByteArrayOutputStream
write(obj, baos)
baos.toByteArray
}
def toBuf[T <: ThriftStruct](obj: T): Buf = Buf.ByteArray.Owned(toBytes(obj))
def write[T <: ThriftStruct](obj: T, os: OutputStream): Unit = {
val protocol = protocolFactory.getProtocol(new TIOStreamTransport(os))
obj.write(protocol)
}
protected def protocolFactory: TProtocolFactory
}
object BinaryScroogeSerializer extends ScroogeSerializer {
override protected val protocolFactory = new TBinaryProtocol.Factory
}
object CompactScroogeSerializer extends ScroogeSerializer {
override protected val protocolFactory = new TCompactProtocol.Factory
}
object TextScroogeSerializer extends ScroogeSerializer {
override protected val protocolFactory = new TTextProtocol.Factory
}
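// Hedged usage sketch (not part of the original file above): a generic round trip
// through BinaryScroogeSerializer, written against the trait's signatures above so it
// works for any Scrooge-generated struct passed in with its companion codec.
object ScroogeSerializerUsageSketch {
  import com.twitter.scrooge.{ThriftStruct, ThriftStructCodec}
  import fr.cnrs.liris.util.scrooge.BinaryScroogeSerializer

  // Serialize to bytes and decode again; callers pass the struct's companion as the codec.
  def roundTrip[T <: ThriftStruct](obj: T, codec: ThriftStructCodec[T]): T = {
    val bytes = BinaryScroogeSerializer.toBytes(obj)
    BinaryScroogeSerializer.fromBytes(bytes, codec)
  }
}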
|
privamov/accio
|
accio/java/fr/cnrs/liris/util/scrooge/ScroogeSerializer.scala
|
Scala
|
gpl-3.0
| 2,598
|
package lila.relay
import akka.actor._
import akka.pattern.pipe
import scala.concurrent.duration._
import lila.hub.actorApi.map.Tell
private[relay] final class FICS(config: FICS.Config) extends Actor with Stash with LoggingFSM[FICS.State, Option[FICS.Request]] {
import FICS._
import Telnet._
import GameEvent._
import command.Command
var send: String => Unit = _
val telnet = context.actorOf(Props(classOf[Telnet], () => config.remote, self), name = "telnet")
startWith(Connect, none)
when(Connect) {
case Event(Connection(s), _) =>
send = s
goto(Login)
}
// when(Login) {
// case Event(In(data), _) if data endsWith "login: " =>
// send(config.login)
// stay
// case Event(In(data), _) if data endsWith "password: " =>
// send(config.password)
// telnet ! BufferUntil(EOM.some)
// goto(Configure)
// }
when(Login) {
case Event(In(data), _) if data endsWith "login: " =>
send("guest")
goto(Enter)
case Event(in: In, _) => stay
}
when(Enter) {
case Event(In(data), _) if data contains "Press return to enter the server" =>
telnet ! BufferUntil(EOM.some)
send("")
for (v <- Seq("seek", "shout", "cshout", "pin", "gin")) send(s"set $v 0")
for (c <- Seq(1, 4, 53)) send(s"- channel $c")
send("set kiblevel 3000") // shut up if your ELO is < 3000
send("style 12")
stay
case Event(In(data), _) if data contains "Style 12 set." => goto(Throttle)
case Event(in: In, _) => stay
}
when(Ready) {
case Event(cmd: Command, _) =>
send(cmd.str)
goto(Run) using Request(cmd, sender).some
case Event(Observe(ficsId), _) =>
send(s"observe $ficsId")
stay
case Event(Unobserve(ficsId), _) =>
send(s"unobserve $ficsId")
stay
}
when(Run, stateTimeout = 20 second) {
case Event(in: In, Some(Request(cmd, replyTo))) =>
val lines = handle(in)
cmd parse lines match {
case Some(res) =>
replyTo ! res
goto(Throttle) using none
case None =>
log(lines)
stay
}
case Event(StateTimeout, req) =>
req.foreach { r =>
r.replyTo ! Status.Failure(new Exception(s"FICS:Run timeout on ${r.cmd.str}"))
}
goto(Ready) using none
}
when(Throttle, stateTimeout = 500 millis) {
case Event(StateTimeout, _) => goto(Ready) using none
}
whenUnhandled {
case Event(_: Stashable, _) =>
stash()
stay
case Event(in: In, _) =>
log(handle(in))
stay
}
onTransition {
case _ -> Ready => unstashAll()
}
def handle(in: In): List[String] = in.lines.foldLeft(List.empty[String]) {
case (lines, line) =>
Move(line) orElse Clock(line) orElse Resign(line) orElse Draw(line) orElse Limited(line) map {
case move: Move =>
context.parent ! move
lines
case clock: Clock =>
context.parent ! clock
lines
case resign: Resign =>
context.parent ! resign
lines
case draw: Draw =>
context.parent ! draw
lines
case Limited =>
println(s"FICS ERR $line")
lines
} getOrElse {
line :: lines
}
}.reverse
def log(lines: List[String]) {
lines filterNot noise foreach { l =>
println(s"FICS[$stateName] $l")
}
// lines filter noise foreach { l =>
// println(s" (noise) [$stateName] $l")
// }
}
val noiseR = List(
"""^\\ .*""".r,
"""^:$""".r,
"""^fics%""".r,
"""^You will not.*""".r,
""".*You are now observing.*""".r,
""".*You are already observing.*""".r,
"""^Game \d+: .*""".r,
""".*To find more about Relay.*""".r,
""".*You are already observing game \d+""".r,
""".*Removing game \d+.*""".r,
""".*There are no tournaments in progress.*""".r,
""".*Challenge from.*""".r,
""".*who was challenging you.*""".r,
// """.*in the history of both players.*""".r,
// """.*will be closed in a few minutes.*""".r,
"""^\(told relay\)$""".r,
"""^relay\(.+\)\[\d+\] kibitzes: .*""".r,
// """Welcome to the Free Internet Chess Server""".r,
// """Starting FICS session""".r,
""".*ROBOadmin.*""".r,
""".*ANNOUNCEMENT.*""".r)
def noise(str: String) = noiseR exists matches(str)
def matches(str: String)(r: scala.util.matching.Regex) = r.pattern.matcher(str).matches
}
object FICS {
case class Config(host: String, port: Int, login: String, password: String, enabled: Boolean) {
def remote = new java.net.InetSocketAddress(host, port)
}
sealed trait State
case object Connect extends State
case object Login extends State
case object Enter extends State
case object Configure extends State
case object Ready extends State
case object Run extends State
case object Throttle extends State
case class Request(cmd: command.Command, replyTo: ActorRef)
trait Stashable
case class Observe(ficsId: Int) extends Stashable
case class Unobserve(ficsId: Int) extends Stashable
case object Limited {
val R = "You are already observing the maximum number of games"
def apply(str: String): Option[Limited.type] = str contains R option (Limited)
}
private val EOM = "fics% "
}
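// Hedged usage sketch (not part of the original file above): constructing the Config
// case class and deriving the telnet address. Host, port and credentials are
// illustrative, not lila's actual settings.
object FICSConfigSketch {
  def main(args: Array[String]): Unit = {
    val config = lila.relay.FICS.Config("freechess.org", 5000, "guest", "", enabled = true)
    println(config.remote) // java.net.InetSocketAddress used by the Telnet actor
  }
}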
|
pavelo65/lila
|
modules/relay/src/main/FICS.scala
|
Scala
|
mit
| 5,369
|
package net.liftmodules.fobo.snippet.FoBo
import xml._
import org.specs2.mutable.Specification
import net.liftweb.http._
import net.liftweb.common._
import net.liftweb.util.Helpers._
object ResourceSpecs extends Specification {
val session = new LiftSession("", randomString(20), Empty)
"Using split and trim on a comma separated (resource) string to transform it to a distinct List " should {
"remove duplicates and preserve the order of the items" in {
import scala.collection._
val expected: List[String] = List("a", "b", "c", "d", "e", "f", "g", "h")
val testdata = List("a,a,b,c,d,e,f,g,h") // with duplicates
val out: List[String] = testdata.flatMap(
x =>
x.split(",")
.map(x => x.trim)
.toList
.distinct) //.map(_.trim).toList) //.toList//.openOr(Set())
out mustEqual expected
}
}
"FoBo Resource Snippet" should {
"with FoBo.Resources.injectJS?resources=d,c,b,a produce (in order) d.js, c.js, b.js, a.js toserve script tags" in {
//different correct answers in different versions of Scala
val res1 =
"""<script src="/classpath/fobo/d.js" type="text/javascript"/><script src="/classpath/fobo/c.js" type="text/javascript"/><script src="/classpath/fobo/b.js" type="text/javascript"/><script src="/classpath/fobo/a.js" type="text/javascript"/>""";
val res2 =
"""<script src="/classpath/fobo/d.js" type="text/javascript"></script><script src="/classpath/fobo/c.js" type="text/javascript"></script><script src="/classpath/fobo/b.js" type="text/javascript"></script><script src="/classpath/fobo/a.js" type="text/javascript"></script>""";
val m = collection.immutable.HashMap("resources" -> "d,c,b,a")
val out =
S.statelessInit(Req.nil) {
S.withAttrs(S.mapToAttrs(m)) {
val snippet = new Resources()
snippet.injectJS(<div></div>).toString
}
}
//converting to string to make sure we are really comparing
(out must ==/(res1)) or (out must ==/(res2))
}
// //prettify,lang-scala,jquery,bootstrap,angular,angular-animate,ui-bootstrap-tpls,ng-grid
// "with FoBo.Resources.injectJS?resources=a,b,c,d,e,f,g,h produce (in order) pr.js, l-s.js, jq.js, bo.js an.js a-a.js u-b-t.js n-g.js toserve script tags" in {
// //different correct answers in different versions of Scala
// val res1 = """<script src="/classpath/fobo/a.js" type="text/javascript"/><script src="/classpath/fobo/b.js" type="text/javascript"/><script src="/classpath/fobo/c.js" type="text/javascript"/><script src="/classpath/fobo/d.js" type="text/javascript"/><script src="/classpath/fobo/e.js" type="text/javascript"/><script src="/classpath/fobo/f.js" type="text/javascript"/><script src="/classpath/fobo/g.js" type="text/javascript"/><script src="/classpath/fobo/h.js" type="text/javascript"/>""";
// val res2 = """<script src="/classpath/fobo/a.js.js" type="text/javascript"></script><script src="/classpath/fobo/b.js" type="text/javascript"></script><script src="/classpath/fobo/c.js" type="text/javascript"></script><script src="/classpath/fobo/d.js" type="text/javascript"></script><script src="/classpath/fobo/e.js" type="text/javascript"/><script src="/classpath/fobo/f.js" type="text/javascript"/><script src="/classpath/fobo/g.js" type="text/javascript"/><script src="/classpath/fobo/h.js" type="text/javascript"/>""";
// //S.attr("resources").map(_.split(',').map(_.trim).toSet).openOr(Set())
// val l = List("a,b,c,d,e,f,g,h")
// val rs = l.map(_.split(','))//.toList //.map(_.trim).toList) //.toList//.openOr(Set())
// val m = collection.immutable.HashMap("resources" -> rs.toString)
// val out =
// S.statelessInit(Req.nil) {
// S.withAttrs(S.mapToAttrs(m)){
// val snippet = new Resources()
// snippet.injectJS(<div></div>).toString
// }
// }
// //converting to string to make sure we are really comparing
// (out must ==/(res1)) or (out must ==/(res2))
// }
"with FoBo.Resources.injectJS?resources=a,a only produce one toserve script tag, a.js" in {
val res1 =
"""<script src="/classpath/fobo/a.js" type="text/javascript"/>""";
val res2 =
"""<script src="/classpath/fobo/a.js" type="text/javascript"></script>""";
val m = collection.immutable.HashMap("resources" -> "a,a")
val out =
S.statelessInit(Req.nil) {
S.withAttrs(S.mapToAttrs(m)) {
val snippet = new Resources()
snippet.injectJS(<div></div>).toString
}
}
(out must ==/(res1)) or (out must ==/(res2))
}
"with FoBo.Resources.injectCSS?resources=b,c,a,d produce in order b.css, c.css, a.css, d.css toserve css link tags" in {
val res1 =
"""<link href="/classpath/fobo/b.css" rel="stylesheet" type="text/css"/><link href="/classpath/fobo/c.css" rel="stylesheet" type="text/css"/><link href="/classpath/fobo/a.css" rel="stylesheet" type="text/css"/><link href="/classpath/fobo/d.css" rel="stylesheet" type="text/css"/>""";
val res2 =
"""<link href="/classpath/fobo/b.css" rel="stylesheet" type="text/css"></link><link href="/classpath/fobo/c.css" rel="stylesheet" type="text/css"></link><link href="/classpath/fobo/a.css" rel="stylesheet" type="text/css"></link><link href="/classpath/fobo/d.css" rel="stylesheet" type="text/css"></link>""";
val m = collection.immutable.HashMap("resources" -> "b,c,a,d")
val out =
S.statelessInit(Req.nil) {
S.withAttrs(S.mapToAttrs(m)) {
val snippet = new Resources()
snippet.injectCSS(<div></div>).toString
}
}
(out must ==/(res1)) or (out must ==/(res2))
}
"with FoBo.Resources.injectCSS?resources=a,a only produce one toserve css link tag, a.css" in {
val res1 =
"""<link href="/classpath/fobo/a.css" rel="stylesheet" type="text/css"/>""";
val res2 =
"""<link href="/classpath/fobo/a.css" rel="stylesheet" type="text/css"></link>""";
val m = collection.immutable.HashMap("resources" -> "a,a")
val out =
S.statelessInit(Req.nil) {
S.withAttrs(S.mapToAttrs(m)) {
val snippet = new Resources()
snippet.injectCSS(<div></div>).toString
}
}
(out must ==/(res1)) or (out must ==/(res2))
}
}
}
|
karma4u101/FoBo
|
FoBo/FoBo-API/src/test/scala/net/liftmodules/fobo/snippet/FoBo/ResourcesSpec.scala
|
Scala
|
apache-2.0
| 6,467
|
object TextExamples {
class Rational(n: Int, d: Int) extends AnyRef {
private def gcd(x: Int, y: Int): Int = {
if (x == 0) y
else if (x < 0) gcd(-x, y)
else if (y < 0) gcd(x, -y)
else gcd(y % x, x)
}
private val g = gcd(n, d)
val numer: Int = n/g
val denom: Int = d/g
def +(that: Rational) =
new Rational(numer * that.denom + that.numer * denom,
denom * that.denom)
def -(that: Rational) =
new Rational(numer * that.denom - that.numer * denom,
denom * that.denom)
def *(that: Rational) =
new Rational(numer * that.numer, denom * that.denom)
def /(that: Rational) =
new Rational(numer * that.denom, denom * that.numer)
override def toString = ""+ numer +"/"+ denom
def square = new Rational(numer * numer, denom * denom)
}
abstract class IntSet {
def incl(x: Int): IntSet
def contains(x: Int): Boolean
}
class EmptySet extends IntSet {
def contains(x: Int): Boolean = false
def incl(x: Int): IntSet = new NonEmptySet(x, new EmptySet, new EmptySet)
}
class NonEmptySet(elem: Int, left: IntSet, right: IntSet) extends IntSet {
def contains(x: Int): Boolean =
if (x < elem) left contains x
else if (x > elem) right contains x
else true
def incl(x: Int): IntSet =
if (x < elem) new NonEmptySet(elem, left incl x, right)
else if (x > elem) new NonEmptySet(elem, left, right incl x)
else this
}
def main(args: Array[String]) {
/*var i = 1
var x = new Rational(0, 1)
while (i <= 10) {
x += new Rational(1, i)
i += 1
}
println(x)
val r = new Rational(3, 4)
println(r.square)
*/
val x = new Rational(1,3)
val y = new Rational(5,7)
val z = new Rational(3,2)
println(x - y - z)
}
}
|
promlow/books-and-tutorials
|
scala-by-example/Notes-Chapter6.scala
|
Scala
|
mit
| 1,828
|
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.workflowexecutor.communication.mq.json
import java.nio.charset.Charset
import io.deepsense.deeplang.CatalogRecorder
import io.deepsense.models.json.graph.GraphJsonProtocol.GraphReader
import io.deepsense.models.json.workflow.InferredStateJsonProtocol
import io.deepsense.models.json.workflow.InferredStateJsonProtocol._
import io.deepsense.models.json.workflow.ExecutionReportJsonProtocol._
import io.deepsense.models.workflows.{ExecutionReport, InferredState}
import io.deepsense.workflowexecutor.communication.message.global._
import io.deepsense.workflowexecutor.communication.message.global.HeartbeatJsonProtocol._
import io.deepsense.workflowexecutor.communication.message.global.PoisonPillJsonProtocol._
import io.deepsense.workflowexecutor.communication.message.global.ReadyJsonProtocol._
import io.deepsense.workflowexecutor.communication.message.global.LaunchJsonProtocol._
object Global {
val charset = Charset.forName("UTF-8")
val dOperationsCatalog = CatalogRecorder.resourcesCatalogRecorder.catalogs.dOperationsCatalog
val graphReader = new GraphReader(dOperationsCatalog)
val inferredStateJsonProtocol = InferredStateJsonProtocol(graphReader)
import inferredStateJsonProtocol._
import Constants.MessagesTypes._
object HeartbeatDeserializer extends DefaultJsonMessageDeserializer[Heartbeat](heartbeat)
object HeartbeatSerializer extends DefaultJsonMessageSerializer[Heartbeat](heartbeat)
object PoisonPillDeserializer extends DefaultJsonMessageDeserializer[PoisonPill](poisonPill)
object PoisonPillSerializer extends DefaultJsonMessageSerializer[PoisonPill](poisonPill)
object ReadyDeserializer extends DefaultJsonMessageDeserializer[Ready](ready)
object ReadySerializer extends DefaultJsonMessageSerializer[Ready](ready)
object LaunchDeserializer extends DefaultJsonMessageDeserializer[Launch](launch)
object LaunchSerializer extends DefaultJsonMessageSerializer[Launch](launch)
object ExecutionReportSerializer extends DefaultJsonMessageSerializer[ExecutionReport](executionReport)
object ExecutionReportDeserializer extends DefaultJsonMessageDeserializer[ExecutionReport](executionReport)
object InferredStateSerializer extends DefaultJsonMessageSerializer[InferredState](inferredState)
object InferredStateDeserializer extends DefaultJsonMessageDeserializer[InferredState](inferredState)
object GlobalMQSerializer extends JsonMQSerializer(
Seq(HeartbeatSerializer,
PoisonPillSerializer,
ReadySerializer,
LaunchSerializer,
ExecutionReportSerializer,
InferredStateSerializer
))
object GlobalMQDeserializer extends JsonMQDeserializer(
Seq(HeartbeatDeserializer,
PoisonPillDeserializer,
ReadyDeserializer,
LaunchDeserializer,
ExecutionReportDeserializer,
InferredStateDeserializer
))
}
|
deepsense-io/seahorse-workflow-executor
|
workflowexecutormqprotocol/src/main/scala/io/deepsense/workflowexecutor/communication/mq/json/Global.scala
|
Scala
|
apache-2.0
| 3,450
|
/*
Copyright 2015 Mate1 inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import sbt.Keys._
import sbt._
import sbtavro.SbtAvro._
object Build extends Build {
// Global build settings
override lazy val settings = super.settings ++ Seq(
name := "KafkaAvroTools",
version := "1.0.2",
organization := "com.mate1",
scalaVersion := "2.10.4",
parallelExecution in ThisBuild := false,
publishArtifact in packageDoc := false,
publishArtifact in packageSrc := false,
publishArtifact in GlobalScope in Test := true,
sources in doc := Seq.empty,
sourcesInBase := false,
resolvers ++= Seq(Resolver.mavenLocal,
"Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases/",
"Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/",
"Mate1 Repository" at "https://raw.github.com/mate1/maven/master/public/"
),
javacOptions ++= Seq("-g:none"),
scalacOptions ++= Seq("-feature", "-g:none")
)
// Scalamail project
lazy val kafkaAvroUtils = Project("kafka-avro-tools", file("."))
.settings(
libraryDependencies ++= Seq(
// General dependencies
"com.mate1.avro" %% "schema-repo-client" % "0.1-SNAPSHOT",
"com.typesafe" % "config" % "1.2.1",
"org.apache.avro" % "avro" % "1.7.5",
"org.apache.kafka" %% "kafka" % "0.8.1" exclude("javax.jms", "jms") exclude("com.sun.jdmk", "jmxtools") exclude("com.sun.jmx", "jmxri"),
"org.apache.zookeeper" % "zookeeper" % "3.4.5-cdh4.2.1" exclude("junit", "junit"),
// Test dependencies
"commons-io" % "commons-io" % "2.4" % Test,
"org.scalatest" %% "scalatest" % "2.2.1" % Test
)
)
.settings(avroSettings)
.settings(sourceDirectory in avroConfig <<= (sourceDirectory in Test)(_ / "resources/avro"))
.settings(net.virtualvoid.sbt.graph.Plugin.graphSettings: _*)
}
|
mate1/kafka-avro-tools
|
project/Build.scala
|
Scala
|
apache-2.0
| 2,433
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.codegen
import java.nio.charset.StandardCharsets
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.GenericArrayData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
* A test suite for generated projections
*/
class GeneratedProjectionSuite extends SparkFunSuite {
test("generated projections on wider table") {
val N = 1000
val wideRow1 = new GenericInternalRow((1 to N).toArray[Any])
val schema1 = StructType((1 to N).map(i => StructField("", IntegerType)))
val wideRow2 = new GenericInternalRow(
(1 to N).map(i => UTF8String.fromString(i.toString)).toArray[Any])
val schema2 = StructType((1 to N).map(i => StructField("", StringType)))
val joined = new JoinedRow(wideRow1, wideRow2)
val joinedSchema = StructType(schema1 ++ schema2)
val nested = new JoinedRow(InternalRow(joined, joined), joined)
val nestedSchema = StructType(
Seq(StructField("", joinedSchema), StructField("", joinedSchema)) ++ joinedSchema)
// test generated UnsafeProjection
val unsafeProj = UnsafeProjection.create(nestedSchema)
val unsafe: UnsafeRow = unsafeProj(nested)
(0 until N).foreach { i =>
val s = UTF8String.fromString((i + 1).toString)
assert(i + 1 === unsafe.getInt(i + 2))
assert(s === unsafe.getUTF8String(i + 2 + N))
assert(i + 1 === unsafe.getStruct(0, N * 2).getInt(i))
assert(s === unsafe.getStruct(0, N * 2).getUTF8String(i + N))
assert(i + 1 === unsafe.getStruct(1, N * 2).getInt(i))
assert(s === unsafe.getStruct(1, N * 2).getUTF8String(i + N))
}
// test generated SafeProjection
val safeProj = FromUnsafeProjection(nestedSchema)
val result = safeProj(unsafe)
// Can't compare GenericInternalRow with JoinedRow directly
(0 until N).foreach { i =>
val r = i + 1
val s = UTF8String.fromString((i + 1).toString)
assert(r === result.getInt(i + 2))
assert(s === result.getUTF8String(i + 2 + N))
assert(r === result.getStruct(0, N * 2).getInt(i))
assert(s === result.getStruct(0, N * 2).getUTF8String(i + N))
assert(r === result.getStruct(1, N * 2).getInt(i))
assert(s === result.getStruct(1, N * 2).getUTF8String(i + N))
}
// test generated MutableProjection
val exprs = nestedSchema.fields.zipWithIndex.map { case (f, i) =>
BoundReference(i, f.dataType, true)
}
val mutableProj = GenerateMutableProjection.generate(exprs)
val row1 = mutableProj(result)
assert(result === row1)
val row2 = mutableProj(result)
assert(result === row2)
}
test("generated unsafe projection with array of binary") {
val row = InternalRow(
Array[Byte](1, 2),
new GenericArrayData(Array(Array[Byte](1, 2), null, Array[Byte](3, 4))))
val fields = (BinaryType :: ArrayType(BinaryType) :: Nil).toArray[DataType]
val unsafeProj = UnsafeProjection.create(fields)
val unsafeRow: UnsafeRow = unsafeProj(row)
assert(java.util.Arrays.equals(unsafeRow.getBinary(0), Array[Byte](1, 2)))
assert(java.util.Arrays.equals(unsafeRow.getArray(1).getBinary(0), Array[Byte](1, 2)))
assert(unsafeRow.getArray(1).isNullAt(1))
assert(unsafeRow.getArray(1).getBinary(1) === null)
assert(java.util.Arrays.equals(unsafeRow.getArray(1).getBinary(2), Array[Byte](3, 4)))
val safeProj = FromUnsafeProjection(fields)
val row2 = safeProj(unsafeRow)
assert(row2 === row)
}
test("padding bytes should be zeroed out") {
val types = Seq(BooleanType, ByteType, ShortType, IntegerType, FloatType, BinaryType,
StringType)
val struct = StructType(types.map(StructField("", _, true)))
val fields = Array[DataType](StringType, struct)
val unsafeProj = UnsafeProjection.create(fields)
val innerRow = InternalRow(false, 1.toByte, 2.toShort, 3, 4.0f,
"".getBytes(StandardCharsets.UTF_8),
UTF8String.fromString(""))
val row1 = InternalRow(UTF8String.fromString(""), innerRow)
val unsafe1 = unsafeProj(row1).copy()
// create a Row with long String before the inner struct
val row2 = InternalRow(UTF8String.fromString("a_long_string").repeat(10), innerRow)
val unsafe2 = unsafeProj(row2).copy()
assert(unsafe1.getStruct(1, 7) === unsafe2.getStruct(1, 7))
val unsafe3 = unsafeProj(row1).copy()
assert(unsafe1 === unsafe3)
assert(unsafe1.getStruct(1, 7) === unsafe3.getStruct(1, 7))
}
}
|
u2009cf/spark-radar
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratedProjectionSuite.scala
|
Scala
|
apache-2.0
| 5,406
|
package filodb.stress
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext}
import scala.util.Random
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import filodb.spark._
/**
* An in-memory concurrency and query stress tester
* 1) First it ingests the NYC Taxi dataset into memory
* 2) Then runs tons of queries on it
*
* To prepare, download the first month's worth of data from http://www.andresmh.com/nyctaxitrips/
*
* Recommended to run this with the first million rows only as a first run to make sure everything works.
* Run it with LOTS of memory - 8GB recommended
*/
object InMemoryQueryStress extends App {
val taxiCsvFile = args(0)
val numRuns = 50 // Make this higher when doing performance profiling
def puts(s: String): Unit = {
//scalastyle:off
println(s)
//scalastyle:on
}
// Queries
val medallions = Array("23A89BC906FBB8BD110677FBB0B0A6C5",
"3F8D8853F7EF89A7630565DDA38E9526",
"3FE07421C46041F4801C7711F5971C63",
"789B8DC7F3CB06A645B0CDC24591B832",
"18E80475A4E491022BC2EF8559DABFD8",
"761033F2C6F96EBFA9F578E968FDEDE5",
"E4C72E0EE95C31D6B1FEFCF3F876EF90",
"AF1421FCAA4AE912BDFC996F8A9B5675",
"FB085B55ABF581ADBAD3E16283C78C01",
"29CBE2B638D6C9B7239D2CA7A72A70E9")
// trip info for a single driver within a given time range
val singleDriverQueries = (1 to 20).map { i =>
val medallion = medallions(Random.nextInt(medallions.size))
s"SELECT avg(trip_distance), avg(passenger_count) from nyc_taxi where medallion = '$medallion'" +
s" AND pickup_datetime > '2013-01-15T00Z' AND pickup_datetime < '2013-01-22T00Z'"
}
// average trip distance by day for several days
val allQueries = singleDriverQueries
// Setup SparkContext, etc.
val conf = (new SparkConf).setMaster("local[8]")
.setAppName("test")
.set("spark.filodb.store", "in-memory")
.set("spark.sql.shuffle.partitions", "4")
.set("spark.scheduler.mode", "FAIR")
.set("spark.ui.enabled", "false") // No need for UI when doing perf stuff
.set("spark.filodb.memtable.min-free-mb", "50")
val sc = new SparkContext(conf)
val sql = new SQLContext(sc)
// Ingest file - note, this will take several minutes
puts("Starting ingestion...")
val csvDF = sql.read.format("com.databricks.spark.csv").
option("header", "true").option("inferSchema", "true").
load(taxiCsvFile)
csvDF.write.format("filodb.spark").
option("dataset", "nyc_taxi").
option("row_keys", "hack_license,pickup_datetime").
option("segment_key", ":timeslice pickup_datetime 6d").
option("partition_keys", ":stringPrefix medallion 2").
mode(SaveMode.Overwrite).save()
puts("Ingestion done.")
val taxiDF = sql.filoDataset("nyc_taxi")
taxiDF.registerTempTable("nyc_taxi")
val numRecords = taxiDF.count()
puts(s"Ingested $numRecords records")
// run queries
import scala.concurrent.ExecutionContext.Implicits.global
val cachedDF = new collection.mutable.HashMap[String, DataFrame]
def getCachedDF(query: String): DataFrame =
cachedDF.getOrElseUpdate(query, sql.sql(query))
def runQueries(queries: Array[String], numQueries: Int = 1000): Unit = {
val startMillis = System.currentTimeMillis
val futures = (0 until numQueries).map(i => getCachedDF(queries(Random.nextInt(queries.size))).rdd.collectAsync)
val fut = Future.sequence(futures.asInstanceOf[Seq[Future[Array[_]]]])
Await.result(fut, Duration.Inf)
val endMillis = System.currentTimeMillis
val qps = numQueries / ((endMillis - startMillis) / 1000.0)
puts(s"Ran $numQueries queries in ${endMillis - startMillis} millis. QPS = $qps")
}
puts("Warming up...")
runQueries(allQueries.toArray, 100)
Thread sleep 2000
puts("Now running queries for real...")
(0 until numRuns).foreach { i => runQueries(allQueries.toArray) }
// clean up!
FiloDriver.shutdown()
sc.stop()
}
|
markhamstra/FiloDB
|
stress/src/main/scala/filodb.stress/InMemoryQueryStress.scala
|
Scala
|
apache-2.0
| 4,343
|
package fr.laas.fape.structures
import java.util
import java.util.function.Predicate
import scala.collection.JavaConverters._
class ISet[T](val s : Set[T]) extends java.lang.Iterable[T] {
def this() = this(Set[T]())
def this(l : java.lang.Iterable[T]) = this(l.asScala.toSet)
class ISetIterator[X](private var s : Set[X]) extends java.util.Iterator[X] {
val it = s.iterator
override def hasNext: Boolean = it.hasNext
override def next(): X = it.next()
override def remove(): Unit = ???
}
override def equals(o: Any): Boolean = o match {
case o: ISet[_] => s == o.s
case _ => false
}
override def hashCode(): Int = s.hashCode()
def asScala = s
def asJava = s.asJava
def size(): Int = s.size
def head(): T = s.head
def withoutAll(p1: util.Collection[T]): ISet[T] =
if(p1.isEmpty)
this
else
new ISet(s.filter(e => !p1.contains(e)))
def onlyWithAll(p1: util.Collection[_]): ISet[T] =
new ISet(s.filter(e => p1.contains(e)))
def without(p1: scala.Any): ISet[T] =
new ISet(s.filter(e => !(p1 == e)))
def filter(f: T => Boolean) : ISet[T] =
new ISet[T](s.filter(f))
def filter(f: Predicate[T]) : ISet[T] =
new ISet[T](s.filter(e => f.test(e)))
def contains(p1: T): Boolean =
s.contains(p1)
override def iterator(): util.Iterator[T] =
new ISetIterator[T](s)
def withAll(p1: util.Collection[T]): ISet[T] =
if(p1.isEmpty)
this
else
new ISet(s ++ p1.asScala)
def containsAll(p1: util.Collection[T]): Boolean = p1.asScala.forall(item => s.contains(item))
def isEmpty: Boolean = s.isEmpty
def `with`(p1: T): ISet[T] =
new ISet(s + p1)
def stream : java.util.stream.Stream[T] = s.asJavaCollection.stream()
override def toString = s.toString()
}
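// Hedged usage sketch (not part of the original file above): ISet wraps an immutable
// Scala Set behind a Java-friendly API, so every operation returns a new instance.
// Values are illustrative only.
object ISetUsageSketch {
  import java.util.Arrays
  import fr.laas.fape.structures.ISet

  def main(args: Array[String]): Unit = {
    val s = new ISet[Int]().`with`(1).`with`(2)
    println(s.contains(2))                         // true
    println(s.withAll(Arrays.asList(3, 4)).size()) // 4
    println(s.without(1).asScala)                  // Set(2); s itself is unchanged
  }
}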
|
athy/fape
|
structures/src/main/scala/fr/laas/fape/structures/ISet.scala
|
Scala
|
bsd-2-clause
| 1,798
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 MineFormers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.mineformers.core.client.ui.skin.drawable
import com.google.gson.JsonObject
import de.mineformers.core.client.ui.skin.DrawableDeserializer
import de.mineformers.core.client.ui.skin.drawable.DynamicTexture.{Corner, Side}
import de.mineformers.core.client.util.RenderUtils
import de.mineformers.core.util.ResourceUtils.Resource
import de.mineformers.core.util.math.shape2d.{Point, Rectangle, Size}
import de.mineformers.core.util.renderer.GuiUtils
/**
* DynamicTexture
*
* @author PaleoCrafter
*/
object DynamicTexture {
object Corner {
final val TopLeft = 0
final val TopRight = 1
final val BottomRight = 2
final val BottomLeft = 3
}
object Side {
final val Top = 0
final val Right = 1
final val Bottom = 2
final val Left = 3
}
class Deserializer extends DrawableDeserializer {
override def deserialize(typ: String, json: JsonObject): DrawableTexture = {
var texture: DynamicTexture = null
if (json.has("topLeft")) {
val tlObject = json.getAsJsonObject("topLeft")
val tObject = json.getAsJsonObject("top")
val iObject = json.getAsJsonObject("inner")
texture = new DynamicTexture(getUVsFromObject(tlObject), getUVsFromObject(tObject), getUVsFromObject(iObject))
} else if (json.has("corners")) {
val c = json.getAsJsonObject("corners")
val s = json.getAsJsonObject("sides")
val inner = json.getAsJsonObject("inner")
val uvR: JsonObject => Rectangle = getUVsFromObject
texture = new DynamicTexture(Array(uvR(c.getAsJsonObject("topLeft")), uvR(c.getAsJsonObject("topRight")), uvR(c.getAsJsonObject("bottomLeft")), uvR(c.getAsJsonObject("bottomRight"))),
Array(uvR(s.getAsJsonObject("top")), uvR(s.getAsJsonObject("right")), uvR(s.getAsJsonObject("bottom")), uvR(s.getAsJsonObject("left"))), getUVsFromObject(inner))
} else {
texture = new DynamicTexture(Rectangle(0, 0, 0, 0), Rectangle(0, 0, 0, 0), Rectangle(0, 0, 0, 0))
}
if (json.has("repeatSides"))
texture.repeatSides = json.getAsJsonPrimitive("repeatSides").getAsBoolean
if (json.has("repeatInner"))
texture.repeatInner = json.getAsJsonPrimitive("repeatInner").getAsBoolean
texture
}
}
}
class DynamicTexture(_texture: Resource, textureWidth: Int, textureHeight: Int, private var corners: Array[Rectangle], private var sides: Array[Rectangle], private var inner: Rectangle) extends DrawableTexture {
this.texture = _texture
this.textureSize = Size(textureWidth, textureHeight)
def this(corners: Array[Rectangle], sides: Array[Rectangle], inner: Rectangle) = this(null, 0, 0, corners, sides, inner)
  def this(texture: Resource, textureWidth: Int = 16, textureHeight: Int = 16, topLeft: Rectangle, top: Rectangle, inner: Rectangle) =
    this(texture, textureWidth, textureHeight,
      Array(
        topLeft,
        topLeft.translate(topLeft.width + top.width, 0),
        topLeft.translate(topLeft.width + top.width, topLeft.height + top.width),
        topLeft.translate(0, topLeft.height + top.width)),
      Array(
        top,
        top.translate(top.width, topLeft.height).resize(top.size.invert),
        top.translate(0, top.width + top.height),
        top.translate(-topLeft.width, top.height).resize(top.size.invert)),
      inner)
def this(topLeft: Rectangle, top: Rectangle, inner: Rectangle) = this(null, 0, 0, topLeft, top, inner)
override def init(): Unit = {
if (inner.width == 0) {
val topLeft = Rectangle(0, 0, textureSize.width / 3, textureSize.height / 3)
val top = Rectangle(topLeft.width, 0, textureSize.width / 3, textureSize.height / 3)
inner = Rectangle(topLeft.width, topLeft.height, textureSize.width / 3, textureSize.height / 3)
corners = Array(topLeft, topLeft.translate(topLeft.width + top.width, 0), topLeft.translate(topLeft.width + top.width, topLeft.height + top.width), topLeft.translate(0, topLeft.height + top.width))
sides = Array(top, top.translate(top.width, topLeft.height).resize(top.size.invert), top.translate(0, top.width + top.height), top.translate(-topLeft.width, top.height).resize(top.size.invert))
}
}
override def draw(mousePos: Point, pos: Point, z: Int): Unit = {
utils.resetColor()
RenderUtils.bindTexture(texture)
for (i <- 0 until corners.length)
drawCorner(i, pos + cornerPos(i), z)
for (i <- 0 until sides.length)
drawSide(i, pos + sidePos(i), i % 2 != 0, z)
drawInner(pos +(corners(Corner.TopLeft).width, corners(Corner.TopLeft).height), z)
}
def drawCorner(cornerId: Int, pos: Point, z: Int): Unit = {
val corner = corners(cornerId)
GuiUtils.drawQuad(pos.x, pos.y, z, corner.width, corner.height, scaleU(corner.x), scaleV(corner.y), scaleU(corner.end.x), scaleV(corner.end.y))
}
def drawSide(sideId: Int, pos: Point, vertical: Boolean, z: Int): Unit = {
val side = sides(sideId)
val width = if (vertical) side.width else size.width - corners(Corner.TopLeft).width - corners(Corner.TopRight).width
val height = if (vertical) size.height - corners(Corner.TopLeft).height - corners(Corner.BottomLeft).height else side.height
if (repeatSides) {
if (vertical)
GuiUtils.drawRectangleYRepeated(pos.x, pos.y, z, width, height, scaleU(side.x), scaleV(side.y), scaleU(side.end.x), scaleV(side.end.y), side.height)
else
GuiUtils.drawRectangleXRepeated(pos.x, pos.y, z, width, height, scaleU(side.x), scaleV(side.y), scaleU(side.end.x), scaleV(side.end.y), side.width)
} else
GuiUtils.drawQuad(pos.x, pos.y, z, width, height, scaleU(side.x), scaleV(side.y), scaleU(side.end.x), scaleV(side.end.y))
}
def drawInner(pos: Point, z: Int): Unit = {
val width = size.width - corners(Corner.TopLeft).width - corners(Corner.TopRight).width
val height = size.height - corners(Corner.TopLeft).height - corners(Corner.BottomLeft).height
if (repeatInner)
GuiUtils.drawRectangleRepeated(pos.x, pos.y, z, width, height, scaleU(inner.x), scaleV(inner.y), scaleU(inner.end.x), scaleV(inner.end.y), inner.width, inner.height)
else
GuiUtils.drawQuad(pos.x, pos.y, z, width, height, scaleU(inner.x), scaleV(inner.y), scaleU(inner.end.x), scaleV(inner.end.y))
}
def cornerPos(corner: Int): Point = corner match {
case Corner.TopLeft => Point(0, 0)
case Corner.TopRight => Point(size.width - corners(Corner.TopRight).width, 0)
case Corner.BottomRight => Point(size.width - corners(Corner.BottomRight).width, corners(Corner.TopLeft).height + innerHeight)
case Corner.BottomLeft => Point(0, corners(Corner.TopLeft).height + innerHeight)
case _ => Point(0, 0)
}
def sidePos(side: Int): Point = side match {
case Side.Top => Point(corners(Corner.TopLeft).width, 0)
case Side.Right => cornerPos(Corner.TopRight) +(0, corners(Corner.TopRight).height)
case Side.Bottom => cornerPos(Corner.BottomLeft) +(corners(Corner.BottomLeft).width, 0)
case Side.Left => Point(0, corners(Corner.TopLeft).height)
case _ => Point(0, 0)
}
def innerWidth = size.width - corners(Corner.TopLeft).width - corners(Corner.TopRight).width
def innerHeight = size.height - corners(Corner.TopLeft).height - corners(Corner.BottomLeft).height
def scaleU(u: Int): Float = scaleU * u
def scaleV(v: Int): Float = scaleV * v
override def toString: String = s"${corners.mkString("[", ",", "]")},${sides.mkString("[", ",", "]")},$inner"
var repeatSides = true
var repeatInner = true
lazy val scaleU = 1F / textureSize.width
lazy val scaleV = 1F / textureSize.height
}
|
MineFormers/MFCore
|
src/main/scala/de/mineformers/core/client/ui/skin/drawable/DynamicTexture.scala
|
Scala
|
mit
| 8,668
|
package io.kaitai.struct
import io.kaitai.struct.format._
import io.kaitai.struct.precompile.CalculateSeqSizes
import io.kaitai.struct.translators.RubyTranslator
abstract class DocClassCompiler(classSpecs: ClassSpecs, topClass: ClassSpec) extends AbstractCompiler {
val provider = new ClassTypeProvider(classSpecs, topClass)
val translator = new RubyTranslator(provider)
// TODO: move it into SingleOutputFile equivalent
val out = new StringLanguageOutputWriter(indent)
def outFileName(topClass: ClassSpec): String
def indent: String
// END move to SingleOutputFile
def nowClass: ClassSpec = provider.nowClass
def nowClassName = provider.nowClass.name
override def compile: CompileLog.SpecSuccess = {
fileHeader(topClass)
compileClass(topClass)
fileFooter(topClass)
CompileLog.SpecSuccess(
"",
List(CompileLog.FileSuccess(
outFileName(topClass),
out.result
))
)
}
def compileClass(curClass: ClassSpec): Unit = {
provider.nowClass = curClass
classHeader(curClass)
// Sequence
compileSeq(curClass)
// Instances
curClass.instances.foreach { case (_, instSpec) =>
instSpec match {
case pis: ParseInstanceSpec =>
compileParseInstance(curClass, pis)
case vis: ValueInstanceSpec =>
compileValueInstance(vis)
}
}
// Enums
curClass.enums.foreach { case(enumName, enumColl) => compileEnum(enumName, enumColl) }
// Recursive types
curClass.types.foreach { case (_, intClass) => compileClass(intClass) }
classFooter(curClass)
}
def compileSeq(curClass: ClassSpec): Unit = {
seqHeader(curClass)
CalculateSeqSizes.forEachSeqAttr(curClass, (attr, seqPos, sizeElement, sizeContainer) => {
compileSeqAttr(curClass, attr, seqPos, sizeElement, sizeContainer)
})
seqFooter(curClass)
}
def fileHeader(topClass: ClassSpec): Unit
def fileFooter(topClass: ClassSpec): Unit
def classHeader(classSpec: ClassSpec): Unit
def classFooter(classSpec: ClassSpec): Unit
def seqHeader(classSpec: ClassSpec): Unit
def seqFooter(classSpec: ClassSpec): Unit
def compileSeqAttr(classSpec: ClassSpec, attr: AttrSpec, seqPos: Option[Int], sizeElement: Sized, sizeContainer: Sized): Unit
def compileParseInstance(classSpec: ClassSpec, inst: ParseInstanceSpec): Unit
def compileValueInstance(vis: ValueInstanceSpec): Unit
def compileEnum(enumName: String, enumColl: EnumSpec): Unit
}
|
kaitai-io/kaitai_struct_compiler
|
shared/src/main/scala/io/kaitai/struct/DocClassCompiler.scala
|
Scala
|
gpl-3.0
| 2,484
|
package org.algorithms
import scala.annotation.tailrec
import scala.io.Source._
class PathResolver {
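  // Walks a path of '^', 'v', '<', '>' moves, one grid cell per step. Positions are
  // packed into a single Int using a row stride of Byte.MaxValue; `increment` is 1
  // for a single walker and 2 when two walkers alternate turns on the same path.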
@tailrec
private def visit(path:String, index: Int, current: Int, increment: Int, house: Set[Int]): Set[Int] = {
val width = Byte.MaxValue
if (index >= path.length) house
else path.charAt(index) match {
case 'v' => visit(path, index + increment, current - width, increment, house + (current - width))
case '^' => visit(path, index + increment, current + width, increment, house + (current + width))
case '<' => visit(path, index + increment, current - 1, increment, house + (current - 1))
case '>' => visit(path, index + increment, current + 1, increment, house + (current + 1))
}
}
def visitedHouse(path: String): Int = {
visit(path, 0, 0, 1, Set(0)).size
}
def visitedHouseWithRobot(path: String): Int = {
(visit(path, 0, 0, 2, Set(0)) union visit(path, 1, 0, 2, Set(0))).size
}
}
object PathResolver {
def apply() = new PathResolver
def main(args: Array[String]): Unit = {
val pr = PathResolver()
val line = fromFile("advent-code/src/main/resources/path").getLines.next
println(pr.visitedHouse(line))
println(pr.visitedHouseWithRobot(line))
}
}
|
Alex-Diez/Scala-Algorithms
|
advent-code/src/main/scala/org/algorithms/PathResolver.scala
|
Scala
|
mit
| 1,323
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// scalastyle:off line.size.limit
/*
* Ported by Alistair Johnson from
* https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/math/BigDecimalConvertTest.java
*/
// scalastyle:on line.size.limit
package org.scalajs.testsuite.javalib.math
import java.math._
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows.assertThrows
class BigDecimalConvertTest {
@Test def testByteValue(): Unit = {
assertEquals(1.toByte, BigDecimal.ONE.byteValue())
assertEquals(BigDecimal.valueOf(255).byteValue(), -1.toByte)
assertEquals(BigDecimal.ONE.byteValueExact(), 1.toByte)
assertThrows(classOf[ArithmeticException], BigDecimal.valueOf(255).byteValueExact())
}
@Test def testDoubleValueNeg(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = -1.2380964839238476E53
assertEquals(aNumber.doubleValue(), result, 0.0)
}
@Test def testDoubleValueNegInfinity(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+400"
val aNumber = new BigDecimal(a)
val result = Double.NegativeInfinity
assertTrue(result == aNumber.doubleValue())
}
@Test def testDoubleValuePos(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = 1.2380964839238476E53
assertEquals(aNumber.doubleValue(), result, 0.0)
}
@Test def testDoubleValuePosInfinity(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+400"
val aNumber = new BigDecimal(a)
val result = Double.PositiveInfinity
assertTrue(result == aNumber.doubleValue())
}
@Test def testFloatValueNeg(): Unit = {
val a = "-1238096483923847.6356789029578E+21"
val aNumber = new BigDecimal(a)
val result = -1.2380965E36f
assertTrue(Math.abs(aNumber.floatValue() - result) < 1E29)
}
@Test def testFloatValueNegInfinity(): Unit = {
val a = "-123809648392384755735.63567887678287E+200"
val aNumber = new BigDecimal(a)
val result = Float.NegativeInfinity
assertTrue(aNumber.floatValue() == result)
}
@Test def testFloatValuePos(): Unit = {
val a = "1238096483923847.6356789029578E+21"
val aNumber = new BigDecimal(a)
val result = 1.2380965E36f
assertTrue(Math.abs(aNumber.floatValue() - result) < 1E29)
}
@Test def testFloatValuePosInfinity(): Unit = {
val a = "123809648373567356745735.6356789787678287E+200"
val aNumber = new BigDecimal(a)
val result = Float.PositiveInfinity
assertTrue(aNumber.floatValue() == result)
}
@Test def testIntValueNeg(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = 218520473
assertEquals(aNumber.intValue(), result)
assertThrows(classOf[ArithmeticException], aNumber.intValueExact())
}
@Test def testIntValuePos(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = -218520473
assertEquals(aNumber.intValue(), result)
assertThrows(classOf[ArithmeticException], aNumber.intValueExact())
}
@Test def testLongValueNeg(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = -1246043477766677607L
assertTrue(aNumber.longValue() == result)
assertThrows(classOf[ArithmeticException], aNumber.longValueExact())
}
@Test def testLongValuePos(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+21"
val aNumber = new BigDecimal(a)
val result = 1246043477766677607L
assertTrue(aNumber.longValue() == result)
assertThrows(classOf[ArithmeticException], aNumber.longValueExact())
}
@Test def testLongValueMinMaxValues(): Unit = {
val longMaxValue = new BigDecimal(Long.MaxValue)
val longMinValue = new BigDecimal(Long.MinValue)
assertEquals(Long.MaxValue, longMaxValue.longValue)
assertEquals(Long.MinValue, longMinValue.longValue)
assertEquals(Long.MinValue, longMaxValue.add(BigDecimal.ONE).longValue)
assertEquals(Long.MaxValue, longMinValue.subtract(BigDecimal.ONE).longValue)
assertEquals(Long.MaxValue, longMaxValue.longValueExact)
assertEquals(Long.MinValue, longMinValue.longValueExact)
assertThrows(classOf[ArithmeticException], longMaxValue.add(BigDecimal.ONE).longValueExact)
assertThrows(classOf[ArithmeticException], longMinValue.subtract(BigDecimal.ONE).longValueExact)
}
@Test def testSmallLongValueExact(): Unit = {
def test(x: Long): Unit =
assertEquals(x, new BigDecimal(x).longValueExact)
test(0L)
test(5L)
test(-5L)
}
@Test def testLongValueExactNonWhole(): Unit = {
def test(smallValue: Long, scale: Int): Unit = {
val value = new BigDecimal(java.math.BigInteger.valueOf(smallValue), scale)
assertThrows(classOf[ArithmeticException], value.longValueExact)
}
test(1L, 1)
test(15L, 1)
test(-1L, 1)
test(-15L, 1)
}
@Test def bigDecimal9Point223372E285625056IsNotValidLong_Issue2314(): Unit = {
val num = new BigDecimal("9.223372E+285625056")
// Sanity checks
assertEquals(-285625050, num.scale)
assertEquals(7, num.precision)
assertEquals("9.223372E+285625056", num.toString)
// Source of issue
assertThrows(classOf[ArithmeticException], num.longValueExact)
// Code from issue #2314
assertFalse(scala.math.BigDecimal("9.223372E+285625056").isValidLong)
assertFalse(scala.math.BigDecimal(10, scale = Int.MinValue).isValidLong)
assertFalse(scala.math.BigDecimal(10, scale = Int.MaxValue).isValidLong)
}
@Test def testScaleByPowerOfTen1(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = 13
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.scaleByPowerOfTen(10)
val res = "1231212478987482988429808779810457634781384756794.987"
val resScale = 3
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale, 0d)
}
@Test def testScaleByPowerOfTen2(): Unit = {
val a = "1231212478987482988429808779810457634781384756794987"
val aScale = -13
val aNumber = new BigDecimal(new BigInteger(a), aScale)
val result = aNumber.scaleByPowerOfTen(10)
val res = "1.231212478987482988429808779810457634781384756794987E+74"
val resScale = -23
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testShortValue(): Unit = {
val value = BigDecimal.valueOf(0x13fff)
assertEquals(value.shortValue(), 0x3fff)
assertThrows(classOf[ArithmeticException], value.shortValueExact())
}
@Test def testToBigIntegerExact1(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+45"
val aNumber = new BigDecimal(a)
val res = "-123809648392384754573567356745735635678902957849027687876782870000000000000000"
val result = aNumber.toBigIntegerExact()
assertEquals(result.toString, res)
}
@Test def testToBigIntegerExactException(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E-10"
val aNumber = new BigDecimal(a)
assertThrows(classOf[ArithmeticException], aNumber.toBigIntegerExact())
}
@Test def testToBigIntegerNeg1(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+21"
val bNumber = new BigInteger("-123809648392384754573567356745735635678902957849027687")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerNeg2(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+15"
val bNumber = new BigInteger("-123809648392384754573567356745735635678902957849")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerNeg3(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E+45"
val bNumber = new BigInteger("-123809648392384754573567356745735635678902957849027687876782870000000000000000")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerPos1(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+21"
val bNumber = new BigInteger("123809648392384754573567356745735635678902957849027687")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerPos2(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+15"
val bNumber = new BigInteger("123809648392384754573567356745735635678902957849")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerPos3(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+45"
val bNumber = new BigInteger("123809648392384754573567356745735635678902957849027687876782870000000000000000")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToBigIntegerZero(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E-500"
val bNumber = new BigInteger("0")
val aNumber = new BigDecimal(a)
val result = aNumber.toBigInteger()
assertTrue(result == bNumber)
}
@Test def testToEngineeringStringNeg(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E-501"
val aNumber = new BigDecimal(a)
val result = "-123.80964839238475457356735674573563567890295784902768787678287E-471"
assertEquals(aNumber.toEngineeringString(), result)
}
@Test def testToEngineeringStringPos(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E-501"
val aNumber = new BigDecimal(a)
val result = "123.80964839238475457356735674573563567890295784902768787678287E-471"
assertEquals(aNumber.toEngineeringString(), result)
}
@Test def testToEngineeringStringZeroNegExponent(): Unit = {
val a = "0.0E-16"
val aNumber = new BigDecimal(a)
val result = "0.00E-15"
assertEquals(aNumber.toEngineeringString(), result)
}
@Test def testToEngineeringStringZeroPosExponent(): Unit = {
val a = "0.0E+16"
val aNumber = new BigDecimal(a)
val result = "0E+15"
assertEquals(aNumber.toEngineeringString(), result)
}
@Test def testToPlainStringNegNegExp(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E-100"
val aNumber = new BigDecimal(a)
val result = "-0.00000000000000000000000000000000000000000000000000000" +
"0000000000000012380964839238475457356735674573563567890295784902768" +
"787678287"
assertTrue(aNumber.toPlainString() == result)
}
@Test def testToPlainStringNegPosExp(): Unit = {
val a = "-123809648392384754573567356745735.63567890295784902768787678287E100"
val aNumber = new BigDecimal(a)
val result = "-1238096483923847545735673567457356356789029578490276878" +
"7678287000000000000000000000000000000000000000000000000000000000000" +
"00000000000"
assertTrue(aNumber.toPlainString() == result)
}
@Test def testToPlainStringPosNegExp(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E-100"
val aNumber = new BigDecimal(a)
val result = "0.000000000000000000000000000000000000000000000000000000" +
"0000000000000123809648392384754573567356745735635678902957849027687" +
"87678287"
assertTrue(aNumber.toPlainString() == result)
}
@Test def testToPlainStringPosPosExp(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E+100"
val aNumber = new BigDecimal(a)
val result = "12380964839238475457356735674573563567890295784902768787" +
"6782870000000000000000000000000000000000000000000000000000000000000" +
"0000000000"
assertTrue(aNumber.toPlainString() == result)
}
@Test def testToStringNeg(): Unit = {
val a = "-123.4564563673567380964839238475457356735674573563567890295784902768787678287E-5"
val aNumber = new BigDecimal(a)
val result = "-0.001234564563673567380964839238475457356735674573563567890295784902768787678287"
assertTrue(aNumber.toString == result)
}
@Test def testToStringPos(): Unit = {
val a = "123809648392384754573567356745735.63567890295784902768787678287E-500"
val aNumber = new BigDecimal(a)
val result = "1.2380964839238475457356735674573563567890295784902768787678287E-468"
assertTrue(aNumber.toString == result)
}
@Test def testToStringZeroScale(): Unit = {
val a = "-123809648392384754573567356745735635678902957849027687876782870"
val aNumber = new BigDecimal(new BigInteger(a))
val result = "-123809648392384754573567356745735635678902957849027687876782870"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfDoubleNaN(): Unit = {
val a = Double.NaN
assertThrows(classOf[NumberFormatException], BigDecimal.valueOf(a))
}
@Test def testValueOfDoubleNeg(): Unit = {
val a = -65678765876567576.98788767
val result = BigDecimal.valueOf(a)
val res = "-65678765876567576"
val resScale = 0
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testValueOfDoublePos1(): Unit = {
val a = 65678765876567576.98788767
val result = BigDecimal.valueOf(a)
val res = "65678765876567576"
val resScale = 0
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testValueOfDoublePos2(): Unit = {
val a = 12321237576.98788767
val result = BigDecimal.valueOf(a)
val res = "12321237576.987888"
val resScale = 6
assertEquals(result.toString, res)
assertEquals(result.scale(), resScale)
}
@Test def testValueOfDoublePos3(): Unit = {
val a = 12321237576.9878838
val result = BigDecimal.valueOf(a)
val res = "12321237576.98788"
val resScale = 6
assertTrue(result.toString.startsWith(res))
assertEquals(result.scale(), resScale)
}
@Test def testValueOfNegScaleNeg(): Unit = {
val a = -98374823947823578L
val scale = -12
val aNumber = BigDecimal.valueOf(a, scale)
val result = "-9.8374823947823578E+28"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfNegScalePos(): Unit = {
val a = -98374823947823578L
val scale = 12
val aNumber = BigDecimal.valueOf(a, scale)
val result = "-98374.823947823578"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfNegZeroScale(): Unit = {
val a = -98374823947823578L
val aNumber = BigDecimal.valueOf(a)
val result = "-98374823947823578"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfPosScaleNeg(): Unit = {
val a = 98374823947823578L
val scale = -12
val aNumber = BigDecimal.valueOf(a, scale)
val result = "9.8374823947823578E+28"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfPosScalePos(): Unit = {
val a = 98374823947823578L
val scale = 12
val aNumber = BigDecimal.valueOf(a, scale)
val result = "98374.823947823578"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfPosZeroScale(): Unit = {
val a = 98374823947823578L
val aNumber = BigDecimal.valueOf(a)
val result = "98374823947823578"
assertTrue(aNumber.toString == result)
}
@Test def testValueOfZeroScaleNeg(): Unit = {
val scale = -2
val number = BigDecimal.valueOf(0L, scale)
assertEquals(number.toString, "0E+2")
assertEquals(number.scale(), scale)
}
@Test def testValueOfZeroScalePos(): Unit = {
val scale = 1
val number = BigDecimal.valueOf(0L, scale)
assertEquals(number.toString, "0.0")
assertEquals(number.scale(), scale)
}
}
|
scala-js/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/math/BigDecimalConvertTest.scala
|
Scala
|
apache-2.0
| 16,682
|
object IfStatementSimple {
def foo(x: Boolean => String) = x(false)
foo(/*start*/if (_) "" else "g"/*end*/)
}
//(Boolean) => String
|
LPTK/intellij-scala
|
testdata/typeInference/expected/placeholder/IfStatementSimple.scala
|
Scala
|
apache-2.0
| 136
|
package controllers
import play.api.mvc._
import play.api.test._
import scala.concurrent.Future
object RootControllerSpec extends PlaySpecification with Results {
"RootController#index" should {
"should be valid" in {
val controller = new RootController()
val result = controller.index().apply(FakeRequest())
val bodyText = contentAsString(result)
bodyText must be equalTo "OK"
}
}
}
|
dtaniwaki/akka-pusher-play-app
|
test/controllers/RootControllerSpec.scala
|
Scala
|
mit
| 423
|
package net.tomasherman.specus.server.net
import net.tomasherman.specus.common.api.net.Packet
import net.tomasherman.specus.server.api.net.Codec
import org.specs2.mutable._
import org.jboss.netty.buffer.ChannelBuffer
/**
* This file is part of Specus.
*
* Specus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Specus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with Specus. If not, see <http://www.gnu.org/licenses/>.
*
*/
class Packet1 extends Packet
class Packet2 extends Packet
class Packet3 extends Packet
class Codec1 extends Codec[Packet1](0x01,classOf[Packet1]){
def decode(buffer: ChannelBuffer) = new Packet1
def encode(packet: Packet1) = null
}
class Codec2 extends Codec[Packet2](0x02,classOf[Packet2]){
def decode(buffer: ChannelBuffer) = new Packet2
def encode(packet: Packet2) = null
}
class FailCodec extends Codec[Packet2](0x03,classOf[Packet2]){
def decode(buffer: ChannelBuffer) = new Packet2
def encode(packet: Packet2) = null
}
class CodecSpec extends Specification{
def getTestRepo = {
val repo = new SimpleCodecRepository
repo.registerCodec(classOf[Codec1])
repo.registerCodec(classOf[Codec2])
repo
}
"CodecRepository" should {
"lookup by id properly" in {
val repo = getTestRepo
val res1 = repo.lookupCodec(0x01.toByte)
res1 must_!= None
val instance = res1.get
instance match {
case x:Codec1 => {success}
case _ => {failure("Wrong codec returned")}
}
val res2 = repo.lookupCodec(0x02.toByte)
res2 must_!= None
val instance2 = res2.get
instance2 match {
case x:Codec2 => {success}
case _ => {failure("Wrong codec returned")}
}
}
"lookup by packet properly" in {
val repo = getTestRepo
val res1 = repo.lookupCodec(new Packet1)
res1 must_!= None
val instance1 = res1.get
instance1 match {
case x:Codec1 => {success}
case _ => {failure("Wrong codec returned")}
}
val res2 = repo.lookupCodec(new Packet2)
res2 must_!= None
val instance2 = res2.get
instance2 match {
case x:Codec2 => {success}
case _ => {failure("Wrong codec returned")}
}
}
"lookup should fail properly" in {
val repo = getTestRepo
repo.lookupCodec(new Packet3) must_== None
repo.lookupCodec(0x03.toByte) must_== None
repo.lookupCodec(null) must throwAn[NullPointerException]
}
"register should fail on null input" in {
val repo = getTestRepo
repo.registerCodec(null) must_== false
}
"register should fail when another codec is already registered with same id or packet class" in {
val repo = getTestRepo
repo.registerCodec(classOf[Codec1]) must_== false
repo.registerCodec(classOf[Codec2]) must_== false
repo.registerCodec(classOf[FailCodec]) must_== false
}
}
}
|
tomasherman/specus
|
server/src/test/scala/net/tomasherman/specus/server/net/CodecRepositorySpec.scala
|
Scala
|
gpl-3.0
| 3,369
|
package net.katsstuff.chitchat.chat.data
import java.util.Optional
import org.spongepowered.api.Sponge
import org.spongepowered.api.data.manipulator.DataManipulatorBuilder
import org.spongepowered.api.data.manipulator.immutable.common.AbstractImmutableSingleData
import org.spongepowered.api.data.manipulator.mutable.common.AbstractSingleData
import org.spongepowered.api.data.merge.MergeFunction
import org.spongepowered.api.data.persistence.AbstractDataBuilder
import org.spongepowered.api.data.value.immutable.ImmutableValue
import org.spongepowered.api.data.value.mutable.Value
import org.spongepowered.api.data.{DataContainer, DataHolder, DataView}
import net.katsstuff.chitchat.ChitChatPlugin
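// Sponge custom-data triple: a mutable manipulator, its immutable counterpart, and a
// DataManipulatorBuilder that (de)serialises the channel string stored under the
// plugin's ChannelKey.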
class ChannelData(_value: String)(implicit plugin: ChitChatPlugin)
extends AbstractSingleData[String, ChannelData, ImmutableChannelData](_value, plugin.versionHelper.ChannelKey)
with Comparable[ChannelData] /*Adding in Comparable makes it easy to keep compat with API 4*/ {
override def asImmutable(): ImmutableChannelData = new ImmutableChannelData(getValue)
override def compareTo(o: ChannelData): Int = getValue.compareTo(o.getValue)
override def getValueGetter: Value[_] =
Sponge.getRegistry.getValueFactory.createValue(plugin.versionHelper.ChannelKey, getValue)
  override def from(container: DataContainer): Optional[ChannelData] = from(container: DataView) // ascribe to DataView so this delegates to the overload below rather than recursing into itself
def from(view: DataView): Optional[ChannelData] = {
view.getString(plugin.versionHelper.ChannelKey.getQuery).ifPresent(s => setValue(s))
Optional.of(this)
}
override def copy(): ChannelData = new ChannelData(getValue)
override def fill(dataHolder: DataHolder, overlap: MergeFunction): Optional[ChannelData] = {
val merged = overlap.merge(this, dataHolder.get(classOf[ChannelData]).orElse(null))
setValue(merged.getValue)
Optional.of(this)
}
override def getContentVersion: Int = 1
}
class ImmutableChannelData(_value: String)(implicit plugin: ChitChatPlugin)
extends AbstractImmutableSingleData[String, ImmutableChannelData, ChannelData](
_value,
plugin.versionHelper.ChannelKey
) with Comparable[ImmutableChannelData] {
override def asMutable(): ChannelData = new ChannelData(value)
override def getValueGetter: ImmutableValue[_] =
Sponge.getRegistry.getValueFactory.createValue(plugin.versionHelper.ChannelKey, getValue).asImmutable()
override def compareTo(o: ImmutableChannelData): Int = value.compareTo(o.value)
override def getContentVersion: Int = 1
}
class ChannelDataBuilder(implicit plugin: ChitChatPlugin)
extends AbstractDataBuilder[ChannelData](classOf[ChannelData], 1)
with DataManipulatorBuilder[ChannelData, ImmutableChannelData] {
override def create(): ChannelData = new ChannelData("Global")
override def createFrom(dataHolder: DataHolder): Optional[ChannelData] = create().fill(dataHolder)
override def buildContent(container: DataView): Optional[ChannelData] = create().from(container)
}
|
Katrix-/ChitChat
|
shared/src/main/scala/net/katsstuff/chitchat/chat/data/ChannelData.scala
|
Scala
|
mit
| 2,982
|
package com.sai.pumpkin.managedbeans
import java.io.File
import java.io.FileInputStream
import java.io.Serializable
import java.text.SimpleDateFormat
import java.util.Date
import scala.beans.BeanProperty
import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.JavaConversions.seqAsJavaList
import scala.collection.immutable.TreeMap
import org.primefaces.context.RequestContext
import org.primefaces.event.NodeSelectEvent
import org.primefaces.model.DefaultStreamedContent
import org.primefaces.model.DefaultTreeNode
import org.primefaces.model.StreamedContent
import org.primefaces.model.TreeNode
import org.primefaces.model.chart.PieChartModel
import org.primefaces.model.mindmap.DefaultMindmapNode
import org.primefaces.model.mindmap.MindmapNode
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.data.mongodb.core.mapping.Document
import org.springframework.data.mongodb.core.query.Criteria
import org.springframework.data.mongodb.core.query.Query
import org.springframework.data.mongodb.core.query.Update
import org.springframework.web.context.support.WebApplicationContextUtils
import com.sai.pumpkin.domain.ArtifactDetail
import com.sai.pumpkin.domain.ChangeSet
import com.sai.pumpkin.domain.ConsumerDetail
import com.sai.pumpkin.domain.ReleaseNotes
import javax.faces.application.FacesMessage
import javax.faces.context.FacesContext
import javax.servlet.ServletContext
class AllWebservicesController extends Serializable {
@BeanProperty
var wsVersions = new java.util.ArrayList[String]()
@BeanProperty
var wsConsumers: java.util.List[ConsumerDetail] = new java.util.ArrayList[ConsumerDetail]()
@BeanProperty
var selectedWs: String = _
@BeanProperty
var showTabs: Boolean = _
@BeanProperty
var root: TreeNode = _
@BeanProperty
var selectedNode: TreeNode = _
@BeanProperty
var mavenVersionSelected: Boolean = _
@BeanProperty
var serviceVersion: String = _
@BeanProperty
var mavenVersion: String = _
@BeanProperty
var rootMaven: MindmapNode = _
@BeanProperty
var projects: java.util.List[String] = new java.util.ArrayList[String]()
@BeanProperty
var allMavenIdentifiers: java.util.List[ArtifactDetail] = new java.util.ArrayList[ArtifactDetail]()
@BeanProperty
var allFilteredMavenIdentifiers: java.util.List[ArtifactDetail] = new java.util.ArrayList[ArtifactDetail]()
@BeanProperty
var consumerArtifactIdentifier: String = _
@BeanProperty
var consumerGroupId: String = _
@BeanProperty
var consumerArtifactId: String = _
@BeanProperty
var consumerVersion: String = _
@BeanProperty
var consumerClassifier: String = _
@BeanProperty
var consumerStartDate: Date = _
@BeanProperty
var consumerEndDate: Date = _
@BeanProperty
var consumerTag: String = _
@BeanProperty
var consumerName: String = _
@BeanProperty
var artifactFile: StreamedContent = _
@BeanProperty
var currArtifact: ArtifactDetail = _
@BeanProperty
var interfacesAvailable: Boolean = _
@BeanProperty
var interfacesPath: String = _
@BeanProperty
var allReleases: java.util.List[ReleaseNotes] = new java.util.ArrayList[ReleaseNotes]()
@BeanProperty
var allChangesets: java.util.List[ChangeSet] = new java.util.ArrayList[ChangeSet]()
@BeanProperty
var pie: PieChartModel = _
val servletContext = FacesContext.getCurrentInstance().getExternalContext().getContext().asInstanceOf[ServletContext]
val appContext = WebApplicationContextUtils.getWebApplicationContext(servletContext)
val mongoTemplate = appContext.getBean(classOf[MongoTemplate])
val query5 = new Query()
query5.addCriteria(Criteria.where("artifactId").regex("-ws"))
val webserviceArtifacts = mongoTemplate.find(query5, classOf[ArtifactDetail])
wsVersions ++= webserviceArtifacts.groupBy(_.getArtifactId).keys.toList
def handleCityChange() = {
println(" ----------------" + selectedWs)
showTabs = selectedWs != null && selectedWs.length > 0
if (showTabs) createTree()
}
def createTree() = {
val query = new Query()
query.addCriteria(Criteria.where("artifactId").is(selectedWs))
val webserviceArtifacts = mongoTemplate.find(query, classOf[ArtifactDetail])
println(" 1 ==> " + webserviceArtifacts)
val map = webserviceArtifacts.groupBy(_.getClassifier)
println("2 ==> " + map)
root = new DefaultTreeNode("Root", null)
TreeMap(map.toSeq: _*).foreach(tuple => {
val node = new DefaultTreeNode(if (tuple._1.isEmpty) "(Versionless)" else tuple._1 + " (service version)", root)
tuple._2.foreach(a => {
new DefaultTreeNode(a.version + " (maven artifact version)", node)
})
})
}
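  // Fired when a tree node is selected: for a concrete Maven artifact version this
  // builds the dependency mindmap, loads consumers, dependent projects, release notes,
  // and a per-committer pie chart of changesets for the selected artifact.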
def onNodeSelect(event: NodeSelectEvent) = {
selectedNode = event.getTreeNode()
mavenVersionSelected = !selectedNode.getData().toString().contains("V")
println("maven version selected: " + mavenVersionSelected)
if (selectedNode.getData().toString().startsWith("V")) serviceVersion = selectedNode.getData().toString().replace(" (service version)", "")
if (mavenVersionSelected) mavenVersion = selectedNode.getData().toString().replace("(maven artifact version)", "").trim
if (mavenVersionSelected) {
println(mavenVersion + " -------------")
val query6 = new Query()
query6.addCriteria(Criteria.where("artifactId").is(selectedWs).andOperator(Criteria.where("version").is(mavenVersion)))
val artifact = mongoTemplate.find(query6, classOf[ArtifactDetail]).get(0)
currArtifact = artifact
rootMaven = new DefaultMindmapNode(artifact.artifactId, artifact.groupId + ":" + artifact.artifactId + ":" + artifact.version + ":" + artifact.classifier, "FFCC00", false)
artifact.children.foreach(child => rootMaven.addNode(new DefaultMindmapNode(child.artifactId + "(" + child.version + ")", "data", "6e9ebf", true)))
mavenVersion = selectedNode.getData().toString().replace("(maven artifact version)", "").trim
val context = RequestContext.getCurrentInstance()
context.execute("info1.show()")
val query7 = new Query()
query7.addCriteria(Criteria.where("services.artifactId").is(selectedWs).andOperator(Criteria.where("services.version").is(mavenVersion)))
wsConsumers = mongoTemplate.find(query7, classOf[ConsumerDetail])
val query8 = new Query()
query8.addCriteria(Criteria.where("children.artifactId").is(selectedWs).andOperator(Criteria.where("children.version").is(mavenVersion)))
val _projects = mongoTemplate.find(query8, classOf[ArtifactDetail])
projects = _projects.map(a => a.groupId + " | " + a.artifactId + " | " + a.version).toList
val allIds = mongoTemplate.findAll(classOf[ArtifactDetail])
allMavenIdentifiers = allIds
allFilteredMavenIdentifiers = allIds
download()
val query = new Query()
query.addCriteria(Criteria.where("projectArtifact.children.artifactId").is(currArtifact.artifactId).andOperator(Criteria.where("projectArtifact.children.version").is(currArtifact.version)))
allReleases = mongoTemplate.find(query, classOf[ReleaseNotes])
println(" ------------- All Releases " + allReleases)
pie = new PieChartModel()
allChangesets.clear()
val query5 = new Query
query5.addCriteria(Criteria.where("groupId").is(artifact.groupId).and("artifactId").is(artifact.artifactId).and("version").is(artifact.version))
allChangesets.addAll(mongoTemplate.find(query5, classOf[ChangeSet]).toList)
val allChangesetsPerAuthor = allChangesets.groupBy(_.committer)
allChangesetsPerAuthor.map(tuple => pie.set(tuple._1, tuple._2.flatMap(_.entries).size))
}
}
def saveConsumer() = {
println("$$$$$$$$$$$$$$" + consumerArtifactId)
val query6 = new Query()
query6.addCriteria(Criteria.where("artifactId").is(selectedWs).andOperator(Criteria.where("version").is(mavenVersion)))
val wsartifact = mongoTemplate.find(query6, classOf[ArtifactDetail]).get(0)
val query7 = new Query()
query7.addCriteria(Criteria.where("artifactId").is(consumerArtifactId).andOperator(Criteria.where("version").is(consumerVersion)))
val consumerartifact = mongoTemplate.find(query7, classOf[ArtifactDetail])
val currArtifact = consumerartifact.size() match {
case 0 => { val art = new ArtifactDetail; art.groupId = consumerGroupId; art.artifactId = consumerArtifactId; art.version = consumerVersion; art.classifier = consumerClassifier; art }
case _ => consumerartifact.get(0)
}
val query8 = new Query()
query8.addCriteria(Criteria.where("artifactDetail.artifactId").is(consumerArtifactId).andOperator(Criteria.where("artifactDetail.version").is(consumerVersion)))
val existingConsumerDetail = mongoTemplate.find(query8, classOf[ConsumerDetail])
val consumerDetail = existingConsumerDetail.size match {
case 0 => new ConsumerDetail
case _ => existingConsumerDetail.get(0)
}
consumerDetail.services += wsartifact
consumerDetail.artifactDetail = currArtifact
val df = new SimpleDateFormat("dd/mm/yy")
consumerDetail.fromDate = consumerStartDate
consumerDetail.toDate = consumerEndDate
consumerDetail.name = consumerName
consumerDetail.tags = consumerTag
val update = new Update()
update.set("artifactDetail", consumerDetail.artifactDetail)
update.set("name", consumerDetail.name)
update.set("artifactDetail", consumerDetail.artifactDetail)
update.set("fromDate", consumerDetail.fromDate)
update.set("tags", consumerDetail.tags)
update.set("services", consumerDetail.services)
mongoTemplate.upsert(query8, update, classOf[ConsumerDetail])
RequestContext.getCurrentInstance().showMessageInDialog(new FacesMessage("Success", "Saved"))
}
def download() = {
}
}
|
SaiprasadKrishnamurthy/BigDash
|
src/main/scala/com/sai/pumpkin/managedbeans/AllWebservicesController.scala
|
Scala
|
apache-2.0
| 9,823
|